Compare commits
3 Commits
192b0add77
...
b6ef66e057
| Author | SHA1 | Date | |
|---|---|---|---|
| b6ef66e057 | |||
| b0c3fa10fb | |||
| 6abb751ce8 |
29
.gitea/workflows/feedser-ci.yml
Normal file
29
.gitea/workflows/feedser-ci.yml
Normal file
@@ -0,0 +1,29 @@
|
||||
name: Feedser CI
|
||||
|
||||
on:
|
||||
push:
|
||||
branches: ["main", "develop"]
|
||||
pull_request:
|
||||
branches: ["main", "develop"]
|
||||
|
||||
jobs:
|
||||
build-and-test:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Check out repository
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- name: Setup .NET 10 preview
|
||||
uses: actions/setup-dotnet@v4
|
||||
with:
|
||||
dotnet-version: 10.0.100-rc.1.25451.107
|
||||
include-prerelease: true
|
||||
|
||||
- name: Restore dependencies
|
||||
run: dotnet restore src/StellaOps.Feedser/StellaOps.Feedser.sln
|
||||
|
||||
- name: Build
|
||||
run: dotnet build src/StellaOps.Feedser/StellaOps.Feedser.sln --configuration Release --no-restore -warnaserror
|
||||
|
||||
- name: Test
|
||||
run: dotnet test src/StellaOps.Feedser/StellaOps.Feedser.Tests/StellaOps.Feedser.Tests.csproj --configuration Release --no-restore --logger "trx;LogFileName=feedser-tests.trx"
|
||||
87
.gitea/workflows/feedser-tests.yml
Normal file
87
.gitea/workflows/feedser-tests.yml
Normal file
@@ -0,0 +1,87 @@
|
||||
name: Feedser Tests CI
|
||||
|
||||
on:
|
||||
push:
|
||||
paths:
|
||||
- 'StellaOps.Feedser/**'
|
||||
- '.gitea/workflows/feedser-tests.yml'
|
||||
pull_request:
|
||||
paths:
|
||||
- 'StellaOps.Feedser/**'
|
||||
- '.gitea/workflows/feedser-tests.yml'
|
||||
|
||||
jobs:
|
||||
advisory-store-performance:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
|
||||
- name: Set up .NET SDK
|
||||
uses: actions/setup-dotnet@v4
|
||||
with:
|
||||
dotnet-version: 10.0.100-rc.1
|
||||
|
||||
- name: Restore dependencies
|
||||
working-directory: StellaOps.Feedser
|
||||
run: dotnet restore StellaOps.Feedser.Tests/StellaOps.Feedser.Tests.csproj
|
||||
|
||||
- name: Run advisory store performance test
|
||||
working-directory: StellaOps.Feedser
|
||||
run: |
|
||||
set -euo pipefail
|
||||
dotnet test \
|
||||
StellaOps.Feedser.Tests/StellaOps.Feedser.Tests.csproj \
|
||||
--filter "FullyQualifiedName~AdvisoryStorePerformanceTests" \
|
||||
--logger:"console;verbosity=detailed" | tee performance.log
|
||||
|
||||
- name: Upload performance log
|
||||
if: always()
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: advisory-store-performance-log
|
||||
path: StellaOps.Feedser/performance.log
|
||||
|
||||
full-test-suite:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
|
||||
- name: Set up .NET SDK
|
||||
uses: actions/setup-dotnet@v4
|
||||
with:
|
||||
dotnet-version: 10.0.100-rc.1
|
||||
|
||||
- name: Restore dependencies
|
||||
working-directory: StellaOps.Feedser
|
||||
run: dotnet restore StellaOps.Feedser.Tests/StellaOps.Feedser.Tests.csproj
|
||||
|
||||
- name: Run full test suite with baseline guard
|
||||
working-directory: StellaOps.Feedser
|
||||
env:
|
||||
BASELINE_SECONDS: "19.8"
|
||||
TOLERANCE_PERCENT: "25"
|
||||
run: |
|
||||
set -euo pipefail
|
||||
start=$(date +%s)
|
||||
dotnet test StellaOps.Feedser.Tests/StellaOps.Feedser.Tests.csproj --no-build | tee full-tests.log
|
||||
end=$(date +%s)
|
||||
duration=$((end-start))
|
||||
echo "Full test duration: ${duration}s"
|
||||
export DURATION_SECONDS="$duration"
|
||||
python - <<'PY'
|
||||
import os, sys
|
||||
duration = float(os.environ["DURATION_SECONDS"])
|
||||
baseline = float(os.environ["BASELINE_SECONDS"])
|
||||
tolerance = float(os.environ["TOLERANCE_PERCENT"])
|
||||
threshold = baseline * (1 + tolerance / 100)
|
||||
print(f"Baseline {baseline:.1f}s, threshold {threshold:.1f}s, observed {duration:.1f}s")
|
||||
if duration > threshold:
|
||||
sys.exit(f"Full test duration {duration:.1f}s exceeded threshold {threshold:.1f}s")
|
||||
PY
|
||||
|
||||
- name: Upload full test log
|
||||
if: always()
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: full-test-suite-log
|
||||
path: StellaOps.Feedser/full-tests.log
|
||||
3
.gitignore
vendored
3
.gitignore
vendored
@@ -1,6 +1,8 @@
|
||||
# Build outputs
|
||||
bin/
|
||||
obj/
|
||||
*.pdb
|
||||
*.dll
|
||||
|
||||
# IDE state
|
||||
.vs/
|
||||
@@ -16,3 +18,4 @@ obj/
|
||||
*.log
|
||||
TestResults/
|
||||
|
||||
.dotnet
|
||||
46
src/Jobs.cs
Normal file
46
src/Jobs.cs
Normal file
@@ -0,0 +1,46 @@
|
||||
using System;
|
||||
using System.Threading;
|
||||
using System.Threading.Tasks;
|
||||
using StellaOps.Feedser.Core.Jobs;
|
||||
|
||||
namespace StellaOps.Feedser.Source.Vndr.Oracle;
|
||||
|
||||
internal static class OracleJobKinds
|
||||
{
|
||||
public const string Fetch = "source:vndr-oracle:fetch";
|
||||
public const string Parse = "source:vndr-oracle:parse";
|
||||
public const string Map = "source:vndr-oracle:map";
|
||||
}
|
||||
|
||||
internal sealed class OracleFetchJob : IJob
|
||||
{
|
||||
private readonly OracleConnector _connector;
|
||||
|
||||
public OracleFetchJob(OracleConnector connector)
|
||||
=> _connector = connector ?? throw new ArgumentNullException(nameof(connector));
|
||||
|
||||
public Task ExecuteAsync(JobExecutionContext context, CancellationToken cancellationToken)
|
||||
=> _connector.FetchAsync(context.Services, cancellationToken);
|
||||
}
|
||||
|
||||
internal sealed class OracleParseJob : IJob
|
||||
{
|
||||
private readonly OracleConnector _connector;
|
||||
|
||||
public OracleParseJob(OracleConnector connector)
|
||||
=> _connector = connector ?? throw new ArgumentNullException(nameof(connector));
|
||||
|
||||
public Task ExecuteAsync(JobExecutionContext context, CancellationToken cancellationToken)
|
||||
=> _connector.ParseAsync(context.Services, cancellationToken);
|
||||
}
|
||||
|
||||
internal sealed class OracleMapJob : IJob
|
||||
{
|
||||
private readonly OracleConnector _connector;
|
||||
|
||||
public OracleMapJob(OracleConnector connector)
|
||||
=> _connector = connector ?? throw new ArgumentNullException(nameof(connector));
|
||||
|
||||
public Task ExecuteAsync(JobExecutionContext context, CancellationToken cancellationToken)
|
||||
=> _connector.MapAsync(context.Services, cancellationToken);
|
||||
}
|
||||
293
src/OracleConnector.cs
Normal file
293
src/OracleConnector.cs
Normal file
@@ -0,0 +1,293 @@
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.Linq;
|
||||
using System.Text.Json;
|
||||
using Microsoft.Extensions.Logging;
|
||||
using Microsoft.Extensions.Options;
|
||||
using MongoDB.Bson;
|
||||
using StellaOps.Feedser.Source.Common;
|
||||
using StellaOps.Feedser.Source.Common.Fetch;
|
||||
using StellaOps.Feedser.Source.Vndr.Oracle.Configuration;
|
||||
using StellaOps.Feedser.Source.Vndr.Oracle.Internal;
|
||||
using StellaOps.Feedser.Storage.Mongo;
|
||||
using StellaOps.Feedser.Storage.Mongo.Advisories;
|
||||
using StellaOps.Feedser.Storage.Mongo.Documents;
|
||||
using StellaOps.Feedser.Storage.Mongo.Dtos;
|
||||
using StellaOps.Feedser.Storage.Mongo.PsirtFlags;
|
||||
using StellaOps.Plugin;
|
||||
|
||||
namespace StellaOps.Feedser.Source.Vndr.Oracle;
|
||||
|
||||
public sealed class OracleConnector : IFeedConnector
|
||||
{
|
||||
private static readonly JsonSerializerOptions SerializerOptions = new()
|
||||
{
|
||||
PropertyNamingPolicy = JsonNamingPolicy.CamelCase,
|
||||
DefaultIgnoreCondition = System.Text.Json.Serialization.JsonIgnoreCondition.WhenWritingNull,
|
||||
};
|
||||
|
||||
private readonly SourceFetchService _fetchService;
|
||||
private readonly RawDocumentStorage _rawDocumentStorage;
|
||||
private readonly IDocumentStore _documentStore;
|
||||
private readonly IDtoStore _dtoStore;
|
||||
private readonly IAdvisoryStore _advisoryStore;
|
||||
private readonly IPsirtFlagStore _psirtFlagStore;
|
||||
private readonly ISourceStateRepository _stateRepository;
|
||||
private readonly OracleOptions _options;
|
||||
private readonly TimeProvider _timeProvider;
|
||||
private readonly ILogger<OracleConnector> _logger;
|
||||
|
||||
public OracleConnector(
|
||||
SourceFetchService fetchService,
|
||||
RawDocumentStorage rawDocumentStorage,
|
||||
IDocumentStore documentStore,
|
||||
IDtoStore dtoStore,
|
||||
IAdvisoryStore advisoryStore,
|
||||
IPsirtFlagStore psirtFlagStore,
|
||||
ISourceStateRepository stateRepository,
|
||||
IOptions<OracleOptions> options,
|
||||
TimeProvider? timeProvider,
|
||||
ILogger<OracleConnector> logger)
|
||||
{
|
||||
_fetchService = fetchService ?? throw new ArgumentNullException(nameof(fetchService));
|
||||
_rawDocumentStorage = rawDocumentStorage ?? throw new ArgumentNullException(nameof(rawDocumentStorage));
|
||||
_documentStore = documentStore ?? throw new ArgumentNullException(nameof(documentStore));
|
||||
_dtoStore = dtoStore ?? throw new ArgumentNullException(nameof(dtoStore));
|
||||
_advisoryStore = advisoryStore ?? throw new ArgumentNullException(nameof(advisoryStore));
|
||||
_psirtFlagStore = psirtFlagStore ?? throw new ArgumentNullException(nameof(psirtFlagStore));
|
||||
_stateRepository = stateRepository ?? throw new ArgumentNullException(nameof(stateRepository));
|
||||
_options = (options ?? throw new ArgumentNullException(nameof(options))).Value ?? throw new ArgumentNullException(nameof(options));
|
||||
_options.Validate();
|
||||
_timeProvider = timeProvider ?? TimeProvider.System;
|
||||
_logger = logger ?? throw new ArgumentNullException(nameof(logger));
|
||||
}
|
||||
|
||||
public string SourceName => VndrOracleConnectorPlugin.SourceName;
|
||||
|
||||
public async Task FetchAsync(IServiceProvider services, CancellationToken cancellationToken)
|
||||
{
|
||||
var cursor = await GetCursorAsync(cancellationToken).ConfigureAwait(false);
|
||||
var pendingDocuments = cursor.PendingDocuments.ToList();
|
||||
var pendingMappings = cursor.PendingMappings.ToList();
|
||||
var now = _timeProvider.GetUtcNow();
|
||||
|
||||
foreach (var uri in _options.AdvisoryUris)
|
||||
{
|
||||
cancellationToken.ThrowIfCancellationRequested();
|
||||
|
||||
try
|
||||
{
|
||||
var advisoryId = DeriveAdvisoryId(uri);
|
||||
var title = advisoryId.Replace('-', ' ');
|
||||
var published = now;
|
||||
|
||||
var metadata = OracleDocumentMetadata.CreateMetadata(advisoryId, title, published);
|
||||
var existing = await _documentStore.FindBySourceAndUriAsync(SourceName, uri.ToString(), cancellationToken).ConfigureAwait(false);
|
||||
|
||||
var request = new SourceFetchRequest(OracleOptions.HttpClientName, SourceName, uri)
|
||||
{
|
||||
Metadata = metadata,
|
||||
ETag = existing?.Etag,
|
||||
LastModified = existing?.LastModified,
|
||||
AcceptHeaders = new[] { "text/html", "application/xhtml+xml", "text/plain;q=0.5" },
|
||||
};
|
||||
|
||||
var result = await _fetchService.FetchAsync(request, cancellationToken).ConfigureAwait(false);
|
||||
if (!result.IsSuccess || result.Document is null)
|
||||
{
|
||||
continue;
|
||||
}
|
||||
|
||||
if (!pendingDocuments.Contains(result.Document.Id))
|
||||
{
|
||||
pendingDocuments.Add(result.Document.Id);
|
||||
}
|
||||
|
||||
if (_options.RequestDelay > TimeSpan.Zero)
|
||||
{
|
||||
await Task.Delay(_options.RequestDelay, cancellationToken).ConfigureAwait(false);
|
||||
}
|
||||
}
|
||||
catch (Exception ex)
|
||||
{
|
||||
_logger.LogError(ex, "Oracle fetch failed for {Uri}", uri);
|
||||
await _stateRepository.MarkFailureAsync(SourceName, _timeProvider.GetUtcNow(), TimeSpan.FromMinutes(10), ex.Message, cancellationToken).ConfigureAwait(false);
|
||||
throw;
|
||||
}
|
||||
}
|
||||
|
||||
var updatedCursor = cursor
|
||||
.WithPendingDocuments(pendingDocuments)
|
||||
.WithPendingMappings(pendingMappings)
|
||||
.WithLastProcessed(now);
|
||||
|
||||
await UpdateCursorAsync(updatedCursor, cancellationToken).ConfigureAwait(false);
|
||||
}
|
||||
|
||||
public async Task ParseAsync(IServiceProvider services, CancellationToken cancellationToken)
|
||||
{
|
||||
var cursor = await GetCursorAsync(cancellationToken).ConfigureAwait(false);
|
||||
if (cursor.PendingDocuments.Count == 0)
|
||||
{
|
||||
return;
|
||||
}
|
||||
|
||||
var pendingDocuments = cursor.PendingDocuments.ToList();
|
||||
var pendingMappings = cursor.PendingMappings.ToList();
|
||||
|
||||
foreach (var documentId in cursor.PendingDocuments)
|
||||
{
|
||||
cancellationToken.ThrowIfCancellationRequested();
|
||||
|
||||
var document = await _documentStore.FindAsync(documentId, cancellationToken).ConfigureAwait(false);
|
||||
if (document is null)
|
||||
{
|
||||
pendingDocuments.Remove(documentId);
|
||||
pendingMappings.Remove(documentId);
|
||||
continue;
|
||||
}
|
||||
|
||||
if (!document.GridFsId.HasValue)
|
||||
{
|
||||
_logger.LogWarning("Oracle document {DocumentId} missing GridFS payload", document.Id);
|
||||
await _documentStore.UpdateStatusAsync(document.Id, DocumentStatuses.Failed, cancellationToken).ConfigureAwait(false);
|
||||
pendingDocuments.Remove(documentId);
|
||||
pendingMappings.Remove(documentId);
|
||||
continue;
|
||||
}
|
||||
|
||||
OracleDto dto;
|
||||
try
|
||||
{
|
||||
var metadata = OracleDocumentMetadata.FromDocument(document);
|
||||
var content = await _rawDocumentStorage.DownloadAsync(document.GridFsId.Value, cancellationToken).ConfigureAwait(false);
|
||||
var html = System.Text.Encoding.UTF8.GetString(content);
|
||||
dto = OracleParser.Parse(html, metadata);
|
||||
}
|
||||
catch (Exception ex)
|
||||
{
|
||||
_logger.LogError(ex, "Oracle parse failed for document {DocumentId}", document.Id);
|
||||
await _documentStore.UpdateStatusAsync(document.Id, DocumentStatuses.Failed, cancellationToken).ConfigureAwait(false);
|
||||
pendingDocuments.Remove(documentId);
|
||||
pendingMappings.Remove(documentId);
|
||||
continue;
|
||||
}
|
||||
|
||||
var json = JsonSerializer.Serialize(dto, SerializerOptions);
|
||||
var payload = BsonDocument.Parse(json);
|
||||
var validatedAt = _timeProvider.GetUtcNow();
|
||||
|
||||
var existingDto = await _dtoStore.FindByDocumentIdAsync(document.Id, cancellationToken).ConfigureAwait(false);
|
||||
var dtoRecord = existingDto is null
|
||||
? new DtoRecord(Guid.NewGuid(), document.Id, SourceName, "oracle.advisory.v1", payload, validatedAt)
|
||||
: existingDto with
|
||||
{
|
||||
Payload = payload,
|
||||
SchemaVersion = "oracle.advisory.v1",
|
||||
ValidatedAt = validatedAt,
|
||||
};
|
||||
|
||||
await _dtoStore.UpsertAsync(dtoRecord, cancellationToken).ConfigureAwait(false);
|
||||
await _documentStore.UpdateStatusAsync(document.Id, DocumentStatuses.PendingMap, cancellationToken).ConfigureAwait(false);
|
||||
|
||||
pendingDocuments.Remove(documentId);
|
||||
if (!pendingMappings.Contains(documentId))
|
||||
{
|
||||
pendingMappings.Add(documentId);
|
||||
}
|
||||
}
|
||||
|
||||
var updatedCursor = cursor
|
||||
.WithPendingDocuments(pendingDocuments)
|
||||
.WithPendingMappings(pendingMappings);
|
||||
|
||||
await UpdateCursorAsync(updatedCursor, cancellationToken).ConfigureAwait(false);
|
||||
}
|
||||
|
||||
public async Task MapAsync(IServiceProvider services, CancellationToken cancellationToken)
|
||||
{
|
||||
var cursor = await GetCursorAsync(cancellationToken).ConfigureAwait(false);
|
||||
if (cursor.PendingMappings.Count == 0)
|
||||
{
|
||||
return;
|
||||
}
|
||||
|
||||
var pendingMappings = cursor.PendingMappings.ToList();
|
||||
|
||||
foreach (var documentId in cursor.PendingMappings)
|
||||
{
|
||||
cancellationToken.ThrowIfCancellationRequested();
|
||||
|
||||
var dtoRecord = await _dtoStore.FindByDocumentIdAsync(documentId, cancellationToken).ConfigureAwait(false);
|
||||
var document = await _documentStore.FindAsync(documentId, cancellationToken).ConfigureAwait(false);
|
||||
|
||||
if (dtoRecord is null || document is null)
|
||||
{
|
||||
pendingMappings.Remove(documentId);
|
||||
continue;
|
||||
}
|
||||
|
||||
OracleDto? dto;
|
||||
try
|
||||
{
|
||||
var json = dtoRecord.Payload.ToJson();
|
||||
dto = JsonSerializer.Deserialize<OracleDto>(json, SerializerOptions);
|
||||
}
|
||||
catch (Exception ex)
|
||||
{
|
||||
_logger.LogError(ex, "Oracle DTO deserialization failed for document {DocumentId}", documentId);
|
||||
await _documentStore.UpdateStatusAsync(document.Id, DocumentStatuses.Failed, cancellationToken).ConfigureAwait(false);
|
||||
pendingMappings.Remove(documentId);
|
||||
continue;
|
||||
}
|
||||
|
||||
if (dto is null)
|
||||
{
|
||||
_logger.LogWarning("Oracle DTO payload deserialized as null for document {DocumentId}", documentId);
|
||||
await _documentStore.UpdateStatusAsync(document.Id, DocumentStatuses.Failed, cancellationToken).ConfigureAwait(false);
|
||||
pendingMappings.Remove(documentId);
|
||||
continue;
|
||||
}
|
||||
|
||||
var mappedAt = _timeProvider.GetUtcNow();
|
||||
var (advisory, flag) = OracleMapper.Map(dto, SourceName, mappedAt);
|
||||
await _advisoryStore.UpsertAsync(advisory, cancellationToken).ConfigureAwait(false);
|
||||
await _psirtFlagStore.UpsertAsync(flag, cancellationToken).ConfigureAwait(false);
|
||||
await _documentStore.UpdateStatusAsync(document.Id, DocumentStatuses.Mapped, cancellationToken).ConfigureAwait(false);
|
||||
|
||||
pendingMappings.Remove(documentId);
|
||||
}
|
||||
|
||||
var updatedCursor = cursor.WithPendingMappings(pendingMappings);
|
||||
await UpdateCursorAsync(updatedCursor, cancellationToken).ConfigureAwait(false);
|
||||
}
|
||||
|
||||
private async Task<OracleCursor> GetCursorAsync(CancellationToken cancellationToken)
|
||||
{
|
||||
var record = await _stateRepository.TryGetAsync(SourceName, cancellationToken).ConfigureAwait(false);
|
||||
return OracleCursor.FromBson(record?.Cursor);
|
||||
}
|
||||
|
||||
private async Task UpdateCursorAsync(OracleCursor cursor, CancellationToken cancellationToken)
|
||||
{
|
||||
var completedAt = _timeProvider.GetUtcNow();
|
||||
await _stateRepository.UpdateCursorAsync(SourceName, cursor.ToBsonDocument(), completedAt, cancellationToken).ConfigureAwait(false);
|
||||
}
|
||||
|
||||
private static string DeriveAdvisoryId(Uri uri)
|
||||
{
|
||||
var segments = uri.Segments;
|
||||
if (segments.Length == 0)
|
||||
{
|
||||
return uri.AbsoluteUri;
|
||||
}
|
||||
|
||||
var slug = segments[^1].Trim('/');
|
||||
if (string.IsNullOrWhiteSpace(slug))
|
||||
{
|
||||
return uri.AbsoluteUri;
|
||||
}
|
||||
|
||||
return slug.Replace('.', '-');
|
||||
}
|
||||
}
|
||||
21
src/OracleConnectorPlugin.cs
Normal file
21
src/OracleConnectorPlugin.cs
Normal file
@@ -0,0 +1,21 @@
|
||||
using System;
|
||||
using Microsoft.Extensions.DependencyInjection;
|
||||
using StellaOps.Plugin;
|
||||
|
||||
namespace StellaOps.Feedser.Source.Vndr.Oracle;
|
||||
|
||||
public sealed class VndrOracleConnectorPlugin : IConnectorPlugin
|
||||
{
|
||||
public const string SourceName = "vndr-oracle";
|
||||
|
||||
public string Name => SourceName;
|
||||
|
||||
public bool IsAvailable(IServiceProvider services)
|
||||
=> services.GetService<OracleConnector>() is not null;
|
||||
|
||||
public IFeedConnector Create(IServiceProvider services)
|
||||
{
|
||||
ArgumentNullException.ThrowIfNull(services);
|
||||
return services.GetRequiredService<OracleConnector>();
|
||||
}
|
||||
}
|
||||
54
src/OracleDependencyInjectionRoutine.cs
Normal file
54
src/OracleDependencyInjectionRoutine.cs
Normal file
@@ -0,0 +1,54 @@
|
||||
using System;
|
||||
using Microsoft.Extensions.Configuration;
|
||||
using Microsoft.Extensions.DependencyInjection;
|
||||
using StellaOps.DependencyInjection;
|
||||
using StellaOps.Feedser.Core.Jobs;
|
||||
using StellaOps.Feedser.Source.Vndr.Oracle.Configuration;
|
||||
|
||||
namespace StellaOps.Feedser.Source.Vndr.Oracle;
|
||||
|
||||
public sealed class OracleDependencyInjectionRoutine : IDependencyInjectionRoutine
|
||||
{
|
||||
private const string ConfigurationSection = "feedser:sources:oracle";
|
||||
|
||||
public IServiceCollection Register(IServiceCollection services, IConfiguration configuration)
|
||||
{
|
||||
ArgumentNullException.ThrowIfNull(services);
|
||||
ArgumentNullException.ThrowIfNull(configuration);
|
||||
|
||||
services.AddOracleConnector(options =>
|
||||
{
|
||||
configuration.GetSection(ConfigurationSection).Bind(options);
|
||||
options.Validate();
|
||||
});
|
||||
|
||||
services.AddTransient<OracleFetchJob>();
|
||||
services.AddTransient<OracleParseJob>();
|
||||
services.AddTransient<OracleMapJob>();
|
||||
|
||||
services.PostConfigure<JobSchedulerOptions>(options =>
|
||||
{
|
||||
EnsureJob(options, OracleJobKinds.Fetch, typeof(OracleFetchJob));
|
||||
EnsureJob(options, OracleJobKinds.Parse, typeof(OracleParseJob));
|
||||
EnsureJob(options, OracleJobKinds.Map, typeof(OracleMapJob));
|
||||
});
|
||||
|
||||
return services;
|
||||
}
|
||||
|
||||
private static void EnsureJob(JobSchedulerOptions options, string kind, Type jobType)
|
||||
{
|
||||
if (options.Definitions.ContainsKey(kind))
|
||||
{
|
||||
return;
|
||||
}
|
||||
|
||||
options.Definitions[kind] = new JobDefinition(
|
||||
kind,
|
||||
jobType,
|
||||
options.DefaultTimeout,
|
||||
options.DefaultLeaseDuration,
|
||||
CronExpression: null,
|
||||
Enabled: true);
|
||||
}
|
||||
}
|
||||
@@ -1,718 +1,128 @@
|
||||
# AGENTS.md — Feedser (Stella Ops)
|
||||
## Autonomous Agent Instructions
|
||||
|
||||
> YOU ARE: a senior C#/.NET 10 (Preview 7, SDK 10.0.100-preview.7.25380.108) engineer‑agent tasked with building **Feedser**, a CLI that fetches, normalizes, reconciles, and packages *primary, non‑aggregated* vulnerability intelligence into a single **feed‑merge** database and exports a **Trivy‑compatible** DB (OCI artifact) for offline/self‑hosted scanning.
|
||||
> MODE: factual, deterministic, test‑first, with strict provenance and reproducibility.
|
||||
> BOUNDARIES: implement architecture & code in this repo only. No secret exfiltration. Default offline.
|
||||
Before you act you need to read `/src/StellaOps.Feedser/AGENTS.md` files `AGENTS.md`,`TASKS.md` in each working directory I gave you.
|
||||
|
||||
Boundaries:
|
||||
- You operate only in the working directories I gave you, unless there is dependencies that makes you to work on dependency in shared directory. Then you ask for confirmation.
|
||||
|
||||
Do:
|
||||
- Keep endpoints small, deterministic, and cancellation-aware.
|
||||
- Improve logs/metrics as per tasks.
|
||||
- Update `TASKS.md` when moving tasks forward.
|
||||
- When you are done with all task you state explicitly you are done.
|
||||
- Impersonate the role described on working directory `AGENTS.md` you will read, if role is not available - take role of the CTO of the StellaOps in early stages.
|
||||
|
||||
Output:
|
||||
- Summary of changes and any cross-module requests.
|
||||
|
||||
# StellaOps — Agent Operations Guide (Master)
|
||||
|
||||
> Purpose: Orient all human + autonomous agents to the StellaOps platform, its data flows, component boundaries, and rules of engagement so teams can work **in parallel on the same branch** with minimal contention.
|
||||
|
||||
---
|
||||
|
||||
## 0) Stella Ops — condensed context
|
||||
## 1) What is StellaOps?
|
||||
|
||||
**Stella Ops** is a container & infra security platform built for sovereign/offline operation. Key storylines: **Δ‑SBOM warm path**, nightly re‑checks, policy‑as‑code, **signed artifacts**, optional **AI** remediation, and regional/air‑gapped operation.
|
||||
**Feedser** is foundational: it powers the scanner by producing a unified, deduped, explainable vulnerability database and a **self‑hosted Trivy DB**.
|
||||
**StellaOps** is a sovereign/offline-first container & infrastructure security platform. Its core loop:
|
||||
|
||||
**Sibling components (stable contracts, no code here):**
|
||||
- **Scanner** (`stellaops.module.scanning`) — consumes Trivy‑compatible DB → findings + SBOM digests.
|
||||
- **Policy Engine**, **Signed Artifacts Service** (cosign), **AIRE** (AI suggestions), **SecretsScanner**, **MailDaemon**, **Offline Kit**, **RU/EEU adapters** (CryptoPro TLS, RU cert chains), **UI Shell**.
|
||||
1. **Intelligence** — Ingest vulnerability advisories from primary sources.
|
||||
2. **Normalization & Merge** — Reconcile into a canonical, deduplicated database with deterministic precedence.
|
||||
3. **Distribution** — Export a Trivy-compatible database (OCI artifact) and optional vuln-list JSON for self-hosted scanners.
|
||||
4. **Scanning & Policy** — Scanners consume that DB; policy engines gate deployments; artifacts may be signed downstream.
|
||||
|
||||
This repository’s focus is the **Feedser** service (ingest/merge/export).
|
||||
|
||||
---
|
||||
|
||||
## 1) Problem statement
|
||||
|
||||
1) **Fetch** authoritative *primary* sources (global + regional + PSIRT + distro + CERTs + ICS).
|
||||
2) **Parse & Normalize** to a **UnifiedVuln** model.
|
||||
3) **Reconcile/Deduplicate** deterministically across sources with precedence rules.
|
||||
4) **Persist** into **feed‑merge DB** with both **bootstrap‑from‑scratch** and **incremental refresh**.
|
||||
5) **Package & Publish**:
|
||||
- **Trivy DB (v2) OCI artifact** for scanners (`--db-repository`),
|
||||
- optional **vuln‑list–shaped JSON** tree (to reuse `trivy-db` builder),
|
||||
- optional **signed offline bundle**.
|
||||
|
||||
Non‑goals v0: building a new scanner or a custom Java DB; we only ensure Scanner can target our self‑hosted DB.
|
||||
|
||||
---
|
||||
|
||||
## 2) High‑level architecture
|
||||
## 2) High‑level architecture (technical overview)
|
||||
|
||||
```
|
||||
|
||||
[Connectors] ──► [Source DTO validation] ──► [Normalizer → UnifiedVuln]
|
||||
CVE/NVD, GHSA/OSV, JVN, CERT/CC, CISA KEV, KISA, CERT-In, ANSSI (CERT-FR),
|
||||
BSI (CERT-Bund WID), ACSC, CCCS, RU: BDU + NKCKI, Vendor PSIRTs (MSRC, Cisco,
|
||||
Oracle CPU, Adobe APSB, Apple, Chromium, VMware), Distros (Red Hat, Ubuntu,
|
||||
Debian, SUSE), ICS (CISA ICS, Kaspersky ICS-CERT)
|
||||
│
|
||||
▼
|
||||
[Merge/Reconcile Engine]
|
||||
(aliases, precedence, ranges, KEV flags, PSIRT flags)
|
||||
│
|
||||
▼
|
||||
[Feed‑Merge DB (SQLite→Postgres)]
|
||||
│
|
||||
┌──────────────────┴──────────────────┐
|
||||
▼ ▼
|
||||
[Export: vuln‑list JSON] [Packager: Trivy DB v2]
|
||||
│ │
|
||||
(CI) [ORAS push / offline tar]
|
||||
```
|
||||
[Primary Sources: NVD, GHSA/OSV, Distros, PSIRTs, CERTs, KEV, ICS]
|
||||
│
|
||||
(Fetch + Validate DTOs)
|
||||
▼
|
||||
[Normalizer → Canonical Advisory]
|
||||
│
|
||||
(Alias graph + Precedence)
|
||||
▼
|
||||
[Feed‑Merge Store (MongoDB)]
|
||||
│
|
||||
┌──────────────┴──────────────┐
|
||||
▼ ▼
|
||||
[Export: vuln‑list JSON] [Packager: Trivy DB (OCI)]
|
||||
│ │
|
||||
└────────────► Distribution (ORAS / offline bundle)
|
||||
```
|
||||
|
||||
```
|
||||
|
||||
**Principles**
|
||||
- Determinism (same inputs → same outputs, hashed) and provenance per field.
|
||||
- OVAL (vendor/distro) **overrides** generic ranges for OS packages.
|
||||
- Regional feeds **enrich** rather than blindly override unless they carry stronger package‑level truth.
|
||||
**Key invariants**
|
||||
|
||||
- **Deterministic**: same inputs → same canonical JSON → same export digests.
|
||||
- **Precedence**: **distro OVAL/PSIRT > NVD** for OS packages; **KEV only flags exploitation**; regional CERTs enrich text/refs.
|
||||
- **Provenance** everywhere: source document, extraction method (`parser|llm`), and timestamps.
|
||||
|
||||
You have to read `./ARCHITECTURE.md` for more information.
|
||||
---
|
||||
|
||||
## 4) Main agents (roles, interactions, scope)
|
||||
|
||||
- **BE‑Base (Platform & Pipeline)**
|
||||
Owns DI, plugin host, job scheduler/coordinator, configuration binding, minimal API endpoints, and Mongo bootstrapping.
|
||||
- **BE‑Conn‑X (Connectors)**
|
||||
One agent per source family (NVD, Red Hat, Ubuntu, Debian, SUSE, GHSA, OSV, PSIRTs, CERTs, KEV, ICS). Implements fetch/parse/map with incremental watermarks.
|
||||
- **BE‑Merge (Canonical Merge & Dedupe)**
|
||||
Identity graph, precedence policies, canonical JSON serializer, and deterministic hashing (`merge_event`).
|
||||
- **BE‑Export (JSON & Trivy DB)**
|
||||
Deterministic export trees, Trivy DB packaging, optional ORAS push, and offline bundle.
|
||||
- **QA (Validation & Observability)**
|
||||
Schema tests, fixture goldens, determinism checks, metrics/logs/traces, e2e reproducibility runs.
|
||||
- **DevEx/Docs**
|
||||
Maintains this agent framework, templates, and per‑directory guides; assists parallelization and reviews.
|
||||
|
||||
**Interaction sketch**
|
||||
|
||||
- Connectors → **Core** jobs → **Storage.Mongo**
|
||||
- **Merge** refines canonical records; **Exporters** read canonical data to produce artifacts
|
||||
- **QA** spans all layers with fixtures/metrics and determinism checks
|
||||
|
||||
---
|
||||
|
||||
## 3) Repository layout (create exactly)
|
||||
## 5) Key technologies & integrations
|
||||
|
||||
```
|
||||
|
||||
src/Feedser/
|
||||
Feedser.Cli/ # .NET 10 preview console (System.CommandLine)
|
||||
Feedser.Core/ # domain model & orchestration
|
||||
Feedser.Storage/ # EF Core migrations (SQLite dev/CI; Postgres prod)
|
||||
Feedser.Connectors/
|
||||
Common/ # HTTP, pagination, ETag, backoff, schema validators
|
||||
Cve/ # CVE registry (id+refs)
|
||||
Nvd/ # NVD API v2 windows
|
||||
Ghsa/ # GHSA REST/GraphQL
|
||||
Osm.Osv/ # OSV API
|
||||
Jvn/ # MyJVN (JVNRSS/VULDEF)
|
||||
CertCc/ # CERT/CC Vulnerability Notes
|
||||
Kev/ # CISA Known Exploited
|
||||
Kr.Kisa/ # KISA/KrCERT advisories
|
||||
In.CertIn/ # CERT-In advisories
|
||||
Fr.CertFr/ # ANSSI CERT-FR avis/alertes
|
||||
De.CertBund/ # BSI CERT-Bund WID
|
||||
Au.Acsc/ # ACSC advisories
|
||||
Ca.Cccs/ # CCCS advisories
|
||||
Ru.Bdu/ # FSTEC BDU (HTML→schema; LLM fallback gated)
|
||||
Ru.Nkcki/ # NKCKI bulletins (HTML/PDF→text)
|
||||
Vndr.Msrc/ # MSRC CVRF
|
||||
Vndr.Cisco/ # Cisco PSIRT openVuln
|
||||
Vndr.Oracle/ # Oracle CPU/advisories
|
||||
Vndr.Adobe/ # Adobe APSB/APA
|
||||
Vndr.Apple/ # Apple HT201222 feed
|
||||
Vndr.Chromium/ # Chrome Releases security posts
|
||||
Vndr.Vmware/ # VMSA (Broadcom portal)
|
||||
Distro.RedHat/ # Red Hat Security Data API + OVAL
|
||||
Distro.Ubuntu/ # USN + Security API
|
||||
Distro.Debian/ # Debian Security Tracker JSON
|
||||
Distro.Suse/ # SUSE Update Advisories
|
||||
Ics.Cisa/ # CISA ICS advisories (ICSA-*)
|
||||
Ics.Kaspersky/ # Kaspersky ICS-CERT advisories
|
||||
Feedser.Merge/ # dedupe/aliases/precedence/version-ranges
|
||||
Feedser.Export.VulnList/ # optional vuln-list JSON renderer
|
||||
Feedser.Packagers.TrivyDb/ # db.tar.gz + metadata.json + ORAS push
|
||||
Feedser.Signing/ # cosign integration
|
||||
Feedser.Tests/
|
||||
etc/
|
||||
feedser.yaml # config template (extended, see §11)
|
||||
schemas/ # JSON Schema/XSD for inputs & internal payloads
|
||||
samples/ # golden fixtures per source
|
||||
|
||||
````
|
||||
- **Runtime**: .NET 10 (`net10.0`) preview SDK; C# latest preview features.
|
||||
- **Data**: MongoDB (canonical store and job/export state).
|
||||
- **Packaging**: Trivy DB (BoltDB inside `db.tar.gz`), vuln‑list JSON (optional), ORAS for OCI push.
|
||||
- **Observability**: structured logs, counters, and (optional) OpenTelemetry traces.
|
||||
- **Ops posture**: offline‑first, allowlist for remote hosts, strict schema validation, gated LLM fallback (only where explicitly configured).
|
||||
|
||||
---
|
||||
|
||||
## 4) Unified data model (relational + evented)
|
||||
## 6) Data flow (end‑to‑end)
|
||||
|
||||
**Storage default**: **SQLite** (dev/CI), **Postgres** (prod). EF Core migrations. Dapper for hot paths if needed.
|
||||
|
||||
**Tables (no change from v1 + PSIRT/CERT flags)**
|
||||
|
||||
- `source(id, name, type, base_url, auth_mode, notes)`
|
||||
- `watermark(source_id, cursor, updated_at)` ← **incremental windows per source**
|
||||
- `document(id, source_id, uri, fetched_at, content_sha256, content_type, status, raw_blob?, metadata_json)`
|
||||
- `advisory(id, advisory_key, title, summary, lang, published, modified,
|
||||
severity_cvss_v3?, severity_cvss_v4?, vendor_severity?,
|
||||
exploit_known bool)`
|
||||
- `alias(advisory_id, scheme, value)` — **schemes** include: CVE, GHSA, OSV, JVN, BDU, VU (CERT/CC), MSRC, CISCO‑SA, ORACLE‑CPU, APSB/APA, APPLE‑HT, CHROMIUM‑POST, VMSA, RHSA, USN, DSA, SUSE‑SU, ICSA, CWE, CPE, PURL, etc.
|
||||
- `affected(advisory_id, platform, name, version_range, cpe?, purl?, fixed_by?, introduced_version?)`
|
||||
- `reference(advisory_id, url, kind, source_tag)` — kind examples: advisory, patch, bulletin, kb, blog, vendor, exploit
|
||||
- `provenance(advisory_id, document_id, extraction, confidence, fields_mask)`
|
||||
- `kev_flag(advisory_id, kev_id, added_date, due_date?)`
|
||||
- `ru_flags(advisory_id, bdu_id?, nkcki_ids_json?, ru_severity?, notes?)`
|
||||
- `jp_flags(advisory_id, jvndb_id?, jvn_category?, vendor_status?)`
|
||||
- `psirt_flags(advisory_id, vendor, advisory_id_text, product_tags_json?)`
|
||||
- `merge_event(id, advisory_key, before_hash, after_hash, merged_at)`
|
||||
|
||||
**Indexes**: unique(advisory_key); index(scheme,value); index(platform,name); index(published); index(modified).
|
||||
|
||||
### 4.1) Alternate storage (MongoDB) — mapping (per your plan)
|
||||
If a **MongoDB** deployment is preferred, mirror the relational shape **as collections** with analogous names (`source`, `watermark`, `document`, `advisory`, `alias`, `affected`, `reference`, `provenance`, `kev_flag`, `ru_flags`, `jp_flags`, `psirt_flags`, `merge_event`).
|
||||
- Keep **advisory documents** flat and **embed** `aliases[]`, `affected[]`, `references[]` when practical; store **provenance** entries as embedded or sidecar collection depending on document growth.
|
||||
- Maintain **deterministic canonical JSON** for merges; hash stored in `merge_event`.
|
||||
- Incremental refreshes rely on the same **per‑source watermarks**.
|
||||
1. **Fetch**: connectors request source windows with retries/backoff, persist raw documents with SHA256/ETag metadata.
|
||||
2. **Parse**: validate to DTOs (schema‑checked), quarantine failures.
|
||||
3. **Map**: normalize to canonical advisories (aliases, affected ranges with NEVRA/EVR/SemVer, references, provenance).
|
||||
4. **Merge**: enforce precedence and determinism; track before/after hashes.
|
||||
5. **Export**: JSON tree and/or Trivy DB; package and (optionally) push; write export state.
|
||||
|
||||
---
|
||||
|
||||
## 5) Source connectors — contracts & incremental strategy
|
||||
## 7) Work-in-parallel rules (important)
|
||||
|
||||
**Common interface**
|
||||
```csharp
|
||||
public interface IFeedConnector {
|
||||
string SourceName { get; }
|
||||
Task FetchAsync(FeedserContext db, CancellationToken ct); // populate document rows
|
||||
Task ParseAsync(FeedserContext db, CancellationToken ct); // document -> DTOs (validated)
|
||||
Task MapAsync(FeedserContext db, CancellationToken ct); // DTOs -> UnifiedVuln tables + provenance
|
||||
}
|
||||
```
|
||||
|
||||
### 5.1 Registries & cross‑ecosystem
|
||||
|
||||
* **CVE (cve.org)** — *identifier registry*. Fetch for alias cross‑checks; minimal fields only. Watermark by last seen ID/time.
|
||||
* **NVD API v2** — sliding **modified windows** (e.g., 6–12h) with backoff and pagination. Persist CVSS/CWE/CPE as aliases; capture change history if present. Watermark = last successful `modified_end`.
|
||||
* **GHSA** — **REST** “global security advisories” + **GraphQL** for richer fields; **note**: `cvss` → `cvss_severities` deprecation → map accordingly. Watermark by updated timestamp/ID cursor.
|
||||
* **OSV** — fetch per eco or time range; map PURL + SemVer ranges.
|
||||
|
||||
### 5.2 National CERTs (incremental via RSS/API/pages)
|
||||
|
||||
* **CERT/CC Vulnerability Notes** — scrape/archive pages (VU#), and/or GitHub data archive when suitable. Watermark by VU publish date/ID.
|
||||
* **JVN / MyJVN (Japan)** — **MyJVN API**: JVNRSS (overview) + VULDEF (detail). Watermark by `dateFirstPublished`/`dateLastUpdated`. Map **JVNDB** IDs, CVE aliases, vendor status.
|
||||
* **RU‑CERT** — advisory/news portal; treat as **enrichment references** (aliases+refs), not a primary package range source. Watermark by post date.
|
||||
* **KISA (KrCERT/KRCERT)** — advisories/notices portal. Watermark by advisory date/ID.
|
||||
* **CERT‑In (India)** — **CIAD** advisories via portal pages; Watermark by advisory code/date.
|
||||
* **ANSSI/CERT‑FR** — *avis/alertes* RSS and list pages; Watermark by advisory ID/date.
|
||||
* **BSI CERT‑Bund (WID)** — “Technische Sicherheitshinweise” pages/feeds; Watermark by bulletin ID/date.
|
||||
* **ACSC (Australia)** — alerts/advisories; Watermark by publish date/slug.
|
||||
* **CCCS (Canada)** — advisories page; Watermark by date/slug.
|
||||
|
||||
### 5.3 Russia‑specific
|
||||
|
||||
* **FSTEC BDU** — **hybrid**: primary **HTML parser** → validate against our **internal XML schema**; if validation fails → **LLM extraction fallback** (strictly gated; see §7). Also support **bulk DB ingests** if official XML/Excel exports are available in the environment. Watermark by BDU ID/date.
|
||||
* **NKCKI** — bulletins list (HTML/PDF). Extract structured fields via PDF→text pipeline + post‑validation. Watermark by bulletin ID/date.
|
||||
|
||||
### 5.4 Vendor PSIRTs (canonical)
|
||||
|
||||
* **MSRC** — **CVRF API** monthly and per‑advisory endpoints. Watermark by month + last modified. Alias: `MSRC:<YYYY-MMM>`; references to KBs/CVEs.
|
||||
* **Cisco PSIRT (openVuln API)** — REST; filter by last published/updated. Alias: `CISCO-SA:<slug>`; map fixed releases.
|
||||
* **Oracle CPU / Security Alerts** — quarterly schedule (3rd Tue of Jan/Apr/Jul/Oct). Scrape CPU pages and advisories. Alias: `CPU:<YYYY-QQ>`; link per‑product CVEs. Watermark by CPU cycle.
|
||||
* **Adobe APSB/APA** — advisory index pages + product feeds. Alias: `APSB-YYYY-XX`.
|
||||
* **Apple** — **HT201222/“About Apple security releases”** index page(s). Alias: `APPLE-HT:HT201222:<yyyy-mm-dd>` + per‑product pages.
|
||||
* **Google Chromium** — **Chrome Releases** blog “Stable Channel Update” posts with security fix lists. Alias: `CHROMIUM-POST:<date>`.
|
||||
* **VMware (VMSA)** — Broadcom support portal VMSA pages; parse ID + affected products + CVEs. Alias: `VMSA-YYYY-XXXX`.
|
||||
|
||||
### 5.5 Linux distributions
|
||||
|
||||
* **Red Hat Security Data API** (CSAF/OVAL/CVE); plus OVAL content. **Precedence** for OS packages. Watermark via API `last_modified`/etag. Alias: `RHSA-YYYY:NNNN`.
|
||||
* **Ubuntu USN** — USN list + **Security API**; Watermark by USN ID/date. Alias: `USN-####-#`.
|
||||
* **Debian Security Tracker** — JSON dataset for CVE↔package↔suite; Watermark by file etag/commit. Alias: `DSA-####-#` (when present).
|
||||
* **SUSE** — security/update advisories pages; Watermark by SUSE‑SU ID/date. Alias: `SUSE-SU-YYYY:NNNN`.
|
||||
|
||||
### 5.6 Specialized / ICS
|
||||
|
||||
* **CISA ICS advisories (ICSA)** — list feeds; Watermark by ICSA‑ID. Alias: `ICSA-YY-###-##`.
|
||||
* **Kaspersky ICS‑CERT** — advisories list; Watermark by advisory ID/date; treat as authoritative vendor ICS source for impacted OT products.
|
||||
|
||||
### 5.7 Exploitation & enrichment
|
||||
|
||||
* **CISA KEV** — JSON catalog; set exploitation flag (`exploit_known=true`), store `kev_id`, `added_date`, `due_date`.
|
||||
- **Directory ownership**: Each agent works **only inside its module directory**. Cross‑module edits require a brief handshake in issues/PR description.
|
||||
- **Scoping**: Use each module’s `AGENTS.md` and `TASKS.md` to plan; autonomous agents must read `src/AGENTS.md` and the module docs before acting.
|
||||
- **Determinism**: Sort keys, normalize timestamps to UTC ISO‑8601, avoid non‑deterministic data in exports and tests.
|
||||
- **Status tracking**: Update your module’s `TASKS.md` as you progress (TODO → DOING → DONE/BLOCKED).
|
||||
- **Tests**: Add/extend fixtures and unit tests per change; never regress determinism or precedence.
|
||||
|
||||
---
|
||||
|
||||
## 6) Normalization details
|
||||
## 8) Glossary (quick)
|
||||
|
||||
**UnifiedVuln JSON (internal canonical)**
|
||||
|
||||
```json
|
||||
{
|
||||
"advisory_key": "CVE-2025-12345",
|
||||
"ids": { "cve": "CVE-2025-12345", "ghsa": "GHSA-xxxx", "bdu": "BDU:2025-06025", "jvndb": "JVNDB-2025-000123", "msrc": "2025-Jan" },
|
||||
"titles": [{ "text": "Buffer overflow in foo()", "lang": "en" }],
|
||||
"summary": { "text": "...", "lang": "en" },
|
||||
"published": "2025-06-21T12:00:00Z",
|
||||
"modified": "2025-07-03T09:00:00Z",
|
||||
"severity": {
|
||||
"cvss_v3": { "base": 9.8, "vector": "CVSS:3.1/..." },
|
||||
"cvss_v4": null,
|
||||
"vendor": "Critical"
|
||||
},
|
||||
"affected": [
|
||||
{ "platform": "os-distro", "name": "ubuntu:20.04",
|
||||
"cpe": "cpe:/o:canonical:ubuntu_linux:20.04",
|
||||
"version_range": "pkg:deb/ubuntu/foo<1.2.3-0ubuntu0.20.04.1",
|
||||
"fixed_by": "1.2.3-0ubuntu0.20.04.1"
|
||||
}
|
||||
],
|
||||
"references": [
|
||||
{ "url": "https://msrc.microsoft.com/update-guide", "kind": "advisory", "source": "MSRC" }
|
||||
],
|
||||
"exploitation": { "cisa_kev": true, "nkcki": false },
|
||||
"provenance": [
|
||||
{ "source": "RedHat", "document": "https://...", "method": "parser", "confidence": 1.0 }
|
||||
],
|
||||
"psirt": [{ "vendor": "Cisco", "advisory": "cisco-sa-..." }]
|
||||
}
|
||||
```
|
||||
|
||||
**Ranges**
|
||||
|
||||
* **OS packages**: distro semantics (Debian **EVR**, RPM **NEVRA**). Prefer OVAL/PSIRT source whenever available.
|
||||
* **Language ecosystems**: **SemVer** ranges with **PURL** coordinates; use OSV/GHSA fields for introduced/fixed events.
|
||||
* **Severity**: keep **all** CVSS sources; compute a max/consensus for display but preserve originals.
|
||||
|
||||
---
|
||||
|
||||
## 7) FSTEC BDU hybrid extraction (HTML→schema with gated LLM fallback)
|
||||
|
||||
1. **HTML parser** extracts into `BduHtmlExtract`.
|
||||
2. Validate against **internal XML schema** (XSD). Rules: `bdu_id` format `^BDU:\d{4}-\d{5}$`; CVE regex; date parse; severity enumeration.
|
||||
3. On validation failure: run **LLM extraction** (temperature 0) to the same JSON Schema; accept **only** if post‑validation passes and `confidence ≥ minConfidence`. Mark `provenance.method = "llm"`.
|
||||
4. Keep audit logs locally; default **offline model** in sovereign builds.
|
||||
|
||||
---
|
||||
|
||||
## 8) Merge & reconciliation (deterministic)
|
||||
|
||||
* **Identity**: prefer **CVE**; fallback to other keys (BDU/JVN/GHSA/MSRC/CISCO‑SA/VMSA/USN/DSA/SUSE‑SU/ICSA). Canonical `advisory_key`.
|
||||
* **Aliases**: store all cross‑refs (CVE, GHSA, OSV, JVN, BDU, MSRC, CISCO‑SA, ORACLE‑CPU, APSB, APPLE‑HT, CHROMIUM‑POST, VMSA, RHSA, USN, DSA, SUSE‑SU, ICSA, CWE, CPE, PURL…).
|
||||
* **Precedence**:
|
||||
|
||||
* OVAL/PSIRT **override** NVD for OS package ranges.
|
||||
* **KEV** sets exploitation flags only (no severity override).
|
||||
* Regional feeds **enrich** (severity text, mitigation, local notes).
|
||||
* **Determinism**: merged canonical JSON is hashed; store in `merge_event`.
|
||||
|
||||
---
|
||||
|
||||
## 9) Packaging & publishing
|
||||
|
||||
**v0**: render **vuln‑list–shaped JSON** → invoke stock **`trivy-db`** builder to get `metadata.json` + `trivy.db` → tar to `db.tar.gz` → **ORAS push** to your registry with **Trivy DB media types**.
|
||||
**v1**: native C# packager writing BoltDB + `metadata.json` and pushing via ORAS directly.
|
||||
|
||||
**Output contracts**
|
||||
|
||||
* **OCI media types**: layer `application/vnd.aquasec.trivy.db.layer.v1.tar+gzip`; config `application/vnd.aquasec.trivy.config.v1+json`.
|
||||
* Consumers point Trivy at your repo: `--db-repository REGISTRY/PATH`; for air‑gap ship `db.tar.gz`.
|
||||
|
||||
---
|
||||
|
||||
## 10) CLI (idempotent)
|
||||
|
||||
```
|
||||
feedser init
|
||||
feedser fetch --source nvd|cve|ghsa|osv|jvn|certcc|kev|kisa|certin|certfr|certbund|acsc|cccs|bdu|nkcki|msrc|cisco|oracle|adobe|apple|chromium|vmware|redhat|ubuntu|debian|suse [--since ...]
|
||||
feedser parse --source ...
|
||||
feedser merge
|
||||
feedser export vuln-list --out ./out/vuln-list/
|
||||
feedser pack trivy-db --out ./out/db.tar.gz
|
||||
feedser push trivy-db --repo registry.local/security/trivy-db --tag 2 [--auth env|file]
|
||||
feedser sign --artifact ./out/db.tar.gz --key cosign.key
|
||||
feedser status
|
||||
feedser gc --keep-raw 3
|
||||
feedser doctor # media types, registry auth, schema checks
|
||||
```
|
||||
|
||||
Exit codes: non‑zero on schema failure, network failure after retries, or merge non‑determinism.
|
||||
|
||||
---
|
||||
|
||||
## 11) Config (`etc/feedser.yaml`) — extended
|
||||
|
||||
```yaml
|
||||
storage:
|
||||
driver: sqlite
|
||||
dsn: "Data Source=feedser.db"
|
||||
|
||||
sources:
|
||||
cve: { enabled: true }
|
||||
nvd: { enabled: true, window_hours: 6 }
|
||||
ghsa: { enabled: true, github_token: "${GITHUB_TOKEN:-}", api: "rest+graphql" }
|
||||
osv: { enabled: true }
|
||||
jvn:
|
||||
enabled: true
|
||||
api_base: "https://jvndb.jvn.jp/en/apis/"
|
||||
window_days: 7
|
||||
certcc: { enabled: true }
|
||||
kev: { enabled: true }
|
||||
kisa: { enabled: false } # enable when endpoints/feeds are reachable in environment
|
||||
certin: { enabled: true }
|
||||
certfr: { enabled: true }
|
||||
certbund: { enabled: true }
|
||||
acsc: { enabled: true }
|
||||
cccs: { enabled: true }
|
||||
ru:
|
||||
bdu:
|
||||
enabled: true
|
||||
htmlFallback: true
|
||||
llmFallback: "gated"
|
||||
minConfidence: 0.85
|
||||
nkcki:
|
||||
enabled: true
|
||||
msrc: { enabled: true }
|
||||
cisco: { enabled: true, token: "${CISCO_OPENVULN_TOKEN:-}" }
|
||||
oracle: { enabled: true }
|
||||
adobe: { enabled: true }
|
||||
apple: { enabled: true }
|
||||
chromium: { enabled: true }
|
||||
vmware: { enabled: true }
|
||||
redhat:
|
||||
enabled: true
|
||||
api_base: "https://access.redhat.com/hydra/rest/securitydata"
|
||||
ubuntu:
|
||||
enabled: true
|
||||
api_base: "https://ubuntu.com/security/api"
|
||||
debian: { enabled: true }
|
||||
suse: { enabled: true }
|
||||
|
||||
packaging:
|
||||
trivy:
|
||||
publish: true
|
||||
repo: "registry.local/security/trivy-db"
|
||||
tag: "2"
|
||||
offline_bundle: true
|
||||
|
||||
observability:
|
||||
metrics: "stdout"
|
||||
logs: "json"
|
||||
level: "Information"
|
||||
tracing: "otlp"
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## 12) Observability & performance
|
||||
|
||||
* **Logs**: structured (Serilog); include `source`, `uri`, `status`, `parseMs`, `mappedCount`, `mergeDelta`.
|
||||
* **Metrics**: fetch latency, parse/validation failures, dedupe ratio, DB compaction time, package size, **per‑source rate‑limit counters**.
|
||||
* **Tracing**: OpenTelemetry spans per connector/step.
|
||||
* **Perf**: bounded parallelism per source; streaming XML; content‑hash short‑circuit for unchanged docs.
|
||||
|
||||
---
|
||||
|
||||
## 13) Tests & quality gates
|
||||
|
||||
* **Schema validation** for each connector (external JSON/XML → DTOs).
|
||||
* **Golden fixtures** per source (NVD page, GHSA JSON, OSV, JVN JVNRSS/VULDEF, CERT/CC VU HTML, BDU HTML, NKCKI PDF→text, MSRC CVRF, Cisco openVuln JSON, Oracle CPU HTML, Adobe APSB HTML, Apple HT list, Chrome Releases HTML, VMSA HTML, Red Hat API JSON, USN JSON, Debian JSON, SUSE HTML).
|
||||
* **Merge determinism** (hash‑stable).
|
||||
* **Parity scans**: compare Trivy scan using our DB vs upstream baseline on a reference set of images (differences expected where OVAL narrows ranges).
|
||||
* **Media‑type conformance** (OCI).
|
||||
* **Reproducible packaging**: build ID = hash(vuln‑list tree).
|
||||
|
||||
**Connector DoD**: watermarking; retries/backoff; schema‑validated parsing; mapping; unit tests; goldens; incremental pass; metrics.
|
||||
|
||||
---
|
||||
|
||||
## 14) Security & compliance
|
||||
|
||||
* Default **offline**; explicit allowlist per source host.
|
||||
* **LLM usage isolated** to BDU fallback; no external calls unless configured; redact logs; audit stored locally.
|
||||
* **cosign** signing for artifacts; store SHA256 and manifest digests.
|
||||
* Respect robots/ToS; prefer official APIs/feeds where available.
|
||||
|
||||
---
|
||||
|
||||
## 15) Concrete TODOs (first sprints)
|
||||
|
||||
1. **Storage**: EF Core models & migrations; `watermark` infra; repositories.
|
||||
2. **NVD**: windowed fetch; JSON Schema validation; mapper; watermark.
|
||||
3. **OVAL/Distros**: Red Hat (API+OVAL), Ubuntu (USN+API), Debian (JSON), SUSE (advisories).
|
||||
4. **KEV**: JSON ingest → `exploit_known=true`.
|
||||
5. **GHSA/OSV**: REST + GraphQL; map PURL/semver; handle `cvss_severities`.
|
||||
6. **JVN**: JVNRSS + VULDEF; alias mapping; watermark.
|
||||
7. **RU**: BDU HTML parser + XSD + LLM fallback; NKCKI bulletins harvester.
|
||||
8. **PSIRTs**: MSRC CVRF; Cisco openVuln; Oracle CPU; Adobe APSB; Apple; Chromium; VMware (VMSA).
|
||||
9. **Merge Engine**: aliasing + precedence; canonical JSON + hashing.
|
||||
10. **Export/Pack**: vuln‑list renderer; Trivy DB packaging; **ORAS push**; **offline bundle**.
|
||||
11. **CLI & doctor**; **cosign sign**; **status**.
|
||||
|
||||
---
|
||||
|
||||
## 16) MASTER SOURCE CATALOG (as provided — preserved verbatim)
|
||||
|
||||
### Primary Vulnerability Databases / Advisory Portals
|
||||
|
||||
| Vulnerability DB | Who Supports It | Type | URL | DB Type | What Data It Has |
|
||||
| ---------------------------------------------- | ---------------------------------------------------------- | ----------------------------------- | ----------------------------------------------------------------------------------------------------------------------------- | ---------------------- | ------------------------------------------------------------------ |
|
||||
| **CVE (Common Vulnerabilities and Exposures)** | MITRE (with CNA partners) | Identifier registry | [https://cve.org](https://cve.org) | Global ID registry | CVE IDs, basic description, references, assigner info |
|
||||
| **CERT/CC Vulnerability Notes** | Carnegie Mellon CERT/CC | National CERT / coordination center | [https://kb.cert.org/vuls](https://kb.cert.org/vuls) | Vulnerability Notes DB | VU# IDs, description, impact, vendors affected, references |
|
||||
| **JVN (Japan Vulnerability Notes)** | JPCERT/CC + IPA (Japan) | National CERT | [https://jvn.jp/en/](https://jvn.jp/en/) | Advisory DB | JVN IDs, affected products, mitigation, CVE mappings |
|
||||
| **RU-CERT** | Coordination Center for .RU / Russian CERT | National CERT | [https://www.cert.ru](https://www.cert.ru) | Advisory DB | Russian advisories, incident/vulnerability notes |
|
||||
| **CISA KEV Catalog & Advisories** | US CISA (DHS) | Government CERT / advisories | [https://www.cisa.gov/known-exploited-vulnerabilities](https://www.cisa.gov/known-exploited-vulnerabilities) | Catalog | KEV IDs, CVE links, exploited-in-wild status, remediation deadline |
|
||||
| **KISA (Korean CERT)** | Korea Internet & Security Agency | National CERT | [https://www.krcert.or.kr](https://www.krcert.or.kr) / [https://www.boho.or.kr/en/main.do](https://www.boho.or.kr/en/main.do) | Advisory portal | Korean advisories, CVE refs, guidance |
|
||||
| **CERT-In (India)** | Ministry of Electronics & IT | National CERT | [https://www.cert-in.org.in](https://www.cert-in.org.in) | Advisory portal | Indian CERT advisories, affected vendors, CVEs |
|
||||
| **ANSSI (France)** | Agence nationale de la sécurité des systèmes d'information | National CERT | [https://www.cert.ssi.gouv.fr](https://www.cert.ssi.gouv.fr) | Advisory portal | French advisories, technical notes, CVE refs |
|
||||
| **BSI (Germany, CERT-Bund)** | German Federal Office for Information Security | National CERT | [https://www.bsi.bund.de](https://www.bsi.bund.de) | Advisory portal | Vulnerability advisories, vendor notifications |
|
||||
| **ACSC (Australia)** | Australian Cyber Security Centre | National CERT | [https://www.cyber.gov.au](https://www.cyber.gov.au) | Advisory portal | Australian advisories, CVE refs, guidance |
|
||||
| **CCCS (Canada)** | Canadian Centre for Cyber Security | National CERT | [https://www.cyber.gc.ca](https://www.cyber.gc.ca) | Advisory portal | Canadian advisories, CVE refs |
|
||||
|
||||
### Vendor / PSIRT Databases (Primary)
|
||||
|
||||
| Vulnerability DB | Who Supports It | Type | URL | DB Type | What Data It Has |
|
||||
| --------------------------------------------- | --------------- | ------------ | ---------------------------------------------------------------------------------------------------------------------------- | ----------------------- | ----------------------------------------------------------- |
|
||||
| **Microsoft Security Response Center (MSRC)** | Microsoft | Vendor PSIRT | [https://msrc.microsoft.com/update-guide](https://msrc.microsoft.com/update-guide) | Advisory portal | MSRC IDs, CVE mappings, affected products/versions, patches |
|
||||
| **Cisco PSIRT** | Cisco Systems | Vendor PSIRT | [https://tools.cisco.com/security/center/publicationListing.x](https://tools.cisco.com/security/center/publicationListing.x) | Advisory DB | Cisco advisories, CVEs, product impact, fixes |
|
||||
| **Oracle CPU / Security Alerts** | Oracle | Vendor PSIRT | [https://www.oracle.com/security-alerts/](https://www.oracle.com/security-alerts/) | Advisory DB | Oracle CPUs, CVEs, affected products/versions, patches |
|
||||
| **Adobe Security Bulletins & Advisories** | Adobe | Vendor PSIRT | [https://helpx.adobe.com/security.html](https://helpx.adobe.com/security.html) | Advisory DB | APSB/APA IDs, CVEs, affected software, patches |
|
||||
| **Apple Security Updates** | Apple | Vendor PSIRT | [https://support.apple.com/en-us/HT201222](https://support.apple.com/en-us/HT201222) (security updates index) | Advisory portal | Apple advisories, CVEs, product versions, patches |
|
||||
| **Google Chromium Security** | Google | Vendor PSIRT | [https://chromereleases.googleblog.com](https://chromereleases.googleblog.com) | Advisory blog / tracker | Chromium/Android advisories, CVEs, fixes |
|
||||
| **VMware Security Advisories (VMSA)** | VMware/Broadcom | Vendor PSIRT | [https://www.broadcom.com/support/vmware-security-advisories](https://www.broadcom.com/support/vmware-security-advisories) | Advisory DB | VMSA IDs, CVEs, product versions, fixes |
|
||||
|
||||
### Linux Distribution Security Trackers (Primary)
|
||||
|
||||
| Vulnerability DB | Who Supports It | Type | URL | DB Type | What Data It Has |
|
||||
| ------------------------------------------- | --------------- | ------------------ | ---------------------------------------------------------------------------------------- | ------------------------------------- | --------------------------------------------------------- |
|
||||
| **Red Hat Security Data / RHSA** | Red Hat | Distro Security DB | [https://access.redhat.com/security/updates](https://access.redhat.com/security/updates) | Security advisories & OVAL/JSON feeds | RHSA IDs, CVEs, fixed package versions, affected products |
|
||||
| **Canonical Ubuntu Security Notices (USN)** | Canonical | Distro Security DB | [https://ubuntu.com/security/notices](https://ubuntu.com/security/notices) | Advisory DB | USN IDs, CVEs, affected packages, patches |
|
||||
| **Debian Security Tracker (DSA)** | Debian Project | Distro Security DB | [https://security-tracker.debian.org](https://security-tracker.debian.org) | Tracker + Advisories | DSA IDs, CVEs, package status per release |
|
||||
| **SUSE Security Announcements** | SUSE | Distro Security DB | [https://www.suse.com/support/security/](https://www.suse.com/support/security/) | Advisory DB | SUSE-SA/Update IDs, CVEs, package fix versions |
|
||||
|
||||
### Open Source Ecosystem Advisory Databases (Primary)
|
||||
|
||||
| Vulnerability DB | Who Supports It | Type | URL | DB Type | What Data It Has |
|
||||
| ----------------------------------------- | ---------------------- | ----------------------- | -------------------------------------------------------------- | --------------- | ------------------------------------------------------------------------------------------------- |
|
||||
| **GitHub Security Advisories (GHSA)** | GitHub (Microsoft) | Open Source Advisory DB | [https://github.com/advisories](https://github.com/advisories) | Advisory DB | GHSA IDs, CVEs, affected repos/packages, patches, severity |
|
||||
| **OSV.dev (Open Source Vulnerabilities)** | Google / OSS community | Open Source Advisory DB | [https://osv.dev](https://osv.dev) | Schema-based DB | OSV IDs, CVEs, affected ecosystems (npm, PyPI, Go, crates.io, Maven, etc.), version ranges, fixes |
|
||||
|
||||
### Specialized (ICS / Sectoral)
|
||||
|
||||
| Vulnerability DB | Who Supports It | Type | URL | DB Type | What Data It Has |
|
||||
| ----------------------- | ------------------ | ------------- | ---------------------------------------------------------------------------------------- | --------------- | --------------------------------------------------------------------- |
|
||||
| **CISA ICS Advisories** | US CISA (ICS‑CERT) | ICS sector DB | [https://www.cisa.gov/ics/advisories](https://www.cisa.gov/ics/advisories) | Advisory DB | ICS advisory IDs, CVEs, affected vendors, exploitability, mitigations |
|
||||
| **Kaspersky ICS CERT** | Kaspersky Lab | ICS CERT | [https://ics-cert.kaspersky.com/advisories/](https://ics-cert.kaspersky.com/advisories/) | Advisory portal | ICS advisories, CVEs, technical detail, mitigations |
|
||||
|
||||
---
|
||||
|
||||
## 17) Field‑mapping guide (per family)
|
||||
|
||||
**PSIRT**: set `psirt_flags.vendor` + vendor advisory ID in `alias` and `psirt_flags.advisory_id_text`. Always attach **patch references** and **fixed versions** into `affected.fixed_by`.
|
||||
**Distros**: treat **OVAL/JSON** as range authority; `alias` with RHSA/USN/DSA/SUSE‑SU; attach per‑suite/package status.
|
||||
**CERTs**: attach `reference(kind=bulletin)` and severity text; use as enrichment unless they include authoritative package ranges.
|
||||
**ICS**: map vendor & model families into `affected.platform="ics-vendor"` with product tags.
|
||||
**KEV**: set exploitation flags only.
|
||||
**BDU/JVN**: include local IDs (BDU, JVNDB) in `alias` and specific flags in `ru_flags`/`jp_flags`.
|
||||
|
||||
---
|
||||
|
||||
## 18) Reference commands & snippets
|
||||
|
||||
**ORAS push (Trivy DB v2)**
|
||||
|
||||
```bash
|
||||
oras push --artifact-type application/vnd.aquasec.trivy.config.v1+json \
|
||||
"registry.local/security/trivy-db:2" \
|
||||
db.tar.gz:application/vnd.aquasec.trivy.db.layer.v1.tar+gzip
|
||||
```
|
||||
|
||||
**Point Trivy at our repo**
|
||||
|
||||
```bash
|
||||
trivy image --db-repository registry.local/security/trivy-db --download-db-only
|
||||
```
|
||||
|
||||
**BDU LLM fallback gate (pseudo)**
|
||||
|
||||
```csharp
|
||||
if (!BduSchemaValidator.IsValid(parsed)) {
|
||||
var json = LlmExtractToJson(rawText, schema: BduSchema, temperature: 0);
|
||||
if (!BduSchemaValidator.IsValid(json) || Confidence(json) < minConfidence) Fail("BDU: low confidence");
|
||||
Save(json, provenance: "llm");
|
||||
} else {
|
||||
Save(parsed, provenance: "parser");
|
||||
}
|
||||
```
|
||||
|
||||
## Reference notes (authoritative links for the agent)
|
||||
|
||||
**Trivy self‑hosting / DB media types / vuln‑list**
|
||||
|
||||
* Trivy self‑hosting databases and `--db-repository` flag. ([trivy.dev][1])
|
||||
* DB repository & required OCI media type (`application/vnd.aquasec.trivy.db.layer.v1.tar+gzip`). ([Aqua Security][2])
|
||||
* `vuln-list` and `vuln-list-update` (inputs/build). ([GitHub][3])
|
||||
* `trivy-db` tool (builder/DB format). ([GitHub][4])
|
||||
* GitLab registry media‑type support for trivy‑db (confirmation of the two media types). ([about.gitlab.com][5])
|
||||
|
||||
**Global registries / cross‑ecosystem**
|
||||
|
||||
* CVE program (official). ([CVE][6])
|
||||
* NVD general/search. ([NVD][7])
|
||||
* GHSA DB and APIs (REST/GraphQL + deprecation notice). ([GitHub][8])
|
||||
* OSV.dev (DB + data sources). ([OSV][9])
|
||||
|
||||
**National CERTs**
|
||||
|
||||
* CERT/CC Vulnerability Notes + docs. ([CERT Coordination Center][10])
|
||||
* JVN / MyJVN API (Japan). ([JVN iPedia][11])
|
||||
* RU‑CERT (coordination center profile & site). ([cctld.ru][12])
|
||||
* KISA/KrCERT portals and examples. ([boho.or.kr][13])
|
||||
* CERT‑In (site, CNA role, sample advisory). ([CERT-IN][14])
|
||||
* ANSSI CERT‑FR portal and *avis*. ([cert.ssi.gouv.fr][15])
|
||||
* BSI CERT‑Bund WID pages. ([wid.cert-bund.de][16])
|
||||
* ACSC advisories hub. ([cyber.gov.au][17])
|
||||
* CCCS advisories hub. ([Canadian Centre for Cyber Security][18])
|
||||
|
||||
**Russia‑specific**
|
||||
|
||||
* BDU site and documentation of XML/Excel dumps (context). ([bdu.fstec.ru][19])
|
||||
* NKCKI vulnerability bulletins list. ([safe-surf.ru][20])
|
||||
|
||||
**Vendor PSIRTs**
|
||||
|
||||
* MSRC Security Update Guide + CVRF API examples. ([msrc.microsoft.com][21])
|
||||
* Cisco PSIRT advisories + openVuln API. ([Cisco][22])
|
||||
* Oracle CPU schedule / advisories. ([Oracle][23])
|
||||
* Adobe security advisories (index + product). ([Adobe Help Center][24])
|
||||
* Apple security releases index (HT201222 lineage). ([Apple Support][25])
|
||||
* Chrome Releases (stable updates with security fixes). ([Chrome Releases][26])
|
||||
* VMware Security Advisories (VMSA) on Broadcom; move notice. ([Broadcom][27])
|
||||
|
||||
**Linux distributions**
|
||||
|
||||
* Red Hat Security Data API (+ changelog/pointers). ([Red Hat Docs][28])
|
||||
* Ubuntu Security Notices & Security API. ([Ubuntu][29])
|
||||
* Debian Security Tracker (docs + JSON). ([Debian Security Tracker][30])
|
||||
* SUSE advisories. ([SUSE][31])
|
||||
|
||||
**Exploitation & ICS**
|
||||
|
||||
* CISA KEV catalog. ([CISA][32])
|
||||
* CISA ICS advisories hub (ICSA). ([CISA][33])
|
||||
* Kaspersky ICS‑CERT advisories. ([Kaspersky ICS-CERT][34])
|
||||
|
||||
If you want me to produce **starter EF models + migrations** and a **full `feedser.yaml`** file reflecting all of the above, I can output those files now.
|
||||
|
||||
[1]: https://trivy.dev/v0.60/docs/advanced/self-hosting/?utm_source=chatgpt.com "Self-Hosting Trivy's Databases"
|
||||
[2]: https://aquasecurity.github.io/trivy/v0.56/docs/configuration/db/?utm_source=chatgpt.com "DB"
|
||||
[3]: https://github.com/aquasecurity/vuln-list?utm_source=chatgpt.com "aquasecurity/vuln-list: NVD, Ubuntu, Alpine"
|
||||
[4]: https://github.com/aquasecurity/trivy-db?utm_source=chatgpt.com "aquasecurity/trivy-db"
|
||||
[5]: https://gitlab.com/gitlab-org/container-registry/-/merge_requests/957?utm_source=chatgpt.com "Add trivy-db media types - container-registry"
|
||||
[6]: https://www.cve.org/?utm_source=chatgpt.com "CVE: Common Vulnerabilities and Exposures"
|
||||
[7]: https://nvd.nist.gov/vuln/search?utm_source=chatgpt.com "NVD - Search and Statistics"
|
||||
[8]: https://github.com/advisories?utm_source=chatgpt.com "GitHub Advisory Database"
|
||||
[9]: https://osv.dev/?utm_source=chatgpt.com "OSV - Open Source Vulnerabilities"
|
||||
[10]: https://www.kb.cert.org/?utm_source=chatgpt.com "CERT Vulnerability Notes Database"
|
||||
[11]: https://jvndb.jvn.jp/en/apis/index.html?utm_source=chatgpt.com "MyJVN API"
|
||||
[12]: https://cctld.ru/files/pdf/RU-CERT.pdf?utm_source=chatgpt.com "RU-CERT.pdf"
|
||||
[13]: https://www.boho.or.kr/en/main.do?utm_source=chatgpt.com "KISA 인터넷 보호나라&KrCERT"
|
||||
[14]: https://www.cert-in.org.in/CNA.jsp?utm_source=chatgpt.com "CVE Numbering Authority (CNA) at CERT-In"
|
||||
[15]: https://www.cert.ssi.gouv.fr/?utm_source=chatgpt.com "CERT-FR – Centre gouvernemental de veille, d ... - l'ANSSI"
|
||||
[16]: https://wid.cert-bund.de/?utm_source=chatgpt.com "Warn- und Informationsdienst - Startseite - CERT-Bund"
|
||||
[17]: https://www.cyber.gov.au/about-us/view-all-content/alerts-and-advisories?utm_source=chatgpt.com "Alerts and advisories"
|
||||
[18]: https://www.cyber.gc.ca/en/alerts-advisories?utm_source=chatgpt.com "Alerts and advisories"
|
||||
[19]: https://bdu.fstec.ru/vul?utm_source=chatgpt.com "Уязвимости - БДУ"
|
||||
[20]: https://safe-surf.ru/specialists/bulletins-nkcki/?utm_source=chatgpt.com "Список новых уязвимостей ПО | Уведомления НКЦКИ"
|
||||
[21]: https://msrc.microsoft.com/update-guide?utm_source=chatgpt.com "Security Update Guide"
|
||||
[22]: https://sec.cloudapps.cisco.com/security/center/publicationListing.x?utm_source=chatgpt.com "Cisco Security Advisories"
|
||||
[23]: https://www.oracle.com/security-alerts/?utm_source=chatgpt.com "Critical Patch Updates, Security Alerts and Bulletins"
|
||||
[24]: https://helpx.adobe.com/security/security-bulletin.html?utm_source=chatgpt.com "Security Bulletins and Advisories"
|
||||
[25]: https://support.apple.com/en-us/100100?utm_source=chatgpt.com "Apple security releases"
|
||||
[26]: https://chromereleases.googleblog.com/?utm_source=chatgpt.com "Chrome Releases"
|
||||
[27]: https://www.broadcom.com/support/vmware-security-advisories?utm_source=chatgpt.com "VMware Security Advisories"
|
||||
[28]: https://docs.redhat.com/en/documentation/red_hat_security_data_api/1.0/html-single/red_hat_security_data_api/index?utm_source=chatgpt.com "Red Hat Security Data API | 1.0"
|
||||
[29]: https://ubuntu.com/security/notices?utm_source=chatgpt.com "Ubuntu Security Notices"
|
||||
[30]: https://security-tracker.debian.org/?utm_source=chatgpt.com "Security Bug Tracker - Debian"
|
||||
[31]: https://www.suse.com/support/update/?utm_source=chatgpt.com "SUSE:Update Advisories"
|
||||
[32]: https://www.cisa.gov/known-exploited-vulnerabilities-catalog?utm_source=chatgpt.com "Known Exploited Vulnerabilities Catalog"
|
||||
[33]: https://www.cisa.gov/news-events/ics-advisories?utm_source=chatgpt.com "ICS Advisories"
|
||||
[34]: https://ics-cert.kaspersky.com/advisories/?utm_source=chatgpt.com "Advisories"
|
||||
|
||||
## 19) Role Kickstart Playbooks (aligned with ARCHITECTURE.md & IMPLEMENTATION.md)
|
||||
|
||||
### 19.1 Shared pre-flight checklist
|
||||
- Read **ARCHITECTURE.md §§1–3** and **IMPLEMENTATION.md §§0–3** before writing code; treat this document as the quick-start guide and the others as depth references.
|
||||
- Confirm local toolchain: .NET 10 RC 1 SDK (10.0.100-rc.1.25451.107 — must match the version pinned in `.gitea/workflows/feedser-ci.yml`), Docker, MongoDB (local or container), `oras` CLI, `cosign`, `yq`, `jq`, Chrome/Chromium for HTML schema inspection, `pdftotext` for NKCKI extracts.
|
||||
- Sync repo structure with `IMPLEMENTATION.md` naming (`StellaOps.Feedser.*` projects, Mongo storage) even if the CLI-first layout above still exists; prefer additive commits that converge both plans until deprecation is agreed.
|
||||
- Establish secrets handling: load tokens (GitHub, Cisco openVuln, etc.) via environment variables referenced in `etc/feedser.yaml`.
|
||||
- Instrument everything: Serilog + OpenTelemetry hooks should be wired during the first implementation of any loop so QA can observe behaviour from day one.
|
||||
- Definition of Done (all roles): schema validation in tests, deterministic outputs (hash snapshot checked in), logging/metrics assertions, and hand-off notes in `/docs/handbook/ROLE/<source>.md`.
|
||||
|
||||
### 19.2 BE-Base — Platform & Pipeline owner
|
||||
- **Sprint 0 focus** (IMPLEMENTATION.md §1): create the `StellaOps.Feedser.sln`, add the WebService, Core, Models, Storage.Mongo, Source.Common, Exporter.Json, Exporter.TrivyDb projects; seed `Directory.Build.props/targets`, `.editorconfig`, and analyzer packages (`StyleCop.Analyzers`, nullable `enable`, treat warnings as errors in CI).
|
||||
- Wire **CI** (`.gitea/workflows/feedser-ci.yml` or internal equivalent) running `dotnet restore/build/test`, lint (StyleCop), and container build check; artifacts should include the WebService image and test results.
|
||||
- Produce **devcontainer + Dockerfile** aligned with the Mongo-first run mode (ARCHITECTURE.md §2). Ensure `mongo` sidecar is declared in `devcontainer.json` for immediate onboarding.
|
||||
- Establish **configuration plumbing**: bind `appsettings.json` + `feedser.yaml` into strongly typed options, configure reload-on-change, and hydrate `SourceState`/`ExportState` repositories.
|
||||
- Create **Mongo collections/indexes** exactly as catalogued in ARCHITECTURE.md §3; provide integration tests under `StellaOps.Feedser.Tests/Storage` that assert index presence and TTL semantics.
|
||||
- Publish **contribution docs**: `/docs/contribute.md` summarizing coding standards, release tagging, and commit style (Implementation §1.5–1.6).
|
||||
- Hand-off: once WebService boots with `/health` and `/ready` endpoints, scheduler skeleton, and Mongo indices created on startup, notify BE-Conn/BE-Merge/BE-Export via project board and land a baseline tag (`v0.1.0-alpha1`).
|
||||
|
||||
### 19.3 BE-Conn-X — Source Connector engineers
|
||||
- Priority waves (IMPLEMENTATION.md §4 + §5): registries (CVE/NVD/GHSA/OSV), national CERTs, vendor PSIRTs, distros, KEV, ICS. Pick sources in order of dependency on precedence rules (e.g., Red Hat before Debian for RPM logic).
|
||||
- **Workflow** per source:
|
||||
1. Extend `StellaOps.Feedser.Source.Common` with fetch helpers (rate-limit, retries) if not already present; reuse `IConnectorClock` to respect windowed crawls.
|
||||
2. Implement `FetchAsync` to persist documents into Mongo `document` collection with SHA256 + metadata; follow the watermark guidance in §5 of this file.
|
||||
3. Validate raw payloads against schemas (JSON Schema, XSD, or `Joi` equivalent) and store sanitized DTOs in `dto`. Record validation stats in metrics.
|
||||
4. Map DTOs into canonical advisories using `StellaOps.Feedser.Models`. Guarantee alias completeness and provenance entries (`parser` vs `llm`).
|
||||
5. Provide **golden fixtures** in `samples/<source>/` and component tests under `StellaOps.Feedser.Tests/Source.<SourceName>` that cover fetch (with canned HTTP responses), parse, map, and incremental resume.
|
||||
- **Definition of Ready**: Base pipeline live, HTTP client registered, schema stub written, test scaffolding ready, tokens/keys documented. Coordinate with BE-Base for any additional shared tooling (HTML to XML transforms, PDF text extraction).
|
||||
- **Definition of Done**: deterministic DTO + map outputs (snapshot hashed, stored under `Feedser.Tests/__snapshots__`), metrics counters added, resume cursor unit tests, and documentation entry in `/docs/sources/<source>.md` describing rate limits, cursor logic, tested fixtures.
|
||||
- Engage QA early: schedule schema reviews before mapper coding to catch field omissions.
|
||||
|
||||
### 19.4 BE-Merge — Canonical merge & dedupe
|
||||
- Start after BE-Base lands canonical models scaffold (IMPLEMENTATION.md §5). Own the `StellaOps.Feedser.Models` definitions, canonical serialization, and hash calculator used by `merge_event`.
|
||||
- Implement **version range utilities** (RPM NEVRA, Debian EVR, SemVer) with exhaustive tests covering edge cases (epoch handling, tilde comparisons, wildcard SUSE ranges). Use fixtures from distro connectors to validate precedence rules.
|
||||
- Build the **identity graph**: CVE-first resolution with fallback to other alias schemes (outlined in §8 of this file). Guarantee deterministic ordering (sort keys + stable merges) and record `beforeHash/afterHash` deltas.
|
||||
- Enforce **precedence policies**: PSIRT/OVAL override generic ranges, KEV toggles exploitation flags without modifying severity, regional feeds enrich severity text but do not downgrade vendor truth. Cover these with integration tests using fused fixture sets.
|
||||
- Expose a **Merge service** with idempotent `MergeAsync(IEnumerable<Guid> advisoryIds)` that writes both canonical document and `merge_event` records per run. Provide metrics (`merge.delta.count`, `merge.identity.conflicts`).
|
||||
- Deliver initial **merge deterministic test**: same fixture set processed twice yields identical hashes; store hash snapshot under `/tests/data/merge/expected-hash.json`.
|
||||
|
||||
### 19.5 BE-Export — JSON & Trivy DB packaging
|
||||
- After BE-Merge exposes stable canonical output, implement **JSON exporter** mirroring `aquasecurity/vuln-list` layout (Implementation §5 step 4; ARCHITECTURE.md stage 3). Ensure directory determinism (sorted keys, newline conventions) and record export cursor consumption.
|
||||
- For **Trivy DB exporter**: wrap the official `trivy-db` builder initially; orchestrate invocation in-process or via CLI with reproducible environment variables. Persist resulting `metadata.json`, BoltDB file, and tarball in `export_state` with digests.
|
||||
- Integrate **ORAS push** pipeline guarded by dry-run flag; support offline bundle packaging (zip/tar) for air-gapped delivery. Provide config-driven repo/tag resolution from `feedser.yaml`.
|
||||
- Metrics to emit: `export.duration`, `export.records`, `export.size_bytes`, `export.delta` counts, ORAS push success/failure.
|
||||
- Tests: snapshot JSON tree, verify OCI manifest media types, simulate incremental export using seeded `advisory` records. Provide CLI smoke test hitting `POST /jobs/export/trivydb` (or CLI equivalent once added).
|
||||
|
||||
### 19.6 QA — Validation & observability lead
|
||||
- Build and maintain **test matrix** covering connectors, merge, export; enforce schema evolution workflows (any schema change must include updated fixtures, docs, and backward-compat diff summary).
|
||||
- Own **component test harness** utilities (HTTP canned server, PDF→text conversion mocks, time-travel clock). Ensure connectors implement `IClock` dependency for deterministic tests.
|
||||
- Set up **golden snapshot review** cadence; add tooling (`dotnet test --filter Category=Golden -- Dump`) to regenerate fixtures and compare diffs.
|
||||
- Monitor **observability baselines**: define default alert thresholds for fetch failures, merge conflicts, export delays. Provide dashboards or documented query templates (Grafana/Prometheus) referenced in `/docs/observability.md`.
|
||||
- Verify **reproducibility**: rerun end-to-end pipeline twice and compare exported digests; document discrepancies and feed them back to BE-Merge/BE-Export.
|
||||
|
||||
### 19.7 Coordination & delivery cadence
|
||||
- Sprint naming follows Implementation plan (Sprint 0–4+). Maintain a shared Kanban board with swimlanes per role; entries should reference the numbered tasks from IMPLEMENTATION.md for traceability.
|
||||
- Hold **daily 15-minute sync** focused on blockers, schema changes, and source-specific rate limits discovered. Escalate major schema/API shifts immediately and capture in `/docs/incidents/YYYY-MM-DD.md`.
|
||||
- Milestone gates:
|
||||
1. **Platform GA** — BE-Base tasks 1–6 closed; Mongo + scheduler operational.
|
||||
2. **Core connectors online** — NVD, GHSA, OSV, KEV delivering mapped advisories.
|
||||
3. **Distro precedence** — Red Hat + Ubuntu connectors validating version precedence vs NVD.
|
||||
4. **Merge deterministic** — hash stability test green.
|
||||
5. **Export ready** — JSON + Trivy DB artifacts validated and pushed to staging registry.
|
||||
|
||||
### 19.8 Hand-off artifacts & documentation expectations
|
||||
- Every completed task must append/adjust documentation: connector guides, merge rules, exporter usage. Store under `/docs` mirroring role ownership; include sample commands, curl invocations, and expected outputs.
|
||||
- Check in sample configs (`etc/feedser.local.yaml.example`) per role with placeholders for sensitive values.
|
||||
- Capture **post-task retros** in `docs/retros/<sprint>-<task>.md` summarizing lessons, API quirks, schema diffs, and follow-up tickets.
|
||||
- Maintain a shared **glossary** (`docs/glossary.md`) covering abbreviations (KEV, OVAL, EVR, NEVRA, GridFS) to speed up onboarding for new engineers.
|
||||
|
||||
---
|
||||
|
||||
The sections above convert the role taxonomy from IMPLEMENTATION.md into actionable playbooks while keeping the authoritative architecture and data contracts inline with this guide. Use them to bootstrap work, then dive into ARCHITECTURE.md and IMPLEMENTATION.md for exhaustive references and sprint-by-sprint sequencing.
|
||||
### Dependency Injection Registration Baseline
|
||||
|
||||
- Every product or library that needs DI registration must expose a `<ProductNamespace>.DependencyInjection` namespace/folder containing one or more implementations of `StellaOps.DependencyInjection.IDependencyInjectionRoutine`.
|
||||
- When distributing only a subset of connectors/exporters, create a dedicated solution that references just the desired plugin projects; the shared build rules always emit every `StellaOps.Feedser.Source.*` and `StellaOps.Feedser.Exporter.*` assembly into `PluginBinaries` by default.
|
||||
- The `IDependencyInjectionRoutine` interface lives in `src/__Libraries/StellaOps.DependencyInjection` and enables both static opt-in registration and reflection-driven root composition.
|
||||
- Each library should provide static helpers that wrap its DI registrations and a thin routine class that forwards to those helpers, for example:
|
||||
|
||||
```csharp
|
||||
public interface IDependencyInjectionRoutine
|
||||
{
|
||||
IServiceCollection Register(
|
||||
IServiceCollection services,
|
||||
IConfiguration configuration);
|
||||
}
|
||||
|
||||
public static class NamespaceLibrary
|
||||
{
|
||||
public static IServiceCollection RegisterNamespaceLibrary(
|
||||
IServiceCollection services,
|
||||
IConfiguration configuration)
|
||||
{
|
||||
// ...
|
||||
return services;
|
||||
}
|
||||
}
|
||||
|
||||
public sealed class DependencyInjectionRoutine : IDependencyInjectionRoutine
|
||||
{
|
||||
public IServiceCollection Register(
|
||||
IServiceCollection services,
|
||||
IConfiguration configuration)
|
||||
{
|
||||
return NamespaceLibrary.RegisterNamespaceLibrary(services, configuration);
|
||||
}
|
||||
}
|
||||
```
|
||||
- **OVAL** — Vendor/distro security definition format; authoritative for OS packages.
|
||||
- **NEVRA / EVR** — RPM and Debian version semantics for OS packages.
|
||||
- **PURL / SemVer** — Coordinates and version semantics for OSS ecosystems.
|
||||
- **KEV** — Known Exploited Vulnerabilities (flag only).
|
||||
@@ -1,3 +1,4 @@
|
||||
@@ -1,191 +0,0 @@
|
||||
# ARCHITECTURE.md — **StellaOps.Feedser**
|
||||
|
||||
> **Goal**: Build a sovereign-ready, self-hostable **feed-merge service** that ingests authoritative vulnerability sources, normalizes and de-duplicates them into **MongoDB**, and exports **JSON** and **Trivy-compatible DB** artifacts.
|
||||
@@ -188,4 +189,3 @@ public interface IFeedConnector {
|
||||
* Default storage MongoDB; for air-gapped, bundle Mongo image + seeded data backup.
|
||||
* Horizontal scale achieved via multiple web service instances sharing Mongo locks.
|
||||
* Provide `feedser.yaml` template describing sources, rate limits, and export settings.
|
||||
|
||||
|
||||
@@ -1,175 +0,0 @@
|
||||
# ARCHITECTURE.md — **StellaOps.Feedser** · *Sprints & Tasks Delivery Plan*
|
||||
|
||||
> **Objective**: Deliver a production-ready **StellaOps.Feedser** web service that ingests authoritative vulnerability sources, normalizes & de-duplicates into **MongoDB**, and exports **JSON** and **Trivy-compatible DB** artifacts with **resume flags** for inputs and outputs.
|
||||
> **Team assumption**: **1 Base Engineer** (preparation / platform) in early sprints to unlock **parallel connector work** by many engineers later.
|
||||
> **Task sizing**: each task is **0.5–2.0 days** (est.).
|
||||
> **Runtime SDK baseline**: .NET 10 Preview 7 (SDK 10.0.100-preview.7.25380.108) with target framework `net10.0`, matching the deployed api.stella-ops.org build.
|
||||
> **Strict staging** (as previously defined):
|
||||
>
|
||||
> 1. **Source Download** (docs) → 2) **Merge + Dedupe + Normalization** (Mongo) → 3) **Export** (JSON / TrivyDB).
|
||||
> **No signing in Feedser**; signing/promotion is an **external pipeline** step.
|
||||
|
||||
---
|
||||
|
||||
## 0) Roles, Notation & Constraints
|
||||
|
||||
* **BE-Base** — Base Engineer (preparation/platform).
|
||||
* **BE-Conn-X** — Connector Engineer for source X (waves).
|
||||
* **BE-Merge** — Engineer focusing on merge/dedupe.
|
||||
* **BE-Export** — Engineer focusing on exporters.
|
||||
* **QA** — test/dev-ex reviewer (could be engineers rotating).
|
||||
* **Est.** — estimated effort for the task (0.5/1/1.5/2 days).
|
||||
* **Depends** — tasks that must finish first.
|
||||
* **Unlocks** — parallelizable work that becomes possible.
|
||||
|
||||
---
|
||||
|
||||
## 1) Sprint 0 — Inception & Repo Bootstrap (Week 1)
|
||||
|
||||
> Goal: Stand up solution scaffolding, CI, coding standards, and minimal docs to let others clone and build.
|
||||
|
||||
1. **Create solution & project skeletons**
|
||||
|
||||
* Create `src/` solution with projects & namespaces:
|
||||
|
||||
* `StellaOps.Feedser.WebService`, `StellaOps.Feedser.Core`, `StellaOps.Feedser.Models`,
|
||||
`StellaOps.Feedser.Storage.Mongo`, `StellaOps.Feedser.Source.Common`,
|
||||
`StellaOps.Feedser.Exporter.Json`, `StellaOps.Feedser.Exporter.TrivyDb`,
|
||||
stubs for `StellaOps.Feedser.Source.*` families.
|
||||
* Add top-level `Directory.Build.props` / `Directory.Build.targets`.
|
||||
* Pin all projects to `net10.0` and ensure CI/global.json uses .NET SDK 10.0.100-preview.7.25380.108 (same preview as api.stella-ops.org).
|
||||
|
||||
2. **Coding standards & analyzers**
|
||||
|
||||
* `.editorconfig`, `StyleCop.Analyzers`, Roslyn rulesets, nullable enabled, warnings as errors in CI.
|
||||
|
||||
3. **Solution build & test CI**
|
||||
|
||||
* Gitea Actions (or internal CI) with matrix: `dotnet build`, `dotnet test`, coverage, artifact publish.
|
||||
|
||||
4. **Docker build & devcontainer**
|
||||
|
||||
* Multi-stage Dockerfile for WebService; VS Code devcontainer (`.devcontainer/`).
|
||||
|
||||
5. **Docs scaffold**
|
||||
|
||||
* Seed `/docs` with `README.md`, link to `AGENTS.md`, `ARCHITECTURE.md`, contribution guide, commit message convention.
|
||||
|
||||
6. **Versioning & release tagging**
|
||||
|
||||
* SemVer policy; auto version from tags.
|
||||
|
||||
---
|
||||
|
||||
## 2) Sprint 1 — Configuration, MongoDB & Resume Flags (Week 2)
|
||||
|
||||
> Goal: Stand up MongoDB storage, resume flags for **sources** and **exports**, and config plumbing.
|
||||
|
||||
1. **Configuration plumbing** — Bind `appsettings.json` + optional `feedser.yaml` + ENV to strongly typed `FeedserConfig`.
|
||||
2. **Mongo client & database bootstrap** — Register `IMongoClient` / `IMongoDatabase`, health probes, retry policies.
|
||||
3. **Collections & indexes** — Create `source`, `source_state`, `document`, `dto`, `advisory`, `alias`, `affected`, `reference`, `kev_flag`, `ru_flags`, `jp_flags`, `psirt_flags`, `merge_event`, `export_state`, `locks`, `jobs`.
|
||||
4. **GridFS ingestion path** — Store large payloads, link via `document.gridFsId`, dedupe by SHA256.
|
||||
5. **Resume flags — sources** — CRUD helpers for `cursor`, `lastSuccess`, `failCount`, `backoffUntil`, `paused`, `paceOverrides`.
|
||||
6. **Resume flags — exports** — Manage `baseExportId`, `baseDigest`, `lastFullDigest`, `lastDeltaDigest`, `exportCursor`, `targetRepo`, `exporterVersion`.
|
||||
7. **Storage integration tests** — Validate indexes, TTL, GridFS round-trip, concurrency.
|
||||
|
||||
---
|
||||
|
||||
## 3) Sprint 2 — Web Service, APIs & Job Infrastructure (Week 3)
|
||||
|
||||
1. Minimal API host (`Program.cs`) exposing `/health`, `/ready`.
|
||||
2. Status & control endpoints (`/status`, `/sources`, pause/pace controls).
|
||||
3. Job ledger & state machine with `jobs` collection.
|
||||
4. Distributed lock service with lease & heartbeat.
|
||||
5. Scheduler with cron parsing, singleton enforcement, backoff.
|
||||
6. Kill/timeout support via cancellation tokens.
|
||||
7. Optional API key auth middleware.
|
||||
|
||||
---
|
||||
|
||||
## 4) Sprint 3 — Source Common, Rate Limit & Validation (Week 4)
|
||||
|
||||
1. Connector base classes implementing `IFeedConnector` helpers.
|
||||
2. HTTP client factory with rate limiter and retry policies.
|
||||
3. Schema/XSD/JSON validation framework.
|
||||
4. Document pipeline helpers for conditional GET, hashing, dedupe.
|
||||
5. DTO pipeline & provenance tracking.
|
||||
6. Observability wiring (Serilog, OpenTelemetry).
|
||||
|
||||
---
|
||||
|
||||
## 5) Sprint 4 — Normalization & Merge Engine (Week 5)
|
||||
|
||||
1. Canonical models (`StellaOps.Feedser.Models`).
|
||||
2. Version range utilities (RPM NEVRA, Debian EVR, SemVer).
|
||||
3. Alias graph & identity resolution.
|
||||
4. Precedence enforcement & deterministic serialization.
|
||||
5. Merge service with hash logging and metrics.
|
||||
6. Merge determinism tests.
|
||||
|
||||
---
|
||||
|
||||
## 6) Sprint 5 — Exporters & Packaging (Week 6)
|
||||
|
||||
1. JSON exporter mirroring `vuln-list` tree.
|
||||
2. Trivy DB exporter integration (`trivy-db` builder initially).
|
||||
3. ORAS push / offline bundle support.
|
||||
4. Export metrics & audit logging.
|
||||
5. Incremental export tests and media-type validation.
|
||||
|
||||
---
|
||||
|
||||
## 7) Sprint 6 — Connectors Wave 1 (Weeks 7–8)
|
||||
|
||||
* Registries: CVE, NVD, GHSA, OSV.
|
||||
* KEV catalog ingestion.
|
||||
* National CERTs: CERT/CC, JVN, CERT-FR, CERT-In, KISA, CERT-Bund, ACSC, CCCS.
|
||||
|
||||
Each connector deliverable includes: watermarking, schema validation, mapping, provenance, metrics, golden fixtures, incremental resume tests.
|
||||
|
||||
---
|
||||
|
||||
## 8) Sprint 7 — Connectors Wave 2 (Weeks 9–10)
|
||||
|
||||
* PSIRTs: MSRC, Cisco, Oracle, Adobe, Apple, Chromium, VMware.
|
||||
* Distros: Red Hat (API + OVAL), Ubuntu, Debian, SUSE.
|
||||
* ICS: CISA ICS, Kaspersky ICS-CERT.
|
||||
* Russia-specific: BDU (HTML + LLM fallback), NKCKI.
|
||||
|
||||
---
|
||||
|
||||
## 9) Sprint 8 — Merge Hardening & QA
|
||||
|
||||
* Conflict resolution scenarios (mixed aliases, partial data).
|
||||
* Performance tuning, batching, streaming parse.
|
||||
* Deterministic output regression tests.
|
||||
* Golden snapshot review tooling.
|
||||
|
||||
---
|
||||
|
||||
## 10) Sprint 9 — Packaging, Delivery & Ops
|
||||
|
||||
* Reproducible export pipelines (hash-based build IDs).
|
||||
* OCI push automation, offline bundle scripts.
|
||||
* Observability dashboards, alerts.
|
||||
* Disaster recovery playbooks (Mongo backups, export restore).
|
||||
|
||||
---
|
||||
|
||||
## 11) Sprint 10 — Launch Readiness
|
||||
|
||||
* Penetration test fixes, security hardening.
|
||||
* Documentation polish (operator guides, API reference).
|
||||
* Release tagging, change log, migration notes.
|
||||
* GA criteria review with stakeholders.
|
||||
|
||||
---
|
||||
|
||||
## Definition of Done (for any code change)
|
||||
|
||||
1. Unit/integration tests updated and passing.
|
||||
2. Schema/golden fixtures regenerated when applicable.
|
||||
3. Telemetry (logs/metrics/traces) reviewed for signal quality.
|
||||
4. Docs updated (`/docs`, `/etc/feedser.yaml` examples).
|
||||
5. Reproducibility verified (hash match on repeated run).
|
||||
|
||||
27
src/StellaOps.Feedser/StellaOps.Feedser.Core/AGENTS.md
Normal file
27
src/StellaOps.Feedser/StellaOps.Feedser.Core/AGENTS.md
Normal file
@@ -0,0 +1,27 @@
|
||||
# AGENTS
|
||||
## Role
|
||||
Job orchestration and lifecycle. Registers job definitions, schedules execution, triggers runs, reports status for connectors and exporters.
|
||||
## Scope
|
||||
- Contracts: IJob (execute with CancellationToken), JobRunStatus, JobTriggerOutcome/Result.
|
||||
- Registration: JobSchedulerBuilder.AddJob<T>(kind, cronExpression?, timeout?, leaseDuration?); options recorded in JobSchedulerOptions.
|
||||
- Plugin host integration discovers IJob providers via registered IDependencyInjectionRoutine implementations.
|
||||
- Coordination: start/stop, single-flight via storage locks/leases, run bookkeeping (status, timings, errors).
|
||||
- Triggering: manual/cron/API; parameterized runs; idempotent rejection if already running.
|
||||
- Surfacing: enumerate definitions, last run, recent runs, active runs to WebService endpoints.
|
||||
## Participants
|
||||
- WebService exposes REST endpoints for definitions, runs, active, and trigger.
|
||||
- Storage.Mongo persists job definitions metadata, run documents, and leases (locks collection).
|
||||
- Source connectors and Exporters implement IJob and are registered into the scheduler via DI and Plugin routines.
|
||||
- Models/Merge/Export are invoked indirectly through jobs.
|
||||
- Plugin host runtime loads dependency injection routines that register job definitions.
|
||||
## Interfaces & contracts
|
||||
- Kind naming: family:source:verb (e.g., nvd:fetch, redhat:map, export:trivy-db).
|
||||
- Timeout and lease duration enforce cancellation and duplicate-prevention.
|
||||
- TimeProvider used for deterministic timing in tests.
|
||||
## In/Out of scope
|
||||
In: job lifecycle, registration, trigger semantics, run metadata.
|
||||
Out: business logic of connectors/exporters, HTTP handlers (owned by WebService).
|
||||
## Observability & security expectations
|
||||
- Metrics: job.run.started/succeeded/failed, job.durationMs, job.concurrent.rejected, job.alreadyRunning.
|
||||
- Logs: kind, trigger, params hash, lease holder, outcome; redact params containing secrets.
|
||||
- Honor CancellationToken early and often.
|
||||
@@ -1,8 +0,0 @@
|
||||
namespace StellaOps.Feedser.Core;

/// <summary>
/// Empty marker type used for assembly discovery (e.g., locating the Core
/// assembly via <c>typeof(FeedserCoreMarker).Assembly</c> in scanning or
/// registration code). Carries no state or behavior.
/// </summary>
public sealed class FeedserCoreMarker
{
}
|
||||
@@ -0,0 +1,6 @@
|
||||
namespace StellaOps.Feedser.Core.Jobs;

/// <summary>
/// A schedulable unit of work (connector stage, exporter, maintenance task)
/// executed by the job scheduler.
/// </summary>
public interface IJob
{
    /// <summary>
    /// Runs the job once.
    /// </summary>
    /// <param name="context">Run context for this execution (run metadata/parameters).</param>
    /// <param name="cancellationToken">
    /// Signals timeout or shutdown; implementations should observe it early and often.
    /// </param>
    Task ExecuteAsync(JobExecutionContext context, CancellationToken cancellationToken);
}
|
||||
@@ -0,0 +1,18 @@
|
||||
namespace StellaOps.Feedser.Core.Jobs;

/// <summary>
/// Front door for job orchestration: triggers runs and exposes read-only views
/// of job definitions and run history for status/control endpoints.
/// </summary>
public interface IJobCoordinator
{
    /// <summary>
    /// Attempts to start a run of the job identified by <paramref name="kind"/>.
    /// </summary>
    /// <param name="kind">Registered job kind (e.g., "nvd:fetch").</param>
    /// <param name="parameters">Optional run parameters; may be null for a parameterless run.</param>
    /// <param name="trigger">Label describing what initiated the run (manual/cron/API).</param>
    /// <returns>
    /// Outcome of the trigger attempt — e.g., not-found, disabled, invalid parameters,
    /// already-running, or accepted (see <see cref="JobTriggerResult"/>).
    /// </returns>
    Task<JobTriggerResult> TriggerAsync(string kind, IReadOnlyDictionary<string, object?>? parameters, string trigger, CancellationToken cancellationToken);

    /// <summary>Lists all registered job definitions.</summary>
    Task<IReadOnlyList<JobDefinition>> GetDefinitionsAsync(CancellationToken cancellationToken);

    /// <summary>
    /// Lists recent runs, optionally filtered to a single <paramref name="kind"/>,
    /// capped at <paramref name="limit"/> entries.
    /// </summary>
    Task<IReadOnlyList<JobRunSnapshot>> GetRecentRunsAsync(string? kind, int limit, CancellationToken cancellationToken);

    /// <summary>Lists runs that are currently executing.</summary>
    Task<IReadOnlyList<JobRunSnapshot>> GetActiveRunsAsync(CancellationToken cancellationToken);

    /// <summary>Looks up a single run by id; null when unknown.</summary>
    Task<JobRunSnapshot?> GetRunAsync(Guid runId, CancellationToken cancellationToken);

    /// <summary>Returns the most recent run for <paramref name="kind"/>; null when none exists.</summary>
    Task<JobRunSnapshot?> GetLastRunAsync(string kind, CancellationToken cancellationToken);

    /// <summary>
    /// Batch form of <see cref="GetLastRunAsync"/>: maps each requested kind to its
    /// latest run. Kinds with no runs are presumably omitted from the result — confirm
    /// against the implementation.
    /// </summary>
    Task<IReadOnlyDictionary<string, JobRunSnapshot>> GetLastRunsAsync(IEnumerable<string> kinds, CancellationToken cancellationToken);
}
|
||||
@@ -0,0 +1,20 @@
|
||||
namespace StellaOps.Feedser.Core.Jobs;

/// <summary>
/// Persistence for job run bookkeeping (backed by the Mongo <c>jobs</c> collection
/// per the architecture notes). Tracks creation, start/completion transitions, and
/// query views used by the coordinator and WebService.
/// </summary>
public interface IJobStore
{
    /// <summary>Persists a new run record and returns its snapshot.</summary>
    Task<JobRunSnapshot> CreateAsync(JobRunCreateRequest request, CancellationToken cancellationToken);

    /// <summary>
    /// Marks the run as started at <paramref name="startedAt"/>. Returns the updated
    /// snapshot, or null when the transition did not apply (callers such as
    /// JobCoordinator fall back to the pre-start snapshot in that case).
    /// </summary>
    Task<JobRunSnapshot?> TryStartAsync(Guid runId, DateTimeOffset startedAt, CancellationToken cancellationToken);

    /// <summary>
    /// Records the run's terminal outcome. Returns the updated snapshot, or null
    /// when the transition did not apply (e.g., run not found) — confirm exact
    /// semantics against the implementation.
    /// </summary>
    Task<JobRunSnapshot?> TryCompleteAsync(Guid runId, JobRunCompletion completion, CancellationToken cancellationToken);

    /// <summary>Looks up a run by id; null when unknown.</summary>
    Task<JobRunSnapshot?> FindAsync(Guid runId, CancellationToken cancellationToken);

    /// <summary>Lists recent runs, optionally filtered by kind, capped at <paramref name="limit"/>.</summary>
    Task<IReadOnlyList<JobRunSnapshot>> GetRecentRunsAsync(string? kind, int limit, CancellationToken cancellationToken);

    /// <summary>Lists runs currently in progress.</summary>
    Task<IReadOnlyList<JobRunSnapshot>> GetActiveRunsAsync(CancellationToken cancellationToken);

    /// <summary>Returns the most recent run for <paramref name="kind"/>; null when none exists.</summary>
    Task<JobRunSnapshot?> GetLastRunAsync(string kind, CancellationToken cancellationToken);

    /// <summary>Batch lookup of the latest run per kind.</summary>
    Task<IReadOnlyDictionary<string, JobRunSnapshot>> GetLastRunsAsync(IEnumerable<string> kinds, CancellationToken cancellationToken);
}
|
||||
@@ -0,0 +1,10 @@
|
||||
namespace StellaOps.Feedser.Core.Jobs;

/// <summary>
/// Distributed lease primitive used for single-flight job execution across
/// web-service instances (backed by the Mongo <c>locks</c> collection per the
/// architecture notes). The caller supplies <paramref name="now"/> so time can be
/// controlled deterministically in tests via <see cref="System.TimeProvider"/>.
/// </summary>
public interface ILeaseStore
{
    /// <summary>
    /// Attempts to acquire the lease identified by <paramref name="key"/> for
    /// <paramref name="holder"/>. Returns the lease on success, or null when it is
    /// already held (JobCoordinator maps null to an "already running" outcome).
    /// </summary>
    Task<JobLease?> TryAcquireAsync(string key, string holder, TimeSpan leaseDuration, DateTimeOffset now, CancellationToken cancellationToken);

    /// <summary>
    /// Extends a lease the caller already holds. Returns the refreshed lease, or
    /// null when the lease is no longer held by <paramref name="holder"/> — confirm
    /// against the implementation.
    /// </summary>
    Task<JobLease?> HeartbeatAsync(string key, string holder, TimeSpan leaseDuration, DateTimeOffset now, CancellationToken cancellationToken);

    /// <summary>
    /// Releases the lease if held by <paramref name="holder"/>; returns whether a
    /// release actually occurred.
    /// </summary>
    Task<bool> ReleaseAsync(string key, string holder, CancellationToken cancellationToken);
}
|
||||
@@ -0,0 +1,635 @@
|
||||
using System.Collections;
|
||||
using System.Diagnostics;
|
||||
using System.Security.Cryptography;
|
||||
using System.Text;
|
||||
using System.Text.Json;
|
||||
using System.Globalization;
|
||||
using Microsoft.Extensions.DependencyInjection;
|
||||
using Microsoft.Extensions.Logging;
|
||||
using Microsoft.Extensions.Options;
|
||||
|
||||
namespace StellaOps.Feedser.Core.Jobs;
|
||||
|
||||
public sealed class JobCoordinator : IJobCoordinator
|
||||
{
|
||||
private readonly JobSchedulerOptions _options;
|
||||
private readonly IJobStore _jobStore;
|
||||
private readonly ILeaseStore _leaseStore;
|
||||
private readonly IServiceScopeFactory _scopeFactory;
|
||||
private readonly ILogger<JobCoordinator> _logger;
|
||||
private readonly ILoggerFactory _loggerFactory;
|
||||
private readonly TimeProvider _timeProvider;
|
||||
private readonly JobDiagnostics _diagnostics;
|
||||
private readonly string _holderId;
|
||||
|
||||
public JobCoordinator(
|
||||
IOptions<JobSchedulerOptions> optionsAccessor,
|
||||
IJobStore jobStore,
|
||||
ILeaseStore leaseStore,
|
||||
IServiceScopeFactory scopeFactory,
|
||||
ILogger<JobCoordinator> logger,
|
||||
ILoggerFactory loggerFactory,
|
||||
TimeProvider timeProvider,
|
||||
JobDiagnostics diagnostics)
|
||||
{
|
||||
_options = (optionsAccessor ?? throw new ArgumentNullException(nameof(optionsAccessor))).Value;
|
||||
_jobStore = jobStore ?? throw new ArgumentNullException(nameof(jobStore));
|
||||
_leaseStore = leaseStore ?? throw new ArgumentNullException(nameof(leaseStore));
|
||||
_scopeFactory = scopeFactory ?? throw new ArgumentNullException(nameof(scopeFactory));
|
||||
_logger = logger ?? throw new ArgumentNullException(nameof(logger));
|
||||
_loggerFactory = loggerFactory ?? throw new ArgumentNullException(nameof(loggerFactory));
|
||||
_timeProvider = timeProvider ?? throw new ArgumentNullException(nameof(timeProvider));
|
||||
_diagnostics = diagnostics ?? throw new ArgumentNullException(nameof(diagnostics));
|
||||
_holderId = BuildHolderId();
|
||||
}
|
||||
|
||||
public async Task<JobTriggerResult> TriggerAsync(string kind, IReadOnlyDictionary<string, object?>? parameters, string trigger, CancellationToken cancellationToken)
|
||||
{
|
||||
using var triggerActivity = _diagnostics.StartTriggerActivity(kind, trigger);
|
||||
|
||||
if (!_options.Definitions.TryGetValue(kind, out var definition))
|
||||
{
|
||||
var result = JobTriggerResult.NotFound($"Job kind '{kind}' is not registered.");
|
||||
triggerActivity?.SetStatus(ActivityStatusCode.Error, result.ErrorMessage);
|
||||
triggerActivity?.SetTag("job.trigger.outcome", result.Outcome.ToString());
|
||||
_diagnostics.RecordTriggerRejected(kind, trigger, "not_found");
|
||||
return result;
|
||||
}
|
||||
|
||||
triggerActivity?.SetTag("job.enabled", definition.Enabled);
|
||||
triggerActivity?.SetTag("job.timeout_seconds", definition.Timeout.TotalSeconds);
|
||||
triggerActivity?.SetTag("job.lease_seconds", definition.LeaseDuration.TotalSeconds);
|
||||
|
||||
if (!definition.Enabled)
|
||||
{
|
||||
var result = JobTriggerResult.Disabled($"Job kind '{kind}' is disabled.");
|
||||
triggerActivity?.SetStatus(ActivityStatusCode.Ok, "disabled");
|
||||
triggerActivity?.SetTag("job.trigger.outcome", result.Outcome.ToString());
|
||||
_diagnostics.RecordTriggerRejected(kind, trigger, "disabled");
|
||||
return result;
|
||||
}
|
||||
|
||||
parameters ??= new Dictionary<string, object?>();
|
||||
|
||||
var parameterSnapshot = parameters.Count == 0
|
||||
? new Dictionary<string, object?>(StringComparer.Ordinal)
|
||||
: new Dictionary<string, object?>(parameters, StringComparer.Ordinal);
|
||||
|
||||
if (!TryNormalizeParameters(parameterSnapshot, out var normalizedParameters, out var parameterError))
|
||||
{
|
||||
var message = string.IsNullOrWhiteSpace(parameterError)
|
||||
? "Job trigger parameters contain unsupported values."
|
||||
: parameterError;
|
||||
triggerActivity?.SetStatus(ActivityStatusCode.Error, message);
|
||||
triggerActivity?.SetTag("job.trigger.outcome", JobTriggerOutcome.InvalidParameters.ToString());
|
||||
_diagnostics.RecordTriggerRejected(kind, trigger, "invalid_parameters");
|
||||
return JobTriggerResult.InvalidParameters(message);
|
||||
}
|
||||
|
||||
parameterSnapshot = normalizedParameters;
|
||||
|
||||
string? parametersHash;
|
||||
try
|
||||
{
|
||||
parametersHash = JobParametersHasher.Compute(parameterSnapshot);
|
||||
}
|
||||
catch (Exception ex)
|
||||
{
|
||||
var message = $"Job trigger parameters cannot be serialized: {ex.Message}";
|
||||
triggerActivity?.SetStatus(ActivityStatusCode.Error, message);
|
||||
triggerActivity?.SetTag("job.trigger.outcome", JobTriggerOutcome.InvalidParameters.ToString());
|
||||
_diagnostics.RecordTriggerRejected(kind, trigger, "invalid_parameters");
|
||||
_logger.LogWarning(ex, "Failed to serialize parameters for job {Kind}", kind);
|
||||
return JobTriggerResult.InvalidParameters(message);
|
||||
}
|
||||
|
||||
triggerActivity?.SetTag("job.parameters_count", parameterSnapshot.Count);
|
||||
|
||||
var now = _timeProvider.GetUtcNow();
|
||||
var leaseDuration = definition.LeaseDuration <= TimeSpan.Zero ? _options.DefaultLeaseDuration : definition.LeaseDuration;
|
||||
|
||||
JobLease? lease = null;
|
||||
try
|
||||
{
|
||||
lease = await _leaseStore.TryAcquireAsync(definition.LeaseKey, _holderId, leaseDuration, now, cancellationToken).ConfigureAwait(false);
|
||||
if (lease is null)
|
||||
{
|
||||
var result = JobTriggerResult.AlreadyRunning($"Job '{kind}' is already running.");
|
||||
triggerActivity?.SetStatus(ActivityStatusCode.Ok, "already_running");
|
||||
triggerActivity?.SetTag("job.trigger.outcome", result.Outcome.ToString());
|
||||
_diagnostics.RecordTriggerRejected(kind, trigger, "already_running");
|
||||
return result;
|
||||
}
|
||||
|
||||
var createdAt = _timeProvider.GetUtcNow();
|
||||
var request = new JobRunCreateRequest(
|
||||
definition.Kind,
|
||||
trigger,
|
||||
parameterSnapshot,
|
||||
parametersHash,
|
||||
definition.Timeout,
|
||||
leaseDuration,
|
||||
createdAt);
|
||||
|
||||
triggerActivity?.SetTag("job.parameters_hash", request.ParametersHash);
|
||||
|
||||
var run = await _jobStore.CreateAsync(request, cancellationToken).ConfigureAwait(false);
|
||||
var startedAt = _timeProvider.GetUtcNow();
|
||||
var started = await _jobStore.TryStartAsync(run.RunId, startedAt, cancellationToken).ConfigureAwait(false) ?? run;
|
||||
|
||||
triggerActivity?.SetTag("job.run_id", started.RunId);
|
||||
triggerActivity?.SetTag("job.created_at", createdAt.UtcDateTime);
|
||||
triggerActivity?.SetTag("job.started_at", started.StartedAt?.UtcDateTime ?? startedAt.UtcDateTime);
|
||||
|
||||
var linkedTokenSource = CancellationTokenSource.CreateLinkedTokenSource(cancellationToken);
|
||||
if (definition.Timeout > TimeSpan.Zero)
|
||||
{
|
||||
linkedTokenSource.CancelAfter(definition.Timeout);
|
||||
}
|
||||
|
||||
var capturedLease = lease ?? throw new InvalidOperationException("Lease acquisition returned null.");
|
||||
try
|
||||
{
|
||||
_ = Task.Run(() => ExecuteJobAsync(definition, capturedLease, started, parameterSnapshot, trigger, linkedTokenSource), CancellationToken.None)
|
||||
.ContinueWith(t =>
|
||||
{
|
||||
if (t.Exception is not null)
|
||||
{
|
||||
_logger.LogError(t.Exception, "Unhandled job execution failure for {Kind}", definition.Kind);
|
||||
}
|
||||
},
|
||||
TaskContinuationOptions.OnlyOnFaulted | TaskContinuationOptions.ExecuteSynchronously);
|
||||
lease = null; // released by background job execution
|
||||
}
|
||||
catch (Exception ex)
|
||||
{
|
||||
lease = capturedLease; // ensure outer finally releases if scheduling fails
|
||||
triggerActivity?.SetStatus(ActivityStatusCode.Error, ex.Message);
|
||||
triggerActivity?.SetTag("job.trigger.outcome", "exception");
|
||||
_diagnostics.RecordTriggerRejected(kind, trigger, "queue_failure");
|
||||
throw;
|
||||
}
|
||||
|
||||
var accepted = JobTriggerResult.Accepted(started);
|
||||
_diagnostics.RecordTriggerAccepted(kind, trigger);
|
||||
triggerActivity?.SetStatus(ActivityStatusCode.Ok);
|
||||
triggerActivity?.SetTag("job.trigger.outcome", accepted.Outcome.ToString());
|
||||
return accepted;
|
||||
}
|
||||
catch (Exception ex)
|
||||
{
|
||||
triggerActivity?.SetStatus(ActivityStatusCode.Error, ex.Message);
|
||||
triggerActivity?.SetTag("job.trigger.outcome", "exception");
|
||||
_diagnostics.RecordTriggerRejected(kind, trigger, "exception");
|
||||
throw;
|
||||
}
|
||||
finally
|
||||
{
|
||||
// Release handled by background execution path. If we failed before scheduling, release here.
|
||||
if (lease is not null)
|
||||
{
|
||||
var releaseError = await TryReleaseLeaseAsync(lease, definition.Kind).ConfigureAwait(false);
|
||||
if (releaseError is not null)
|
||||
{
|
||||
_logger.LogError(releaseError, "Failed to release lease {LeaseKey} for job {Kind}", lease.Key, definition.Kind);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
public Task<IReadOnlyList<JobDefinition>> GetDefinitionsAsync(CancellationToken cancellationToken)
{
    // Snapshot registered definitions in a stable ordinal order so callers
    // always see a deterministic listing.
    var ordered = _options.Definitions.Values
        .OrderBy(definition => definition.Kind, StringComparer.Ordinal)
        .ToArray();
    return Task.FromResult<IReadOnlyList<JobDefinition>>(ordered);
}
|
||||
|
||||
public Task<IReadOnlyList<JobRunSnapshot>> GetRecentRunsAsync(string? kind, int limit, CancellationToken cancellationToken)
{
    // Pure pass-through to the persistence layer; no coordinator-side filtering.
    return _jobStore.GetRecentRunsAsync(kind, limit, cancellationToken);
}
|
||||
|
||||
public Task<IReadOnlyList<JobRunSnapshot>> GetActiveRunsAsync(CancellationToken cancellationToken)
{
    // Pure pass-through to the persistence layer.
    return _jobStore.GetActiveRunsAsync(cancellationToken);
}
|
||||
|
||||
public Task<JobRunSnapshot?> GetRunAsync(Guid runId, CancellationToken cancellationToken)
{
    // Lookup by run id; null when the run is unknown.
    return _jobStore.FindAsync(runId, cancellationToken);
}
|
||||
|
||||
public Task<JobRunSnapshot?> GetLastRunAsync(string kind, CancellationToken cancellationToken)
{
    // Latest run for a single job kind, straight from the store.
    return _jobStore.GetLastRunAsync(kind, cancellationToken);
}
|
||||
|
||||
public Task<IReadOnlyDictionary<string, JobRunSnapshot>> GetLastRunsAsync(IEnumerable<string> kinds, CancellationToken cancellationToken)
{
    // Batch variant of GetLastRunAsync; keyed by job kind.
    return _jobStore.GetLastRunsAsync(kinds, cancellationToken);
}
|
||||
|
||||
/// <summary>
/// Validates and canonicalizes trigger parameters. On failure, sets
/// <paramref name="error"/> and leaves <paramref name="normalized"/> unusable.
/// </summary>
private static bool TryNormalizeParameters(
    IReadOnlyDictionary<string, object?> source,
    out Dictionary<string, object?> normalized,
    out string? error)
{
    // Fast path: nothing to validate or copy.
    if (source.Count == 0)
    {
        normalized = new Dictionary<string, object?>(StringComparer.Ordinal);
        error = null;
        return true;
    }

    var result = new Dictionary<string, object?>(source.Count, StringComparer.Ordinal);
    foreach (var (key, rawValue) in source)
    {
        // Keys become JSON property names downstream, so blanks are rejected.
        if (string.IsNullOrWhiteSpace(key))
        {
            error = "Parameter keys must be non-empty strings.";
            normalized = default!;
            return false;
        }

        try
        {
            result[key] = NormalizeParameterValue(rawValue);
        }
        catch (Exception ex)
        {
            // Normalization throws for unsupported value types; report which key failed.
            error = $"Parameter '{key}' cannot be serialized: {ex.Message}";
            normalized = default!;
            return false;
        }
    }

    normalized = result;
    error = null;
    return true;
}
|
||||
|
||||
/// <summary>
/// Converts a single parameter value into a canonical, JSON-serializable form.
/// NOTE: the order of the pattern cases is significant — e.g. <c>string</c>
/// must match before <c>IEnumerable</c>, and the bounded <c>ulong</c> case
/// must precede the unbounded one.
/// </summary>
/// <exception cref="InvalidOperationException">For unsupported value types or invalid nested keys.</exception>
private static object? NormalizeParameterValue(object? value)
{
    if (value is null)
    {
        return null;
    }

    switch (value)
    {
        // Already canonical JSON-friendly primitives pass through untouched.
        case string or bool or double or decimal:
            return value;
        // Small signed/unsigned integers collapse to a single Int64 representation.
        case byte or sbyte or short or ushort or int or long:
            return Convert.ToInt64(value, CultureInfo.InvariantCulture);
        case uint ui:
            return Convert.ToInt64(ui);
        // ulong fits in long only up to long.MaxValue; larger values fall back to a string.
        case ulong ul when ul <= long.MaxValue:
            return (long)ul;
        case ulong ul:
            return ul.ToString(CultureInfo.InvariantCulture);
        case float f:
            return (double)f;
        // All timestamps are normalized to UTC for deterministic serialization.
        case DateTime dt:
            return dt.Kind == DateTimeKind.Utc ? dt : dt.ToUniversalTime();
        case DateTimeOffset dto:
            return dto.ToUniversalTime();
        // "c" is the culture-invariant constant TimeSpan format.
        case TimeSpan ts:
            return ts.ToString("c", CultureInfo.InvariantCulture);
        case Guid guid:
            return guid.ToString("D");
        case Enum enumValue:
            return enumValue.ToString();
        case byte[] bytes:
            return Convert.ToBase64String(bytes);
        case JsonDocument document:
            return NormalizeJsonElement(document.RootElement);
        case JsonElement element:
            return NormalizeJsonElement(element);
        case IDictionary dictionary:
        {
            // Nested maps are re-keyed into an ordinal-sorted dictionary so the
            // serialized form (and thus the parameters hash) is order-independent.
            var nested = new SortedDictionary<string, object?>(StringComparer.Ordinal);
            foreach (DictionaryEntry entry in dictionary)
            {
                if (entry.Key is not string key || string.IsNullOrWhiteSpace(key))
                {
                    throw new InvalidOperationException("Nested dictionary keys must be non-empty strings.");
                }

                nested[key] = NormalizeParameterValue(entry.Value);
            }

            return nested;
        }
        // Strings were handled above; any other enumerable is normalized element-wise.
        case IEnumerable enumerable when value is not string:
        {
            var list = new List<object?>();
            foreach (var item in enumerable)
            {
                list.Add(NormalizeParameterValue(item));
            }

            return list;
        }
        default:
            throw new InvalidOperationException($"Unsupported parameter value of type '{value.GetType().FullName}'.");
    }
}
|
||||
|
||||
/// <summary>
/// Maps a <see cref="JsonElement"/> to plain CLR values (null/bool/string,
/// long/decimal/double for numbers, sorted dictionary for objects, list for arrays).
/// </summary>
private static object? NormalizeJsonElement(JsonElement element)
{
    switch (element.ValueKind)
    {
        case JsonValueKind.Null:
            return null;
        case JsonValueKind.String:
            return element.GetString();
        case JsonValueKind.True:
            return true;
        case JsonValueKind.False:
            return false;
        case JsonValueKind.Number:
            // Prefer exact integral, then exact decimal, then lossy double.
            if (element.TryGetInt64(out var integral))
            {
                return integral;
            }

            return element.TryGetDecimal(out var exact) ? exact : element.GetDouble();
        case JsonValueKind.Object:
            return NormalizeJsonObject(element);
        case JsonValueKind.Array:
            return NormalizeJsonArray(element);
        default:
            throw new InvalidOperationException($"Unsupported JSON value '{element.ValueKind}'.");
    }
}
|
||||
|
||||
// Ordinal key sorting keeps the normalized object (and any hash of it) deterministic.
private static SortedDictionary<string, object?> NormalizeJsonObject(JsonElement element)
{
    var normalized = new SortedDictionary<string, object?>(StringComparer.Ordinal);
    foreach (var member in element.EnumerateObject())
    {
        normalized[member.Name] = NormalizeJsonElement(member.Value);
    }

    return normalized;
}
|
||||
|
||||
// Arrays keep their element order; each entry is normalized recursively.
private static List<object?> NormalizeJsonArray(JsonElement element)
{
    var normalized = new List<object?>(element.GetArrayLength());
    foreach (var entry in element.EnumerateArray())
    {
        normalized.Add(NormalizeJsonElement(entry));
    }

    return normalized;
}
|
||||
|
||||
// Records terminal state for a run, stamping completion with the injected clock
// so tests can control time. Returns null if the store rejected the transition.
private async Task<JobRunSnapshot?> CompleteRunAsync(Guid runId, JobRunStatus status, string? error, CancellationToken cancellationToken)
{
    var finishedAt = _timeProvider.GetUtcNow();
    return await _jobStore
        .TryCompleteAsync(runId, new JobRunCompletion(status, finishedAt, error), cancellationToken)
        .ConfigureAwait(false);
}
|
||||
|
||||
// Best-effort wall-clock duration for metrics: prefer the store-computed value,
// otherwise derive from the best-known start/end timestamps. Returns null when
// the derived value would be negative (clock skew / incomplete data).
private TimeSpan? ResolveDuration(JobRunSnapshot original, JobRunSnapshot? completed)
{
    if (completed?.Duration is { } stored)
    {
        return stored;
    }

    var start = completed?.StartedAt ?? original.StartedAt ?? original.CreatedAt;
    var end = completed?.CompletedAt ?? _timeProvider.GetUtcNow();
    var elapsed = end - start;
    return elapsed < TimeSpan.Zero ? null : elapsed;
}
|
||||
|
||||
// Awaits the heartbeat loop and surfaces its failure (if any) to the caller.
// Cancellation is the normal shutdown path and is deliberately not an error.
private static async Task<Exception?> ObserveLeaseTaskAsync(Task heartbeatTask)
{
    try
    {
        await heartbeatTask.ConfigureAwait(false);
    }
    catch (OperationCanceledException)
    {
        return null;
    }
    catch (Exception ex)
    {
        return ex;
    }

    return null;
}
|
||||
|
||||
// Best-effort lease release: failures are returned (wrapped) rather than thrown
// so callers can fold them into the run outcome without losing the primary error.
// Uses CancellationToken.None so release still happens during shutdown.
private async Task<Exception?> TryReleaseLeaseAsync(JobLease lease, string kind)
{
    try
    {
        await _leaseStore.ReleaseAsync(lease.Key, _holderId, CancellationToken.None).ConfigureAwait(false);
    }
    catch (Exception ex)
    {
        return new LeaseMaintenanceException($"Failed to release lease for job '{kind}'.", ex);
    }

    return null;
}
|
||||
|
||||
// Folds up to two lease-related failures into one: pass-through when at most
// one occurred, AggregateException when both did.
private static Exception? CombineLeaseExceptions(Exception? first, Exception? second)
{
    return (first, second) switch
    {
        (null, _) => second,
        (_, null) => first,
        _ => new AggregateException(first, second),
    };
}
|
||||
|
||||
/// <summary>
/// Runs a job to completion on a background path: resolves the job from a DI
/// scope, executes it under the linked (timeout-aware) token, maintains the
/// distributed lease via a heartbeat task, then persists and reports the outcome.
/// The cleanup ordering in the finally block is deliberate: cancel heartbeat →
/// observe heartbeat failure → release lease → fold lease errors into status.
/// </summary>
private async Task ExecuteJobAsync(
    JobDefinition definition,
    JobLease lease,
    JobRunSnapshot run,
    IReadOnlyDictionary<string, object?> parameters,
    string trigger,
    CancellationTokenSource linkedTokenSource)
{
    // This method owns the linked CTS handed over by the trigger path.
    using (linkedTokenSource)
    {
        var cancellationToken = linkedTokenSource.Token;
        // Heartbeat runs concurrently with the job and stops when the job ends.
        using var heartbeatCts = CancellationTokenSource.CreateLinkedTokenSource(cancellationToken);
        var heartbeatTask = MaintainLeaseAsync(definition, lease, heartbeatCts.Token);

        using var activity = _diagnostics.StartExecutionActivity(run.Kind, trigger, run.RunId);
        activity?.SetTag("job.timeout_seconds", definition.Timeout.TotalSeconds);
        activity?.SetTag("job.lease_seconds", definition.LeaseDuration.TotalSeconds);
        activity?.SetTag("job.parameters_count", parameters.Count);
        activity?.SetTag("job.created_at", run.CreatedAt.UtcDateTime);
        activity?.SetTag("job.started_at", (run.StartedAt ?? run.CreatedAt).UtcDateTime);
        activity?.SetTag("job.parameters_hash", run.ParametersHash);

        _diagnostics.RecordRunStarted(run.Kind);

        // Outcome accumulators; the finally block may override them with lease failures.
        JobRunStatus finalStatus = JobRunStatus.Succeeded;
        string? error = null;
        Exception? executionException = null;
        JobRunSnapshot? completedSnapshot = null;
        Exception? leaseException = null;

        try
        {
            // Each run gets a fresh DI scope so scoped services are isolated per job.
            using var scope = _scopeFactory.CreateScope();
            var job = (IJob)scope.ServiceProvider.GetRequiredService(definition.JobType);
            var jobLogger = _loggerFactory.CreateLogger(definition.JobType);

            var context = new JobExecutionContext(
                run.RunId,
                run.Kind,
                trigger,
                parameters,
                scope.ServiceProvider,
                _timeProvider,
                jobLogger);

            await job.ExecuteAsync(context, cancellationToken).ConfigureAwait(false);
        }
        catch (OperationCanceledException oce)
        {
            // Includes timeout-driven cancellation from the linked CTS.
            finalStatus = JobRunStatus.Cancelled;
            error = oce.Message;
            executionException = oce;
        }
        catch (Exception ex)
        {
            finalStatus = JobRunStatus.Failed;
            error = ex.ToString();
            executionException = ex;
        }
        finally
        {
            // Stop heartbeating first so no renewal races the release below.
            heartbeatCts.Cancel();

            leaseException = await ObserveLeaseTaskAsync(heartbeatTask).ConfigureAwait(false);

            var releaseException = await TryReleaseLeaseAsync(lease, definition.Kind).ConfigureAwait(false);
            leaseException = CombineLeaseExceptions(leaseException, releaseException);

            if (leaseException is not null)
            {
                var leaseMessage = $"Lease maintenance failed: {leaseException.GetType().Name}: {leaseException.Message}";
                if (finalStatus != JobRunStatus.Failed)
                {
                    // A lease failure downgrades an otherwise successful/cancelled run.
                    finalStatus = JobRunStatus.Failed;
                    error = leaseMessage;
                    executionException = leaseException;
                }
                else
                {
                    // Already failed: append lease details without losing the original error.
                    error = string.IsNullOrWhiteSpace(error)
                        ? leaseMessage
                        : $"{error}{Environment.NewLine}{leaseMessage}";
                    executionException = executionException is null
                        ? leaseException
                        : new AggregateException(executionException, leaseException);
                }
            }
        }

        // Persist the terminal state with CancellationToken.None so completion
        // is recorded even during shutdown.
        completedSnapshot = await CompleteRunAsync(run.RunId, finalStatus, error, CancellationToken.None).ConfigureAwait(false);

        if (!string.IsNullOrWhiteSpace(error))
        {
            activity?.SetTag("job.error", error);
        }

        activity?.SetTag("job.status", finalStatus.ToString());

        var completedDuration = ResolveDuration(run, completedSnapshot);
        if (completedDuration.HasValue)
        {
            activity?.SetTag("job.duration_seconds", completedDuration.Value.TotalSeconds);
        }

        switch (finalStatus)
        {
            case JobRunStatus.Succeeded:
                activity?.SetStatus(ActivityStatusCode.Ok);
                _logger.LogInformation("Job {Kind} run {RunId} succeeded", run.Kind, run.RunId);
                break;
            case JobRunStatus.Cancelled:
                // Cancellation is an expected outcome, hence Ok with a note.
                activity?.SetStatus(ActivityStatusCode.Ok, "cancelled");
                _logger.LogWarning(executionException, "Job {Kind} run {RunId} cancelled", run.Kind, run.RunId);
                break;
            case JobRunStatus.Failed:
                activity?.SetStatus(ActivityStatusCode.Error, executionException?.Message ?? error);
                _logger.LogError(executionException, "Job {Kind} run {RunId} failed", run.Kind, run.RunId);
                break;
        }

        _diagnostics.RecordRunCompleted(run.Kind, finalStatus, completedDuration, error);
    }
}
|
||||
|
||||
/// <summary>
/// Periodically renews the distributed lease while the job runs. Heartbeats at
/// half the lease duration (minimum 1s). Cancellation exits quietly; any other
/// heartbeat failure is wrapped in <see cref="LeaseMaintenanceException"/> and
/// surfaced to the caller via the task's fault.
/// </summary>
private async Task MaintainLeaseAsync(JobDefinition definition, JobLease lease, CancellationToken cancellationToken)
{
    var leaseDuration = lease.LeaseDuration <= TimeSpan.Zero ? _options.DefaultLeaseDuration : lease.LeaseDuration;
    // Renew at half the TTL so a single missed beat does not lose the lease.
    var delay = TimeSpan.FromMilliseconds(Math.Max(1000, leaseDuration.TotalMilliseconds / 2));

    while (!cancellationToken.IsCancellationRequested)
    {
        try
        {
            await Task.Delay(delay, cancellationToken).ConfigureAwait(false);
        }
        catch (TaskCanceledException)
        {
            // Normal shutdown path: job finished and cancelled the heartbeat.
            break;
        }

        var now = _timeProvider.GetUtcNow();
        try
        {
            await _leaseStore.HeartbeatAsync(definition.LeaseKey, _holderId, leaseDuration, now, cancellationToken).ConfigureAwait(false);
        }
        catch (OperationCanceledException)
        {
            break;
        }
        catch (Exception ex)
        {
            // Real failure: fault the task so the executor can fail the run.
            throw new LeaseMaintenanceException($"Failed to heartbeat lease for job '{definition.Kind}'.", ex);
        }
    }
}
|
||||
|
||||
// Identifies this coordinator instance for lease ownership: "<host>:<pid>".
private static string BuildHolderId()
    => string.Concat(Environment.MachineName, ":", Environment.ProcessId.ToString());
|
||||
}
|
||||
|
||||
/// <summary>
/// Signals that distributed-lease upkeep (heartbeat or release) failed for a
/// job run; the original store failure is preserved as the inner exception.
/// </summary>
internal sealed class LeaseMaintenanceException : Exception
{
    public LeaseMaintenanceException(string message, Exception innerException)
        : base(message, innerException)
    {
    }
}
|
||||
|
||||
/// <summary>
/// Produces a stable fingerprint of job trigger parameters so identical
/// triggers can be correlated across runs.
/// </summary>
internal static class JobParametersHasher
{
    // Canonical serializer settings: camelCase property names, compact output.
    internal static readonly JsonSerializerOptions SerializerOptions = new()
    {
        PropertyNamingPolicy = JsonNamingPolicy.CamelCase,
        WriteIndented = false,
    };

    /// <summary>
    /// Lowercase hex SHA-256 over the canonical (ordinal key-sorted) JSON form
    /// of <paramref name="parameters"/>; null when there is nothing to hash.
    /// </summary>
    public static string? Compute(IReadOnlyDictionary<string, object?> parameters)
    {
        if (parameters is null || parameters.Count == 0)
        {
            return null;
        }

        var payload = JsonSerializer.Serialize(Sort(parameters), SerializerOptions);
        var digest = SHA256.HashData(Encoding.UTF8.GetBytes(payload));
        return Convert.ToHexString(digest).ToLowerInvariant();
    }

    // Ordinal key sort makes the serialized form independent of insertion order.
    private static SortedDictionary<string, object?> Sort(IReadOnlyDictionary<string, object?> parameters)
    {
        var sorted = new SortedDictionary<string, object?>(StringComparer.Ordinal);
        foreach (var (key, value) in parameters)
        {
            sorted[key] = value;
        }

        return sorted;
    }
}
|
||||
@@ -0,0 +1,12 @@
|
||||
namespace StellaOps.Feedser.Core.Jobs;
|
||||
|
||||
/// <summary>
/// Static registration data for a schedulable job kind.
/// </summary>
/// <param name="Kind">Unique job identifier used for lookups, leases, and metrics.</param>
/// <param name="JobType">The <see cref="IJob"/> implementation resolved from DI per run.</param>
/// <param name="Timeout">Maximum execution time before the run is cancelled.</param>
/// <param name="LeaseDuration">TTL of the distributed lease guarding concurrent runs.</param>
/// <param name="CronExpression">Optional cron schedule; null means trigger-only.</param>
/// <param name="Enabled">Whether the scheduler may fire this job.</param>
public sealed record JobDefinition(
    string Kind,
    Type JobType,
    TimeSpan Timeout,
    TimeSpan LeaseDuration,
    string? CronExpression,
    bool Enabled)
{
    // Distributed-lock key for this job kind; one lease key per kind means
    // at most one concurrent run per kind.
    public string LeaseKey => $"job:{Kind}";
}
|
||||
@@ -0,0 +1,171 @@
|
||||
using System.Diagnostics;
|
||||
using System.Diagnostics.Metrics;
|
||||
|
||||
namespace StellaOps.Feedser.Core.Jobs;
|
||||
|
||||
/// <summary>
/// Central telemetry surface for the job system: one ActivitySource for traces
/// and one Meter for metrics. The metric/activity names and units below are an
/// observability contract — dashboards and alerts depend on them.
/// </summary>
public sealed class JobDiagnostics : IDisposable
{
    public const string ActivitySourceName = "StellaOps.Feedser.Jobs";
    public const string MeterName = "StellaOps.Feedser.Jobs";
    public const string TriggerActivityName = "feedser.job.trigger";
    public const string ExecuteActivityName = "feedser.job.execute";
    public const string SchedulerActivityName = "feedser.scheduler.evaluate";

    private readonly Counter<long> _triggersAccepted;
    private readonly Counter<long> _triggersRejected;
    private readonly Counter<long> _runsCompleted;
    // Up/down counter: +1 on run start, -1 on completion.
    private readonly UpDownCounter<long> _runsActive;
    private readonly Histogram<double> _runDurationSeconds;
    private readonly Histogram<double> _schedulerSkewMilliseconds;

    public JobDiagnostics()
    {
        ActivitySource = new ActivitySource(ActivitySourceName);
        Meter = new Meter(MeterName);

        _triggersAccepted = Meter.CreateCounter<long>(
            name: "feedser.jobs.triggers.accepted",
            unit: "count",
            description: "Number of job trigger requests accepted for execution.");

        _triggersRejected = Meter.CreateCounter<long>(
            name: "feedser.jobs.triggers.rejected",
            unit: "count",
            description: "Number of job trigger requests rejected or ignored by the coordinator.");

        _runsCompleted = Meter.CreateCounter<long>(
            name: "feedser.jobs.runs.completed",
            unit: "count",
            description: "Number of job executions that have finished grouped by outcome.");

        _runsActive = Meter.CreateUpDownCounter<long>(
            name: "feedser.jobs.runs.active",
            unit: "count",
            description: "Current number of running job executions.");

        _runDurationSeconds = Meter.CreateHistogram<double>(
            name: "feedser.jobs.runs.duration",
            unit: "s",
            description: "Distribution of job execution durations in seconds.");

        _schedulerSkewMilliseconds = Meter.CreateHistogram<double>(
            name: "feedser.scheduler.skew",
            unit: "ms",
            description: "Difference between the intended and actual scheduler fire time in milliseconds.");
    }

    public ActivitySource ActivitySource { get; }

    public Meter Meter { get; }

    /// <summary>Starts a trigger-handling span; null when no listener is attached.</summary>
    public Activity? StartTriggerActivity(string kind, string trigger)
    {
        var activity = ActivitySource.StartActivity(TriggerActivityName, ActivityKind.Internal);
        if (activity is not null)
        {
            activity.SetTag("job.kind", kind);
            activity.SetTag("job.trigger", trigger);
        }

        return activity;
    }

    /// <summary>Starts a scheduler-evaluation span including the scheduled-vs-actual delay.</summary>
    public Activity? StartSchedulerActivity(string kind, DateTimeOffset scheduledFor, DateTimeOffset invokedAt)
    {
        var activity = ActivitySource.StartActivity(SchedulerActivityName, ActivityKind.Internal);
        if (activity is not null)
        {
            activity.SetTag("job.kind", kind);
            activity.SetTag("job.scheduled_for", scheduledFor.UtcDateTime);
            activity.SetTag("job.invoked_at", invokedAt.UtcDateTime);
            activity.SetTag("job.scheduler_delay_ms", (invokedAt - scheduledFor).TotalMilliseconds);
        }

        return activity;
    }

    /// <summary>Starts a job-execution span tagged with the run id.</summary>
    public Activity? StartExecutionActivity(string kind, string trigger, Guid runId)
    {
        var activity = ActivitySource.StartActivity(ExecuteActivityName, ActivityKind.Internal);
        if (activity is not null)
        {
            activity.SetTag("job.kind", kind);
            activity.SetTag("job.trigger", trigger);
            activity.SetTag("job.run_id", runId);
        }

        return activity;
    }

    public void RecordTriggerAccepted(string kind, string trigger)
    {
        var tags = new TagList
        {
            { "job.kind", kind },
            { "job.trigger", trigger },
        };
        _triggersAccepted.Add(1, tags);
    }

    // reason is a low-cardinality code, e.g. "already_running", "invalid_parameters".
    public void RecordTriggerRejected(string kind, string trigger, string reason)
    {
        var tags = new TagList
        {
            { "job.kind", kind },
            { "job.trigger", trigger },
            { "job.reason", reason },
        };
        _triggersRejected.Add(1, tags);
    }

    public void RecordRunStarted(string kind)
    {
        var tags = new TagList { { "job.kind", kind } };
        _runsActive.Add(1, tags);
    }

    /// <summary>
    /// Records a finished run: increments the completion counter, decrements the
    /// active gauge, and records duration when known.
    /// NOTE(review): tagging raw error text onto a counter may create unbounded
    /// metric cardinality — confirm this is intended.
    /// </summary>
    public void RecordRunCompleted(string kind, JobRunStatus status, TimeSpan? duration, string? error)
    {
        var outcome = status.ToString();

        var completionTags = new TagList
        {
            { "job.kind", kind },
            { "job.status", outcome },
        };

        if (!string.IsNullOrWhiteSpace(error))
        {
            completionTags.Add("job.error", error);
        }

        _runsCompleted.Add(1, completionTags);

        var activeTags = new TagList { { "job.kind", kind } };
        _runsActive.Add(-1, activeTags);

        if (duration.HasValue)
        {
            // Clamp to zero so clock skew never records a negative duration.
            var seconds = Math.Max(duration.Value.TotalSeconds, 0d);
            var durationTags = new TagList
            {
                { "job.kind", kind },
                { "job.status", outcome },
            };
            _runDurationSeconds.Record(seconds, durationTags);
        }
    }

    public void RecordSchedulerSkew(string kind, DateTimeOffset scheduledFor, DateTimeOffset invokedAt)
    {
        var skew = (invokedAt - scheduledFor).TotalMilliseconds;
        var tags = new TagList { { "job.kind", kind } };
        _schedulerSkewMilliseconds.Record(skew, tags);
    }

    public void Dispose()
    {
        ActivitySource.Dispose();
        Meter.Dispose();
    }
}
|
||||
@@ -0,0 +1,42 @@
|
||||
using Microsoft.Extensions.DependencyInjection;
|
||||
using Microsoft.Extensions.Logging;
|
||||
|
||||
namespace StellaOps.Feedser.Core.Jobs;
|
||||
|
||||
/// <summary>
/// Per-run ambient state handed to a job while it executes: identity (run id,
/// kind, trigger), normalized parameters, the run-scoped service provider,
/// the coordinator clock, and a logger bound to the job type.
/// </summary>
public sealed class JobExecutionContext
{
    public JobExecutionContext(
        Guid runId,
        string kind,
        string trigger,
        IReadOnlyDictionary<string, object?> parameters,
        IServiceProvider services,
        TimeProvider timeProvider,
        ILogger logger)
    {
        // Throw helpers express the same null contracts as explicit throws.
        ArgumentNullException.ThrowIfNull(parameters);
        ArgumentNullException.ThrowIfNull(services);
        ArgumentNullException.ThrowIfNull(timeProvider);
        ArgumentNullException.ThrowIfNull(logger);

        RunId = runId;
        Kind = kind;
        Trigger = trigger;
        Parameters = parameters;
        Services = services;
        TimeProvider = timeProvider;
        Logger = logger;
    }

    public Guid RunId { get; }

    public string Kind { get; }

    public string Trigger { get; }

    public IReadOnlyDictionary<string, object?> Parameters { get; }

    public IServiceProvider Services { get; }

    public TimeProvider TimeProvider { get; }

    public ILogger Logger { get; }

    // Convenience accessor over the run-scoped provider.
    public T GetRequiredService<T>() where T : notnull
        => Services.GetRequiredService<T>();
}
|
||||
@@ -0,0 +1,9 @@
|
||||
namespace StellaOps.Feedser.Core.Jobs;
|
||||
|
||||
/// <summary>
/// Snapshot of a distributed lease guarding a job kind against concurrent runs.
/// </summary>
/// <param name="Key">Lease key (see <c>JobDefinition.LeaseKey</c>).</param>
/// <param name="Holder">Identifier of the coordinator instance holding the lease.</param>
/// <param name="AcquiredAt">When the lease was first taken.</param>
/// <param name="HeartbeatAt">Timestamp of the most recent renewal.</param>
/// <param name="LeaseDuration">TTL granted on each acquisition/heartbeat.</param>
/// <param name="TtlAt">Instant after which the lease is considered expired.</param>
public sealed record JobLease(
    string Key,
    string Holder,
    DateTimeOffset AcquiredAt,
    DateTimeOffset HeartbeatAt,
    TimeSpan LeaseDuration,
    DateTimeOffset TtlAt);
|
||||
@@ -0,0 +1,6 @@
|
||||
namespace StellaOps.Feedser.Core.Jobs;
|
||||
|
||||
/// <summary>
/// Terminal state written to the run store when a job execution finishes.
/// </summary>
/// <param name="Status">Final outcome (Succeeded, Failed, or Cancelled).</param>
/// <param name="CompletedAt">Completion timestamp from the coordinator clock.</param>
/// <param name="Error">Human-readable failure detail; null on success.</param>
public sealed record JobRunCompletion(
    JobRunStatus Status,
    DateTimeOffset CompletedAt,
    string? Error);
|
||||
@@ -0,0 +1,10 @@
|
||||
namespace StellaOps.Feedser.Core.Jobs;
|
||||
|
||||
/// <summary>
/// Payload used to persist a new job run before execution starts.
/// </summary>
/// <param name="Kind">Job kind being executed.</param>
/// <param name="Trigger">What initiated the run (e.g. scheduler, API caller).</param>
/// <param name="Parameters">Normalized trigger parameters.</param>
/// <param name="ParametersHash">Canonical hash of the parameters; null when empty.</param>
/// <param name="Timeout">Effective execution timeout for this run, if any.</param>
/// <param name="LeaseDuration">Effective lease TTL for this run, if any.</param>
/// <param name="CreatedAt">Creation timestamp from the coordinator clock.</param>
public sealed record JobRunCreateRequest(
    string Kind,
    string Trigger,
    IReadOnlyDictionary<string, object?> Parameters,
    string? ParametersHash,
    TimeSpan? Timeout,
    TimeSpan? LeaseDuration,
    DateTimeOffset CreatedAt);
|
||||
@@ -0,0 +1,21 @@
|
||||
namespace StellaOps.Feedser.Core.Jobs;
|
||||
|
||||
/// <summary>
/// Immutable projection of a job run as stored in persistence.
/// </summary>
/// <param name="RunId">Unique identifier of this run.</param>
/// <param name="Kind">Job kind the run belongs to.</param>
/// <param name="Status">Current lifecycle status of the run.</param>
/// <param name="CreatedAt">When the run record was created.</param>
/// <param name="StartedAt">When execution actually began; null if not started.</param>
/// <param name="CompletedAt">When execution finished; null while in flight.</param>
/// <param name="Trigger">What initiated the run.</param>
/// <param name="ParametersHash">Canonical hash of the run's parameters, if any.</param>
/// <param name="Error">Failure detail for failed/cancelled runs.</param>
/// <param name="Timeout">Execution timeout applied to the run, if any.</param>
/// <param name="LeaseDuration">Lease TTL applied to the run, if any.</param>
/// <param name="Parameters">Normalized parameters the run was invoked with.</param>
public sealed record JobRunSnapshot(
    Guid RunId,
    string Kind,
    JobRunStatus Status,
    DateTimeOffset CreatedAt,
    DateTimeOffset? StartedAt,
    DateTimeOffset? CompletedAt,
    string Trigger,
    string? ParametersHash,
    string? Error,
    TimeSpan? Timeout,
    TimeSpan? LeaseDuration,
    IReadOnlyDictionary<string, object?> Parameters)
{
    // Wall-clock execution time; null until both start and completion are known.
    public TimeSpan? Duration => StartedAt is null || CompletedAt is null ? null : CompletedAt - StartedAt;
}
|
||||
@@ -0,0 +1,10 @@
|
||||
namespace StellaOps.Feedser.Core.Jobs;
|
||||
|
||||
/// <summary>
/// Lifecycle states of a job run. Pending and Running are transient;
/// Succeeded, Failed, and Cancelled are terminal.
/// </summary>
public enum JobRunStatus
{
    // Created but not yet started.
    Pending,
    // Currently executing.
    Running,
    // Finished without error.
    Succeeded,
    // Finished with an error (including lease-maintenance failures).
    Failed,
    // Stopped by cancellation or timeout.
    Cancelled,
}
|
||||
@@ -0,0 +1,47 @@
|
||||
using System;
|
||||
using Microsoft.Extensions.DependencyInjection;
|
||||
|
||||
namespace StellaOps.Feedser.Core.Jobs;
|
||||
|
||||
/// <summary>
/// Fluent registration surface for jobs: wires the job type into DI and adds
/// its <see cref="JobDefinition"/> to <see cref="JobSchedulerOptions"/>.
/// </summary>
public sealed class JobSchedulerBuilder
{
    private readonly IServiceCollection _services;

    public JobSchedulerBuilder(IServiceCollection services)
    {
        _services = services ?? throw new ArgumentNullException(nameof(services));
    }

    /// <summary>
    /// Registers <typeparamref name="TJob"/> under <paramref name="kind"/>.
    /// Timeout/lease fall back to the scheduler defaults when not supplied.
    /// Duplicate kinds surface as an error when options are materialized.
    /// </summary>
    public JobSchedulerBuilder AddJob<TJob>(
        string kind,
        string? cronExpression = null,
        TimeSpan? timeout = null,
        TimeSpan? leaseDuration = null,
        bool enabled = true)
        where TJob : class, IJob
    {
        ArgumentException.ThrowIfNullOrEmpty(kind);

        _services.AddTransient<TJob>();
        _services.Configure<JobSchedulerOptions>(options =>
        {
            if (options.Definitions.ContainsKey(kind))
            {
                throw new InvalidOperationException($"Job '{kind}' is already registered.");
            }

            var definition = new JobDefinition(
                kind,
                typeof(TJob),
                timeout ?? options.DefaultTimeout,
                leaseDuration ?? options.DefaultLeaseDuration,
                cronExpression,
                enabled);

            options.Definitions.Add(kind, definition);
        });

        return this;
    }
}
|
||||
@@ -0,0 +1,165 @@
|
||||
using Cronos;
|
||||
using System.Diagnostics;
|
||||
using Microsoft.Extensions.Hosting;
|
||||
using Microsoft.Extensions.Logging;
|
||||
using Microsoft.Extensions.Options;
|
||||
|
||||
namespace StellaOps.Feedser.Core.Jobs;
|
||||
|
||||
/// <summary>
|
||||
/// Background service that evaluates cron expressions for registered jobs and triggers them.
|
||||
/// </summary>
|
||||
public sealed class JobSchedulerHostedService : BackgroundService
|
||||
{
|
||||
private readonly IJobCoordinator _coordinator;
|
||||
private readonly JobSchedulerOptions _options;
|
||||
private readonly ILogger<JobSchedulerHostedService> _logger;
|
||||
private readonly TimeProvider _timeProvider;
|
||||
private readonly JobDiagnostics _diagnostics;
|
||||
private readonly Dictionary<string, CronExpression> _cronExpressions = new(StringComparer.Ordinal);
|
||||
private readonly Dictionary<string, DateTimeOffset> _nextOccurrences = new(StringComparer.Ordinal);
|
||||
|
||||
/// <summary>
/// Wires dependencies and eagerly parses every registered cron expression.
/// Invalid expressions are logged and skipped (the job simply never fires on
/// a schedule) rather than failing host startup.
/// </summary>
public JobSchedulerHostedService(
    IJobCoordinator coordinator,
    IOptions<JobSchedulerOptions> optionsAccessor,
    ILogger<JobSchedulerHostedService> logger,
    TimeProvider timeProvider,
    JobDiagnostics diagnostics)
{
    _coordinator = coordinator ?? throw new ArgumentNullException(nameof(coordinator));
    _options = (optionsAccessor ?? throw new ArgumentNullException(nameof(optionsAccessor))).Value;
    _logger = logger ?? throw new ArgumentNullException(nameof(logger));
    _timeProvider = timeProvider ?? throw new ArgumentNullException(nameof(timeProvider));
    _diagnostics = diagnostics ?? throw new ArgumentNullException(nameof(diagnostics));

    foreach (var definition in _options.Definitions.Values)
    {
        // Trigger-only jobs (no cron) are not scheduled here.
        if (string.IsNullOrWhiteSpace(definition.CronExpression))
        {
            continue;
        }

        try
        {
            var cron = CronExpression.Parse(definition.CronExpression!, CronFormat.Standard);
            _cronExpressions[definition.Kind] = cron;
        }
        catch (CronFormatException ex)
        {
            // Deliberately non-fatal: log and leave the job unscheduled.
            _logger.LogError(ex, "Invalid cron expression '{Cron}' for job {Kind}", definition.CronExpression, definition.Kind);
        }
    }
}
|
||||
|
||||
/// <summary>
/// Main scheduler loop: wakes for the earliest upcoming cron occurrence,
/// fires due jobs (fire-and-forget; the coordinator handles overlap via
/// leases), and sleeps again. Due-ness uses a 500 ms tolerance so slightly
/// late wakeups still fire the slot.
/// </summary>
protected override async Task ExecuteAsync(CancellationToken stoppingToken)
{
    if (_cronExpressions.Count == 0)
    {
        // Nothing to schedule; park until host shutdown.
        _logger.LogInformation("No cron-based jobs registered; scheduler idle.");
        await Task.Delay(Timeout.Infinite, stoppingToken).ConfigureAwait(false);
        return;
    }

    while (!stoppingToken.IsCancellationRequested)
    {
        var now = _timeProvider.GetUtcNow();
        var nextWake = now.AddMinutes(5); // default sleep when nothing scheduled

        foreach (var (kind, cron) in _cronExpressions)
        {
            // Disabled (or removed) definitions are skipped each pass, so
            // runtime toggles take effect without restarting.
            if (!_options.Definitions.TryGetValue(kind, out var definition) || !definition.Enabled)
            {
                continue;
            }

            var next = GetNextOccurrence(kind, cron, now);
            if (next <= now.AddMilliseconds(500))
            {
                // Fire-and-forget: overlap protection lives in the coordinator's lease.
                _ = TriggerJobAsync(kind, next, stoppingToken);
                // Advance past the just-fired slot (+1 s avoids re-selecting it).
                _nextOccurrences[kind] = GetNextOccurrence(kind, cron, now.AddSeconds(1));
                next = _nextOccurrences[kind];
            }

            if (next < nextWake)
            {
                nextWake = next;
            }
        }

        var delay = nextWake - now;
        // Floor of 1 s prevents a busy loop when an occurrence is imminent.
        if (delay < TimeSpan.FromSeconds(1))
        {
            delay = TimeSpan.FromSeconds(1);
        }

        try
        {
            await Task.Delay(delay, stoppingToken).ConfigureAwait(false);
        }
        catch (TaskCanceledException)
        {
            break;
        }
    }
}
|
||||
|
||||
/// <summary>
/// Returns the next occurrence for <paramref name="kind"/> after
/// <paramref name="reference"/>, reusing the cached value while it is still in
/// the future and recomputing (and re-caching) it otherwise.
/// </summary>
private DateTimeOffset GetNextOccurrence(string kind, CronExpression cron, DateTimeOffset reference)
{
    // Serve from cache while the cached occurrence has not elapsed yet.
    if (_nextOccurrences.TryGetValue(kind, out var pending) && pending > reference)
    {
        return pending;
    }

    var computed = cron.GetNextOccurrence(reference.UtcDateTime, TimeZoneInfo.Utc)
        // No future occurrence; schedule far in the future to avoid a tight loop.
        ?? reference.UtcDateTime.AddYears(100);

    var occurrence = new DateTimeOffset(DateTime.SpecifyKind(computed, DateTimeKind.Utc));
    _nextOccurrences[kind] = occurrence;
    return occurrence;
}
|
||||
|
||||
/// <summary>
/// Triggers one scheduled run via the coordinator, recording scheduler skew and an
/// activity with outcome/run/error tags. Failures are logged, never rethrown, so a
/// broken trigger cannot take down the scheduler loop.
/// </summary>
private async Task TriggerJobAsync(string kind, DateTimeOffset scheduledFor, CancellationToken stoppingToken)
{
    var invocationTime = _timeProvider.GetUtcNow();
    _diagnostics.RecordSchedulerSkew(kind, scheduledFor, invocationTime);

    using var activity = _diagnostics.StartSchedulerActivity(kind, scheduledFor, invocationTime);
    try
    {
        var triggerResult = await _coordinator.TriggerAsync(kind, parameters: null, trigger: "scheduler", stoppingToken).ConfigureAwait(false);

        activity?.SetTag("job.trigger.outcome", triggerResult.Outcome.ToString());
        if (triggerResult.Run is not null)
        {
            activity?.SetTag("job.run_id", triggerResult.Run.RunId);
        }

        if (!string.IsNullOrWhiteSpace(triggerResult.ErrorMessage))
        {
            activity?.SetTag("job.trigger.error", triggerResult.ErrorMessage);
        }

        if (triggerResult.Outcome == JobTriggerOutcome.Accepted)
        {
            activity?.SetStatus(ActivityStatusCode.Ok);
        }
        else
        {
            // Non-accepted outcomes are expected (already running, disabled, ...);
            // keep Ok status with the outcome as description and log at debug only.
            activity?.SetStatus(ActivityStatusCode.Ok, triggerResult.Outcome.ToString());
            _logger.LogDebug("Scheduler trigger for {Kind} resulted in {Outcome}", kind, triggerResult.Outcome);
        }
    }
    catch (Exception ex) when (!stoppingToken.IsCancellationRequested)
    {
        activity?.SetStatus(ActivityStatusCode.Error, ex.Message);
        _logger.LogError(ex, "Cron trigger for job {Kind} failed", kind);
    }
}
|
||||
}
|
||||
@@ -0,0 +1,12 @@
|
||||
namespace StellaOps.Feedser.Core.Jobs;
|
||||
|
||||
/// <summary>
/// Options for the job scheduler: registered job definitions plus the default
/// timeout and lease durations applied when a definition does not override them.
/// </summary>
public sealed class JobSchedulerOptions
{
    // Shared empty instance for callers that need scheduler-less defaults.
    public static JobSchedulerOptions Empty { get; } = new();

    // Job definitions keyed by job kind; ordinal comparison keeps lookups deterministic.
    public IDictionary<string, JobDefinition> Definitions { get; } = new Dictionary<string, JobDefinition>(StringComparer.Ordinal);

    // Default maximum run time for jobs that do not specify their own timeout.
    public TimeSpan DefaultTimeout { get; set; } = TimeSpan.FromMinutes(15);

    // Default lease duration for single-flight coordination when not overridden.
    public TimeSpan DefaultLeaseDuration { get; set; } = TimeSpan.FromMinutes(5);
}
|
||||
@@ -0,0 +1,40 @@
|
||||
namespace StellaOps.Feedser.Core.Jobs;
|
||||
|
||||
/// <summary>
/// Outcome of a job trigger request (see the <c>JobTriggerResult</c> factory
/// methods for which outcomes carry a run snapshot versus an error message).
/// </summary>
public enum JobTriggerOutcome
{
    Accepted,
    NotFound,
    Disabled,
    AlreadyRunning,
    LeaseRejected,
    InvalidParameters,
    Failed,
    Cancelled,
}
|
||||
|
||||
/// <summary>
/// Result of a trigger request: the outcome, the run snapshot when a run exists,
/// and an error message for non-success outcomes.
/// </summary>
public sealed record JobTriggerResult(JobTriggerOutcome Outcome, JobRunSnapshot? Run, string? ErrorMessage)
{
    public static JobTriggerResult Accepted(JobRunSnapshot run)
        => new(JobTriggerOutcome.Accepted, run, null);

    public static JobTriggerResult NotFound(string message)
        => new(JobTriggerOutcome.NotFound, null, message);

    public static JobTriggerResult Disabled(string message)
        => new(JobTriggerOutcome.Disabled, null, message);

    public static JobTriggerResult AlreadyRunning(string message)
        => new(JobTriggerOutcome.AlreadyRunning, null, message);

    public static JobTriggerResult LeaseRejected(string message)
        => new(JobTriggerOutcome.LeaseRejected, null, message);

    public static JobTriggerResult InvalidParameters(string message)
        => new(JobTriggerOutcome.InvalidParameters, null, message);

    // Failed/Cancelled carry the run snapshot because the job actually executed.
    public static JobTriggerResult Failed(JobRunSnapshot run, string error)
        => new(JobTriggerOutcome.Failed, run, error);

    public static JobTriggerResult Cancelled(JobRunSnapshot run, string error)
        => new(JobTriggerOutcome.Cancelled, run, error);
}
|
||||
@@ -0,0 +1,27 @@
|
||||
using Microsoft.Extensions.DependencyInjection;
|
||||
using Microsoft.Extensions.DependencyInjection.Extensions;
|
||||
using Microsoft.Extensions.Options;
|
||||
|
||||
namespace StellaOps.Feedser.Core.Jobs;
|
||||
|
||||
public static class JobServiceCollectionExtensions
{
    /// <summary>
    /// Registers the job scheduler core services (options, diagnostics, clock,
    /// coordinator, hosted scheduler loop) and returns a builder for further job wiring.
    /// Core registrations use TryAdd* so calling this method more than once does not
    /// create duplicate singletons (previously only TimeProvider was guarded).
    /// </summary>
    /// <param name="services">Service collection to register into; must not be null.</param>
    /// <param name="configure">Optional delegate applied to <see cref="JobSchedulerOptions"/>.</param>
    public static JobSchedulerBuilder AddJobScheduler(this IServiceCollection services, Action<JobSchedulerOptions>? configure = null)
    {
        ArgumentNullException.ThrowIfNull(services);

        var optionsBuilder = services.AddOptions<JobSchedulerOptions>();
        if (configure is not null)
        {
            optionsBuilder.Configure(configure);
        }

        // Direct options snapshot for consumers that do not want IOptions<T>.
        services.TryAddSingleton(sp => sp.GetRequiredService<IOptions<JobSchedulerOptions>>().Value);
        services.TryAddSingleton<JobDiagnostics>();
        services.TryAddSingleton(TimeProvider.System);
        services.TryAddSingleton<IJobCoordinator, JobCoordinator>();
        // AddHostedService is already idempotent per implementation type.
        services.AddHostedService<JobSchedulerHostedService>();

        return new JobSchedulerBuilder(services);
    }
}
|
||||
@@ -1,61 +0,0 @@
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.Reflection;
|
||||
using StellaOps.Plugin;
|
||||
|
||||
namespace StellaOps.Feedser.Core;
|
||||
|
||||
/// <summary>
/// Discovers connector and exporter plug-ins from Feedser assemblies already loaded
/// in the AppDomain and from assemblies on disk matching known file-name patterns.
/// </summary>
public static class PluginBootstrapper
{
    // File-name glob for connector plug-in assemblies next to the host binaries.
    private static readonly string[] ConnectorPatterns =
    {
        "StellaOps.Feedser.Source.*.dll"
    };

    // File-name glob for exporter plug-in assemblies.
    private static readonly string[] ExporterPatterns =
    {
        "StellaOps.Feedser.Exporter.*.dll"
    };

    /// <summary>
    /// Loads available connector plug-ins. When <paramref name="services"/> is null a
    /// no-op provider is used; <paramref name="baseDirectory"/> defaults to the app base.
    /// </summary>
    public static IReadOnlyList<IConnectorPlugin> LoadConnectorPlugins(IServiceProvider? services = null, string? baseDirectory = null)
    {
        services ??= NullServiceProvider.Instance;
        var catalog = BuildCatalog(baseDirectory, ConnectorPatterns);
        return catalog.GetAvailableConnectorPlugins(services);
    }

    /// <summary>Loads available exporter plug-ins; same defaulting as the connector overload.</summary>
    public static IReadOnlyList<IExporterPlugin> LoadExporterPlugins(IServiceProvider? services = null, string? baseDirectory = null)
    {
        services ??= NullServiceProvider.Instance;
        var catalog = BuildCatalog(baseDirectory, ExporterPatterns);
        return catalog.GetAvailableExporterPlugins(services);
    }

    // Builds a catalog from (a) already-loaded assemblies whose name starts with
    // "StellaOps.Feedser" and (b) on-disk assemblies matching the supplied patterns.
    private static PluginCatalog BuildCatalog(string? baseDirectory, IReadOnlyCollection<string> patterns)
    {
        var catalog = new PluginCatalog();

        foreach (var assembly in AppDomain.CurrentDomain.GetAssemblies())
        {
            if (assembly.FullName is { } name && name.StartsWith("StellaOps.Feedser", StringComparison.OrdinalIgnoreCase))
            {
                catalog.AddAssembly(assembly);
            }
        }

        baseDirectory ??= AppContext.BaseDirectory;
        foreach (var pattern in patterns)
        {
            catalog.AddFromDirectory(baseDirectory, pattern);
        }

        return catalog;
    }

    // Minimal IServiceProvider that resolves nothing; lets plug-ins be probed
    // without a real DI container.
    private sealed class NullServiceProvider : IServiceProvider
    {
        public static NullServiceProvider Instance { get; } = new();
        public object? GetService(Type serviceType) => null;
    }
}
|
||||
|
||||
@@ -1,19 +1,18 @@
|
||||
<Project Sdk="Microsoft.NET.Sdk">

  <PropertyGroup>
    <TargetFramework>net10.0</TargetFramework>
    <LangVersion>preview</LangVersion>
    <Nullable>enable</Nullable>
    <!-- ImplicitUsings was declared twice; kept a single declaration. -->
    <ImplicitUsings>enable</ImplicitUsings>
    <TreatWarningsAsErrors>true</TreatWarningsAsErrors>
  </PropertyGroup>

  <ItemGroup>
    <!-- StellaOps.Feedser.Models was referenced twice (here and in a trailing
         ItemGroup); the duplicate reference has been removed. -->
    <ProjectReference Include="../StellaOps.Feedser.Models/StellaOps.Feedser.Models.csproj" />
    <ProjectReference Include="../StellaOps.Feedser.Storage.Mongo/StellaOps.Feedser.Storage.Mongo.csproj" />
    <ProjectReference Include="../StellaOps.Feedser.Normalization/StellaOps.Feedser.Normalization.csproj" />
    <ProjectReference Include="../StellaOps.Feedser.Merge/StellaOps.Feedser.Merge.csproj" />
    <ProjectReference Include="../StellaOps.Feedser.Exporter.Json/StellaOps.Feedser.Exporter.Json.csproj" />
    <ProjectReference Include="../StellaOps.Feedser.Exporter.TrivyDb/StellaOps.Feedser.Exporter.TrivyDb.csproj" />
    <ProjectReference Include="../../__Libraries/StellaOps.Plugin/StellaOps.Plugin.csproj" />
    <PackageReference Include="Microsoft.Extensions.Logging.Abstractions" Version="8.0.0" />
    <PackageReference Include="Microsoft.Extensions.Options" Version="8.0.0" />
    <PackageReference Include="Microsoft.Extensions.Hosting.Abstractions" Version="8.0.0" />
    <PackageReference Include="Cronos" Version="0.10.0" />
  </ItemGroup>
</Project>
|
||||
|
||||
|
||||
14
src/StellaOps.Feedser/StellaOps.Feedser.Core/TASKS.md
Normal file
14
src/StellaOps.Feedser/StellaOps.Feedser.Core/TASKS.md
Normal file
@@ -0,0 +1,14 @@
|
||||
# TASKS
|
||||
| Task | Owner(s) | Depends on | Notes |
|
||||
|---|---|---|---|
|
||||
|JobCoordinator implementation (create/get/mark status)|BE-Core|Storage.Mongo|DONE – `JobCoordinator` drives Mongo-backed runs.|
|
||||
|Cron scheduling loop with TimeProvider|BE-Core|Core|DONE – `JobSchedulerHostedService` evaluates cron expressions.|
|
||||
|Single-flight/lease semantics|BE-Core|Storage.Mongo|DONE – lease acquisition backed by `MongoLeaseStore`.|
|
||||
|Trigger API contract (Result mapping)|BE-Core|WebService|DONE – `JobTriggerResult` outcomes map to HTTP statuses.|
|
||||
|Run telemetry enrichment|BE-Core|Observability|DONE – `JobDiagnostics` ties activities & counters into coordinator/scheduler paths.|
|
||||
|Deterministic params hashing|BE-Core|Core|DONE – `JobParametersHasher` creates SHA256 hash.|
|
||||
|Golden tests for timeout/cancel|QA|Core|DONE – JobCoordinatorTests cover cancellation timeout path.|
|
||||
|JobSchedulerBuilder options registry coverage|BE-Core|Core|TODO – verify cron/timeout/lease metadata persists for scheduler surfaces.|
|
||||
|Plugin discovery + DI glue with PluginHost|BE-Core|Plugin libs|TODO – auto-register job routines for connectors/exporters.|
|
||||
|Harden lease release error handling in JobCoordinator|BE-Core|Storage.Mongo|DONE – lease release failures now logged, wrapped, and drive run failure status; fire-and-forget execution guarded. Verified with `dotnet test --no-build --filter JobCoordinator`.|
|
||||
|Validate job trigger parameters for serialization|BE-Core|WebService|DONE – trigger parameters normalized/serialized with defensive checks returning InvalidParameters on failure. Full-suite `dotnet test --no-build` currently red from live connector fixture drift (Oracle/JVN/RedHat).|
|
||||
@@ -0,0 +1,23 @@
|
||||
# AGENTS
|
||||
## Role
|
||||
Optional exporter producing vuln-list-shaped JSON tree for downstream trivy-db builder or interoperability. Deterministic, provenance-preserving.
|
||||
## Scope
|
||||
- Transform canonical advisories into directory tree structure mirroring aquasecurity/vuln-list (by ecosystem/vendor/distro as applicable).
|
||||
- Sorting and serialization invariants: stable key order, newline policy, UTC ISO-8601.
|
||||
- Cursoring/incremental export: export_state tracks last advisory hash/time to avoid full rewrites.
|
||||
- Packaging: output directory under exports/json/<timestamp> with reproducible naming; optionally symlink latest.
|
||||
- Optional auxiliary index files (for example severity summaries) may be generated when explicitly requested, but must remain deterministic and avoid altering canonical payloads.
|
||||
## Participants
|
||||
- Storage.Mongo.AdvisoryStore as input; ExportState repository for cursors/digests.
|
||||
- Core scheduler runs JsonExportJob; Plugin DI wires JsonExporter + job.
|
||||
- TrivyDb exporter may consume the rendered tree in v0 (builder path) if configured.
|
||||
## Interfaces & contracts
|
||||
- Job kind: export:json (JsonExportJob).
|
||||
- Determinism: same inputs -> identical file bytes; hash snapshot persisted.
|
||||
- Provenance: include minimal provenance fields when helpful; keep identity stable.
|
||||
## In/Out of scope
|
||||
In: JSON rendering and layout; incremental/deterministic writes.
|
||||
Out: ORAS push and Trivy DB BoltDB writing (owned by Trivy exporter).
|
||||
## Observability & security expectations
|
||||
- Metrics: export.json.records, bytes, duration, delta.changed.
|
||||
- Logs: target path, record counts, digest; no sensitive data.
|
||||
@@ -1,25 +0,0 @@
|
||||
using System;
|
||||
using System.Threading;
|
||||
using System.Threading.Tasks;
|
||||
using StellaOps.Plugin;
|
||||
|
||||
namespace StellaOps.Feedser.Exporter.Json;
|
||||
|
||||
/// <summary>
/// Exporter plug-in entry point for the JSON exporter. Always available; currently
/// hands back a no-op stub exporter.
/// </summary>
public sealed class JsonExporterPlugin : IExporterPlugin
{
    public string Name => "json";

    public bool IsAvailable(IServiceProvider services) => true;

    public IFeedExporter Create(IServiceProvider services) => new StubExporter(Name);

    // Placeholder exporter whose ExportAsync completes immediately without side effects.
    private sealed class StubExporter(string name) : IFeedExporter
    {
        public string Name { get; } = name;

        public Task ExportAsync(IServiceProvider services, CancellationToken cancellationToken) => Task.CompletedTask;
    }
}
|
||||
|
||||
@@ -0,0 +1,52 @@
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.IO;
|
||||
using System.Linq;
|
||||
using System.Security.Cryptography;
|
||||
using System.Text;
|
||||
|
||||
namespace StellaOps.Feedser.Exporter.Json;
|
||||
|
||||
/// <summary>
/// Computes a deterministic SHA-256 digest over an export tree by hashing, in
/// ordinal path order, each file's normalized ('/'-separated) relative path
/// followed by its raw content bytes.
/// </summary>
public static class ExportDigestCalculator
{
    public static string ComputeTreeDigest(JsonExportResult result)
    {
        ArgumentNullException.ThrowIfNull(result);

        // IncrementalHash consumes the exact same byte sequence the previous
        // TransformBlock/TransformFinalBlock implementation hashed, so digests
        // remain stable across versions.
        using var hasher = IncrementalHash.CreateHash(HashAlgorithmName.SHA256);
        var buffer = new byte[128 * 1024];

        foreach (var relativePath in result.FilePaths.OrderBy(static path => path, StringComparer.Ordinal))
        {
            var normalized = relativePath.Replace("\\", "/");
            hasher.AppendData(Encoding.UTF8.GetBytes(normalized));

            using var stream = File.OpenRead(ResolveFullPath(result.ExportDirectory, normalized));
            int read;
            while ((read = stream.Read(buffer, 0, buffer.Length)) > 0)
            {
                hasher.AppendData(buffer, 0, read);
            }
        }

        var hex = Convert.ToHexString(hasher.GetHashAndReset()).ToLowerInvariant();
        return $"sha256:{hex}";
    }

    // Maps a normalized relative path onto the export root using platform separators.
    private static string ResolveFullPath(string root, string normalizedRelativePath)
    {
        var segments = normalizedRelativePath.Split('/', StringSplitOptions.RemoveEmptyEntries);
        var parts = new string[segments.Length + 1];
        parts[0] = root;
        segments.CopyTo(parts, 1);
        return Path.Combine(parts);
    }
}
|
||||
@@ -0,0 +1,28 @@
|
||||
using System;
|
||||
using System.Reflection;
|
||||
|
||||
namespace StellaOps.Feedser.Exporter.Json;
|
||||
|
||||
/// <summary>
/// Resolves a human-readable version string for the assembly containing the anchor
/// type, preferring informational version, then file version, then assembly version.
/// </summary>
public static class ExporterVersion
{
    /// <param name="anchor">Any type from the assembly whose version is wanted.</param>
    /// <returns>A non-empty version string; "0.0.0" when nothing is recorded.</returns>
    public static string GetVersion(Type anchor)
    {
        ArgumentNullException.ThrowIfNull(anchor);
        var assembly = anchor.Assembly;

        var informational = assembly.GetCustomAttribute<AssemblyInformationalVersionAttribute>()?.InformationalVersion;
        if (!string.IsNullOrWhiteSpace(informational))
        {
            return informational;
        }

        var fileVersion = assembly.GetCustomAttribute<AssemblyFileVersionAttribute>()?.Version;
        if (!string.IsNullOrWhiteSpace(fileVersion))
        {
            // IsNullOrWhiteSpace is [NotNullWhen(false)], so flow analysis proves
            // non-null here; the previous null-forgiving '!' was redundant and
            // inconsistent with the informational-version branch above.
            return fileVersion;
        }

        var version = assembly.GetName().Version;
        return version?.ToString() ?? "0.0.0";
    }
}
|
||||
@@ -0,0 +1,12 @@
|
||||
using StellaOps.Feedser.Models;
|
||||
|
||||
namespace StellaOps.Feedser.Exporter.Json;
|
||||
|
||||
/// <summary>
/// Maps a canonical advisory to its location inside the JSON export tree.
/// </summary>
public interface IJsonExportPathResolver
{
    /// <summary>
    /// Returns the relative path (using platform directory separators) for the supplied advisory.
    /// Path must not include the leading export root.
    /// </summary>
    string GetRelativePath(Advisory advisory);
}
|
||||
@@ -0,0 +1,37 @@
|
||||
using System;
|
||||
|
||||
namespace StellaOps.Feedser.Exporter.Json;
|
||||
|
||||
/// <summary>
|
||||
/// Metadata describing a single file produced by the JSON exporter.
|
||||
/// </summary>
|
||||
/// <summary>
/// Metadata describing a single file produced by the JSON exporter.
/// </summary>
public sealed class JsonExportFile
{
    /// <param name="relativePath">Path relative to the export root; non-empty.</param>
    /// <param name="length">File size in bytes; non-negative.</param>
    /// <param name="digest">Content digest string; non-empty.</param>
    /// <exception cref="ArgumentNullException">A string argument is null.</exception>
    /// <exception cref="ArgumentException">A string argument is empty.</exception>
    /// <exception cref="ArgumentOutOfRangeException"><paramref name="length"/> is negative.</exception>
    public JsonExportFile(string relativePath, long length, string digest)
    {
        // Throw helpers preserve the previous exception contract (ArgumentNullException
        // on null, ArgumentException on empty, ArgumentOutOfRangeException on negative
        // length, same check order) and match the style used elsewhere in the exporter.
        ArgumentException.ThrowIfNullOrEmpty(relativePath);
        ArgumentOutOfRangeException.ThrowIfNegative(length);
        ArgumentException.ThrowIfNullOrEmpty(digest);

        RelativePath = relativePath;
        Length = length;
        Digest = digest;
    }

    public string RelativePath { get; }

    public long Length { get; }

    public string Digest { get; }
}
|
||||
@@ -0,0 +1,30 @@
|
||||
using System;
|
||||
using System.Threading;
|
||||
using System.Threading.Tasks;
|
||||
using Microsoft.Extensions.Logging;
|
||||
using StellaOps.Feedser.Core.Jobs;
|
||||
|
||||
namespace StellaOps.Feedser.Exporter.Json;
|
||||
|
||||
/// <summary>
/// Scheduler job that delegates to <see cref="JsonFeedExporter"/> to produce the
/// JSON export tree.
/// </summary>
public sealed class JsonExportJob : IJob
{
    // Job kind registered with the scheduler.
    public const string JobKind = "export:json";
    // Defaults applied when the scheduler definition does not override them.
    public static readonly TimeSpan DefaultTimeout = TimeSpan.FromMinutes(10);
    public static readonly TimeSpan DefaultLeaseDuration = TimeSpan.FromMinutes(5);

    private readonly JsonFeedExporter _exporter;
    private readonly ILogger<JsonExportJob> _logger;

    public JsonExportJob(JsonFeedExporter exporter, ILogger<JsonExportJob> logger)
    {
        _exporter = exporter ?? throw new ArgumentNullException(nameof(exporter));
        _logger = logger ?? throw new ArgumentNullException(nameof(logger));
    }

    /// <summary>Runs one export pass; exceptions propagate to the job coordinator.</summary>
    public async Task ExecuteAsync(JobExecutionContext context, CancellationToken cancellationToken)
    {
        _logger.LogInformation("Executing JSON export job {RunId}", context.RunId);
        await _exporter.ExportAsync(context.Services, cancellationToken).ConfigureAwait(false);
        _logger.LogInformation("Completed JSON export job {RunId}", context.RunId);
    }
}
|
||||
@@ -0,0 +1,66 @@
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.IO;
|
||||
using System.Linq;
|
||||
using System.Text.Json;
|
||||
using System.Text.Json.Serialization;
|
||||
using System.Threading;
|
||||
using System.Threading.Tasks;
|
||||
|
||||
namespace StellaOps.Feedser.Exporter.Json;
|
||||
|
||||
/// <summary>
/// Writes a manifest.json into an export directory describing the export: id,
/// timestamp, tree digest, counters, and per-file metadata.
/// </summary>
internal static class JsonExportManifestWriter
{
    // Stable serializer settings; indentation keeps the manifest human-diffable.
    private static readonly JsonSerializerOptions SerializerOptions = new()
    {
        PropertyNamingPolicy = JsonNamingPolicy.CamelCase,
        DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingNull,
        WriteIndented = true,
    };

    /// <summary>Serializes the manifest for <paramref name="result"/> into its export directory.</summary>
    public static async Task WriteAsync(
        JsonExportResult result,
        string digest,
        string exporterVersion,
        CancellationToken cancellationToken)
    {
        ArgumentNullException.ThrowIfNull(result);
        ArgumentException.ThrowIfNullOrEmpty(digest);
        ArgumentException.ThrowIfNullOrEmpty(exporterVersion);

        // The export directory's leaf name doubles as the export identifier.
        var exportId = Path.GetFileName(result.ExportDirectory);
        // Normalize to '/' so manifests are byte-identical across platforms.
        var files = result.Files
            .Select(static file => new JsonExportManifestFile(file.RelativePath.Replace("\\", "/", StringComparison.Ordinal), file.Length, file.Digest))
            .ToArray();

        var manifest = new JsonExportManifest(
            exportId,
            result.ExportedAt.UtcDateTime,
            digest,
            result.AdvisoryCount,
            result.TotalBytes,
            files.Length,
            files,
            exporterVersion);

        var payload = JsonSerializer.SerializeToUtf8Bytes(manifest, SerializerOptions);
        var manifestPath = Path.Combine(result.ExportDirectory, "manifest.json");
        await File.WriteAllBytesAsync(manifestPath, payload, cancellationToken).ConfigureAwait(false);
        // Pin the manifest's mtime to the export timestamp for reproducible trees.
        File.SetLastWriteTimeUtc(manifestPath, result.ExportedAt.UtcDateTime);
    }

    // JsonPropertyOrder fixes serialized key order regardless of declaration changes.
    private sealed record JsonExportManifest(
        [property: JsonPropertyOrder(1)] string ExportId,
        [property: JsonPropertyOrder(2)] DateTime GeneratedAt,
        [property: JsonPropertyOrder(3)] string Digest,
        [property: JsonPropertyOrder(4)] int AdvisoryCount,
        [property: JsonPropertyOrder(5)] long TotalBytes,
        [property: JsonPropertyOrder(6)] int FileCount,
        [property: JsonPropertyOrder(7)] IReadOnlyList<JsonExportManifestFile> Files,
        [property: JsonPropertyOrder(8)] string ExporterVersion);

    private sealed record JsonExportManifestFile(
        [property: JsonPropertyOrder(1)] string Path,
        [property: JsonPropertyOrder(2)] long Bytes,
        [property: JsonPropertyOrder(3)] string Digest);
}
|
||||
@@ -0,0 +1,34 @@
|
||||
using System.IO;
|
||||
|
||||
namespace StellaOps.Feedser.Exporter.Json;
|
||||
|
||||
/// <summary>
|
||||
/// Configuration for JSON exporter output paths and determinism controls.
|
||||
/// </summary>
|
||||
public sealed class JsonExportOptions
|
||||
{
|
||||
/// <summary>
|
||||
/// Root directory where exports are written. Default "exports/json".
|
||||
/// </summary>
|
||||
public string OutputRoot { get; set; } = Path.Combine("exports", "json");
|
||||
|
||||
/// <summary>
|
||||
/// Format string applied to the export timestamp to produce the directory name.
|
||||
/// </summary>
|
||||
public string DirectoryNameFormat { get; set; } = "yyyyMMdd'T'HHmmss'Z'";
|
||||
|
||||
/// <summary>
|
||||
/// Optional static name for the symlink (or directory junction) pointing at the most recent export.
|
||||
/// </summary>
|
||||
public string LatestSymlinkName { get; set; } = "latest";
|
||||
|
||||
/// <summary>
|
||||
/// When true, attempts to re-point <see cref="LatestSymlinkName"/> after a successful export.
|
||||
/// </summary>
|
||||
public bool MaintainLatestSymlink { get; set; } = true;
|
||||
|
||||
/// <summary>
|
||||
/// Optional repository identifier recorded alongside export state metadata.
|
||||
/// </summary>
|
||||
public string? TargetRepository { get; set; }
|
||||
}
|
||||
@@ -0,0 +1,46 @@
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.Collections.Immutable;
|
||||
using System.Linq;
|
||||
|
||||
namespace StellaOps.Feedser.Exporter.Json;
|
||||
|
||||
/// <summary>
/// Immutable summary of a completed JSON export: output directory, timestamp,
/// per-file metadata, and aggregate counters.
/// </summary>
public sealed class JsonExportResult
{
    public JsonExportResult(
        string exportDirectory,
        DateTimeOffset exportedAt,
        IEnumerable<JsonExportFile> files,
        int advisoryCount,
        long totalBytes)
    {
        if (string.IsNullOrWhiteSpace(exportDirectory))
        {
            throw new ArgumentException("Export directory must be provided.", nameof(exportDirectory));
        }

        if (files is null)
        {
            throw new ArgumentNullException(nameof(files));
        }

        ExportDirectory = exportDirectory;
        ExportedAt = exportedAt;
        AdvisoryCount = advisoryCount;
        TotalBytes = totalBytes;

        // Drop null entries defensively; FilePaths mirrors Files for digest ordering.
        Files = files.Where(static file => file is not null).ToImmutableArray();
        FilePaths = Files.Select(static file => file.RelativePath).ToImmutableArray();
    }

    public string ExportDirectory { get; }

    public DateTimeOffset ExportedAt { get; }

    public ImmutableArray<JsonExportFile> Files { get; }

    public ImmutableArray<string> FilePaths { get; }

    public int AdvisoryCount { get; }

    public long TotalBytes { get; }
}
|
||||
@@ -0,0 +1,239 @@
|
||||
using System.Collections.Generic;
|
||||
using System.Globalization;
|
||||
using System.IO;
|
||||
using System.Runtime.CompilerServices;
|
||||
using System.Security.Cryptography;
|
||||
using System.Text;
|
||||
using System.Threading.Tasks;
|
||||
using StellaOps.Feedser.Models;
|
||||
|
||||
namespace StellaOps.Feedser.Exporter.Json;
|
||||
|
||||
/// <summary>
|
||||
/// Writes canonical advisory snapshots into a vuln-list style directory tree with deterministic ordering.
|
||||
/// </summary>
|
||||
public sealed class JsonExportSnapshotBuilder
|
||||
{
|
||||
// UTF-8 without BOM keeps exported file bytes identical across platforms and tools.
private static readonly Encoding Utf8NoBom = new UTF8Encoding(encoderShouldEmitUTF8Identifier: false);
private readonly JsonExportOptions _options;
private readonly IJsonExportPathResolver _pathResolver;

// Creates a builder that writes advisory snapshots per the supplied options,
// using the resolver to place each advisory in the tree.
public JsonExportSnapshotBuilder(JsonExportOptions options, IJsonExportPathResolver pathResolver)
{
    _options = options ?? throw new ArgumentNullException(nameof(options));
    _pathResolver = pathResolver ?? throw new ArgumentNullException(nameof(pathResolver));
}
|
||||
|
||||
/// <summary>
/// Convenience overload: wraps an in-memory advisory collection in an async stream
/// and delegates to the streaming writer.
/// </summary>
public Task<JsonExportResult> WriteAsync(
    IReadOnlyCollection<Advisory> advisories,
    DateTimeOffset exportedAt,
    string? exportName = null,
    CancellationToken cancellationToken = default)
{
    ArgumentNullException.ThrowIfNull(advisories);

    return WriteAsync(EnumerateAsync(advisories, cancellationToken), exportedAt, exportName, cancellationToken);
}
|
||||
|
||||
/// <summary>
/// Streams advisories into a fresh export directory, writing one deterministic JSON
/// file per advisory with pinned timestamps, and returns per-file metadata plus
/// aggregate counters. Any pre-existing directory for the same export name is
/// deleted and rebuilt wholesale.
/// </summary>
public async Task<JsonExportResult> WriteAsync(
    IAsyncEnumerable<Advisory> advisories,
    DateTimeOffset exportedAt,
    string? exportName = null,
    CancellationToken cancellationToken = default)
{
    if (advisories is null)
    {
        throw new ArgumentNullException(nameof(advisories));
    }

    // Directory name defaults to the UTC timestamp formatted per options.
    var exportDirectoryName = exportName ?? exportedAt.UtcDateTime.ToString(_options.DirectoryNameFormat, CultureInfo.InvariantCulture);
    if (string.IsNullOrWhiteSpace(exportDirectoryName))
    {
        throw new InvalidOperationException("Export directory name resolved to an empty string.");
    }

    var exportRoot = EnsureDirectoryExists(Path.GetFullPath(_options.OutputRoot));
    TrySetDirectoryTimestamp(exportRoot, exportedAt);
    var exportDirectory = Path.Combine(exportRoot, exportDirectoryName);

    // Re-running the same export replaces the previous tree entirely.
    if (Directory.Exists(exportDirectory))
    {
        Directory.Delete(exportDirectory, recursive: true);
    }

    Directory.CreateDirectory(exportDirectory);
    TrySetDirectoryTimestamp(exportDirectory, exportedAt);

    // Case-insensitive duplicate detection — presumably to guard against
    // collisions on case-insensitive filesystems; confirm intent.
    var seen = new HashSet<string>(StringComparer.OrdinalIgnoreCase);
    var files = new List<JsonExportFile>();
    long totalBytes = 0L;
    var advisoryCount = 0;

    await foreach (var advisory in advisories.WithCancellation(cancellationToken))
    {
        cancellationToken.ThrowIfCancellationRequested();

        advisoryCount++;
        var entry = Resolve(advisory);
        if (!seen.Add(entry.RelativePath))
        {
            throw new InvalidOperationException($"Multiple advisories resolved to the same path '{entry.RelativePath}'.");
        }

        var destination = Combine(exportDirectory, entry.Segments);
        var destinationDirectory = Path.GetDirectoryName(destination);
        if (!string.IsNullOrEmpty(destinationDirectory))
        {
            EnsureDirectoryExists(destinationDirectory);
            TrySetDirectoryTimestamp(destinationDirectory, exportedAt);
        }
        // Serialize the advisory and write it with a pinned mtime for reproducibility.
        var payload = SnapshotSerializer.ToSnapshot(entry.Advisory);
        var bytes = Utf8NoBom.GetBytes(payload);

        await File.WriteAllBytesAsync(destination, bytes, cancellationToken).ConfigureAwait(false);
        File.SetLastWriteTimeUtc(destination, exportedAt.UtcDateTime);

        var digest = ComputeDigest(bytes);
        files.Add(new JsonExportFile(entry.RelativePath, bytes.LongLength, digest));
        totalBytes += bytes.LongLength;
    }

    // Stable ordinal ordering keeps the result (and downstream digests) deterministic.
    files.Sort(static (left, right) => string.CompareOrdinal(left.RelativePath, right.RelativePath));

    return new JsonExportResult(exportDirectory, exportedAt, files, advisoryCount, totalBytes);
}
|
||||
|
||||
// Adapts a synchronous advisory sequence to IAsyncEnumerable so both WriteAsync
// overloads share one streaming implementation.
private static async IAsyncEnumerable<Advisory> EnumerateAsync(
    IEnumerable<Advisory> advisories,
    [EnumeratorCancellation] CancellationToken cancellationToken)
{
    foreach (var advisory in advisories)
    {
        cancellationToken.ThrowIfCancellationRequested();
        yield return advisory;
        // Yield control so large collections do not monopolize the caller's thread.
        await Task.Yield();
    }
}
|
||||
|
||||
// Creates the directory if needed and returns the same path; rejects blank paths.
private static string EnsureDirectoryExists(string directory)
{
    if (string.IsNullOrWhiteSpace(directory))
    {
        throw new ArgumentException("Directory path must be provided.", nameof(directory));
    }

    Directory.CreateDirectory(directory);
    return directory;
}
|
||||
|
||||
// Joins the export root with pre-sanitized path segments using platform separators.
private static string Combine(string root, IReadOnlyList<string> segments)
{
    var parts = new List<string>(segments.Count + 1) { root };
    for (var index = 0; index < segments.Count; index++)
    {
        parts.Add(segments[index]);
    }

    return Path.Combine(parts.ToArray());
}
|
||||
|
||||
// Best-effort: aligns a directory's mtime with the export timestamp. Failures are
// swallowed because content determinism does not depend on directory timestamps.
private static void TrySetDirectoryTimestamp(string directory, DateTimeOffset timestamp)
{
    try
    {
        Directory.SetLastWriteTimeUtc(directory, timestamp.UtcDateTime);
    }
    catch (Exception ex) when (ex is IOException or UnauthorizedAccessException or PlatformNotSupportedException)
    {
        // Same exception set the previous three catch blocks ignored: I/O errors,
        // permission issues, and platforms that do not support timestamp updates.
    }
}
|
||||
|
||||
// Resolves an advisory to its normalized ('/'-joined) relative path plus the
// individual sanitized segments.
private PathResolution Resolve(Advisory advisory)
{
    ArgumentNullException.ThrowIfNull(advisory);

    var segments = NormalizeRelativePath(_pathResolver.GetRelativePath(advisory));
    return new PathResolution(advisory, string.Join('/', segments), segments);
}
|
||||
|
||||
/// <summary>
/// Validates a resolver-produced relative path and splits it into sanitized segments.
/// Rejects empty, rooted, and traversal ('.'/'..') paths.
/// </summary>
private static string[] NormalizeRelativePath(string relativePath)
{
    if (string.IsNullOrWhiteSpace(relativePath))
    {
        throw new InvalidOperationException("Path resolver returned an empty path.");
    }

    if (Path.IsPathRooted(relativePath))
    {
        throw new InvalidOperationException("Path resolver returned an absolute path; only relative paths are supported.");
    }

    // Accept either separator style; drop empty segments (e.g. doubled slashes).
    var pieces = relativePath.Split(new[] { '/', '\\' }, StringSplitOptions.RemoveEmptyEntries);
    if (pieces.Length == 0)
    {
        throw new InvalidOperationException("Path resolver produced no path segments.");
    }

    var sanitized = new string[pieces.Length];
    var index = 0;
    foreach (var piece in pieces)
    {
        if (piece is "." or "..")
        {
            throw new InvalidOperationException("Relative paths cannot include '.' or '..' segments.");
        }

        sanitized[index++] = SanitizeSegment(piece);
    }

    return sanitized;
}
|
||||
|
||||
/// <summary>
/// Replaces path separators and filesystem-invalid characters with '_', trims whitespace,
/// and falls back to "_" so the segment is never empty.
/// </summary>
private static string SanitizeSegment(string segment)
{
    var invalid = Path.GetInvalidFileNameChars();
    var chars = new char[segment.Length];
    for (var i = 0; i < segment.Length; i++)
    {
        var ch = segment[i];
        var forbidden = ch == '/' || ch == '\\' || Array.IndexOf(invalid, ch) >= 0;
        chars[i] = forbidden ? '_' : ch;
    }

    var trimmed = new string(chars).Trim();
    return trimmed.Length == 0 ? "_" : trimmed;
}
|
||||
|
||||
// Pairs an advisory with its normalized ('/'-joined) relative path and the individual segments.
private sealed record PathResolution(Advisory Advisory, string RelativePath, IReadOnlyList<string> Segments);
|
||||
|
||||
/// <summary>
/// Computes the SHA-256 of <paramref name="payload"/> as a lowercase "sha256:&lt;hex&gt;" string.
/// </summary>
private static string ComputeDigest(ReadOnlySpan<byte> payload)
    => $"sha256:{Convert.ToHexString(SHA256.HashData(payload)).ToLowerInvariant()}";
|
||||
}
|
||||
@@ -0,0 +1,59 @@
|
||||
using System;
|
||||
using System.IO;
|
||||
using Microsoft.Extensions.Configuration;
|
||||
using Microsoft.Extensions.DependencyInjection;
|
||||
using Microsoft.Extensions.DependencyInjection.Extensions;
|
||||
using Microsoft.Extensions.Options;
|
||||
using StellaOps.DependencyInjection;
|
||||
using StellaOps.Feedser.Core.Jobs;
|
||||
using StellaOps.Feedser.Storage.Mongo.Exporting;
|
||||
|
||||
namespace StellaOps.Feedser.Exporter.Json;
|
||||
|
||||
/// <summary>
/// Wires the JSON exporter into the host container: binds options, registers default
/// services, and adds the export job definition to the job scheduler.
/// </summary>
public sealed class JsonExporterDependencyInjectionRoutine : IDependencyInjectionRoutine
{
    // Configuration section the JSON exporter options are bound from.
    private const string ConfigurationSection = "feedser:exporters:json";

    /// <summary>
    /// Registers exporter services. TryAdd* is used for the path resolver and state
    /// manager so a host-supplied implementation takes precedence over these defaults.
    /// </summary>
    public IServiceCollection Register(IServiceCollection services, IConfiguration configuration)
    {
        ArgumentNullException.ThrowIfNull(services);
        ArgumentNullException.ThrowIfNull(configuration);

        services.TryAddSingleton<IJsonExportPathResolver, VulnListJsonExportPathResolver>();
        services.TryAddSingleton<ExportStateManager>();

        services.AddOptions<JsonExportOptions>()
            .Bind(configuration.GetSection(ConfigurationSection))
            .PostConfigure(static options =>
            {
                // Supply defaults when configuration leaves these unset.
                if (string.IsNullOrWhiteSpace(options.OutputRoot))
                {
                    options.OutputRoot = Path.Combine("exports", "json");
                }

                if (string.IsNullOrWhiteSpace(options.DirectoryNameFormat))
                {
                    // Produces directory names like 20240131T120000Z.
                    options.DirectoryNameFormat = "yyyyMMdd'T'HHmmss'Z'";
                }
            });

        services.AddSingleton<JsonFeedExporter>();
        services.AddTransient<JsonExportJob>();

        services.PostConfigure<JobSchedulerOptions>(options =>
        {
            // Register the JSON export job unless the host already defined one.
            if (!options.Definitions.ContainsKey(JsonExportJob.JobKind))
            {
                options.Definitions[JsonExportJob.JobKind] = new JobDefinition(
                    JsonExportJob.JobKind,
                    typeof(JsonExportJob),
                    JsonExportJob.DefaultTimeout,
                    JsonExportJob.DefaultLeaseDuration,
                    null,
                    true);
            }
        });

        return services;
    }
}
|
||||
@@ -0,0 +1,23 @@
|
||||
using System;
|
||||
using Microsoft.Extensions.DependencyInjection;
|
||||
using StellaOps.Feedser.Storage.Mongo.Advisories;
|
||||
using StellaOps.Plugin;
|
||||
|
||||
namespace StellaOps.Feedser.Exporter.Json;
|
||||
|
||||
/// <summary>
/// Plugin entry point exposing the JSON feed exporter to the plugin host.
/// </summary>
public sealed class JsonExporterPlugin : IExporterPlugin
{
    /// <summary>Plugin name; mirrors <see cref="JsonFeedExporter.ExporterName"/>.</summary>
    public string Name => JsonFeedExporter.ExporterName;

    /// <summary>
    /// The exporter is only usable when an <see cref="IAdvisoryStore"/> is registered.
    /// </summary>
    public bool IsAvailable(IServiceProvider services)
    {
        ArgumentNullException.ThrowIfNull(services);
        var advisoryStore = services.GetService<IAdvisoryStore>();
        return advisoryStore is not null;
    }

    /// <summary>
    /// Constructs a <see cref="JsonFeedExporter"/>, resolving its dependencies from the container.
    /// </summary>
    public IFeedExporter Create(IServiceProvider services)
    {
        ArgumentNullException.ThrowIfNull(services);
        var exporter = ActivatorUtilities.CreateInstance<JsonFeedExporter>(services);
        return exporter;
    }
}
|
||||
@@ -0,0 +1,150 @@
|
||||
using System;
|
||||
using System.Globalization;
|
||||
using System.IO;
|
||||
using Microsoft.Extensions.Logging;
|
||||
using Microsoft.Extensions.Options;
|
||||
using StellaOps.Feedser.Storage.Mongo.Advisories;
|
||||
using StellaOps.Feedser.Storage.Mongo.Exporting;
|
||||
using StellaOps.Plugin;
|
||||
|
||||
namespace StellaOps.Feedser.Exporter.Json;
|
||||
|
||||
/// <summary>
/// Exports the canonical advisory set as a deterministic JSON tree, records export state
/// (digest/cursor), writes a manifest, and optionally maintains a "latest" symlink.
/// An export whose tree digest matches the previous run is discarded without updating state.
/// </summary>
public sealed class JsonFeedExporter : IFeedExporter
{
    // Public exporter name (plugin-facing) and the identifier used for export-state records.
    public const string ExporterName = "json";
    public const string ExporterId = "export:json";

    private readonly IAdvisoryStore _advisoryStore;
    private readonly JsonExportOptions _options;
    private readonly IJsonExportPathResolver _pathResolver;
    private readonly ExportStateManager _stateManager;
    private readonly ILogger<JsonFeedExporter> _logger;
    private readonly TimeProvider _timeProvider;   // injectable for testability; defaults to system clock
    private readonly string _exporterVersion;

    public JsonFeedExporter(
        IAdvisoryStore advisoryStore,
        IOptions<JsonExportOptions> options,
        IJsonExportPathResolver pathResolver,
        ExportStateManager stateManager,
        ILogger<JsonFeedExporter> logger,
        TimeProvider? timeProvider = null)
    {
        _advisoryStore = advisoryStore ?? throw new ArgumentNullException(nameof(advisoryStore));
        _options = options?.Value ?? throw new ArgumentNullException(nameof(options));
        _pathResolver = pathResolver ?? throw new ArgumentNullException(nameof(pathResolver));
        _stateManager = stateManager ?? throw new ArgumentNullException(nameof(stateManager));
        _logger = logger ?? throw new ArgumentNullException(nameof(logger));
        _timeProvider = timeProvider ?? TimeProvider.System;
        _exporterVersion = ExporterVersion.GetVersion(typeof(JsonFeedExporter));
    }

    public string Name => ExporterName;

    /// <summary>
    /// Runs one full export: streams advisories into a snapshot, computes the tree digest,
    /// and either discards the run (digest unchanged) or persists state + manifest.
    /// </summary>
    public async Task ExportAsync(IServiceProvider services, CancellationToken cancellationToken)
    {
        var exportedAt = _timeProvider.GetUtcNow();
        // Export id doubles as the timestamped directory name (format from options).
        var exportId = exportedAt.ToString(_options.DirectoryNameFormat, CultureInfo.InvariantCulture);
        var exportRoot = Path.GetFullPath(_options.OutputRoot);

        _logger.LogInformation("Starting JSON export {ExportId}", exportId);

        // Prior state is fetched up front to compare digests after the tree is written.
        var existingState = await _stateManager.GetAsync(ExporterId, cancellationToken).ConfigureAwait(false);

        var builder = new JsonExportSnapshotBuilder(_options, _pathResolver);
        var advisoryStream = _advisoryStore.StreamAsync(cancellationToken);
        var result = await builder.WriteAsync(advisoryStream, exportedAt, exportId, cancellationToken).ConfigureAwait(false);

        var digest = ExportDigestCalculator.ComputeTreeDigest(result);
        _logger.LogInformation(
            "JSON export {ExportId} wrote {FileCount} files ({Bytes} bytes) covering {AdvisoryCount} advisories with digest {Digest}",
            exportId,
            result.Files.Length,
            result.TotalBytes,
            result.AdvisoryCount,
            digest);

        // Identical tree digest means nothing changed since the last export:
        // drop the freshly written directory and keep the previous state record.
        if (existingState is not null && string.Equals(existingState.LastFullDigest, digest, StringComparison.Ordinal))
        {
            _logger.LogInformation("JSON export {ExportId} produced unchanged digest; skipping state update.", exportId);
            TryDeleteDirectory(result.ExportDirectory);
            return;
        }

        // NOTE(review): the digest is also stored as the cursor — confirm downstream
        // consumers expect cursor == digest for full exports.
        await _stateManager.StoreFullExportAsync(
            ExporterId,
            exportId,
            digest,
            cursor: digest,
            targetRepository: _options.TargetRepository,
            exporterVersion: _exporterVersion,
            cancellationToken: cancellationToken).ConfigureAwait(false);

        await JsonExportManifestWriter.WriteAsync(result, digest, _exporterVersion, cancellationToken).ConfigureAwait(false);

        if (_options.MaintainLatestSymlink)
        {
            TryUpdateLatestSymlink(exportRoot, result.ExportDirectory);
        }
    }

    /// <summary>
    /// Points the configured "latest" symlink at the new export directory; failures are
    /// logged, not thrown, since the export itself already succeeded.
    /// </summary>
    private void TryUpdateLatestSymlink(string exportRoot, string exportDirectory)
    {
        if (string.IsNullOrWhiteSpace(_options.LatestSymlinkName))
        {
            return;
        }

        var latestPath = Path.Combine(exportRoot, _options.LatestSymlinkName);

        try
        {
            // Remove any previous pointer (directory, file, or symlink) before recreating it.
            if (Directory.Exists(latestPath) || File.Exists(latestPath))
            {
                TryRemoveExistingPointer(latestPath);
            }

            Directory.CreateSymbolicLink(latestPath, exportDirectory);
            _logger.LogDebug("Updated latest JSON export pointer to {Target}", exportDirectory);
        }
        catch (Exception ex) when (ex is IOException or UnauthorizedAccessException or PlatformNotSupportedException)
        {
            _logger.LogWarning(ex, "Failed to update latest JSON export pointer at {LatestPath}", latestPath);
        }
    }

    /// <summary>Deletes the existing "latest" pointer, whether it is a directory or a file.</summary>
    private void TryRemoveExistingPointer(string latestPath)
    {
        try
        {
            var attributes = File.GetAttributes(latestPath);
            if (attributes.HasFlag(FileAttributes.Directory))
            {
                // Non-recursive: a symlinked directory is removed as a link, not its target.
                Directory.Delete(latestPath, recursive: false);
            }
            else
            {
                File.Delete(latestPath);
            }
        }
        catch (Exception ex) when (ex is IOException or UnauthorizedAccessException)
        {
            _logger.LogWarning(ex, "Failed to remove existing latest pointer {LatestPath}", latestPath);
        }
    }

    /// <summary>Best-effort recursive delete of an export directory whose digest was unchanged.</summary>
    private void TryDeleteDirectory(string path)
    {
        try
        {
            if (Directory.Exists(path))
            {
                Directory.Delete(path, recursive: true);
            }
        }
        catch (Exception ex) when (ex is IOException or UnauthorizedAccessException)
        {
            _logger.LogWarning(ex, "Failed to remove unchanged export directory {ExportDirectory}", path);
        }
    }
}
|
||||
@@ -1,14 +1,22 @@
|
||||
<?xml version='1.0' encoding='utf-8'?>
<Project Sdk="Microsoft.NET.Sdk">

  <PropertyGroup>
    <TargetFramework>net10.0</TargetFramework>
    <LangVersion>preview</LangVersion>
    <Nullable>enable</Nullable>
    <ImplicitUsings>enable</ImplicitUsings>
    <TreatWarningsAsErrors>true</TreatWarningsAsErrors>
  </PropertyGroup>

  <ItemGroup>
    <!-- One reference per project; duplicate Models/Plugin entries with mixed
         '/' and '\' separators trigger MSBuild duplicate-reference warnings
         (errors under -warnaserror). Backslash style matches sibling projects. -->
    <ProjectReference Include="..\StellaOps.Feedser.Models\StellaOps.Feedser.Models.csproj" />
    <ProjectReference Include="..\StellaOps.Feedser.Normalization\StellaOps.Feedser.Normalization.csproj" />
    <ProjectReference Include="..\StellaOps.Feedser.Storage.Mongo\StellaOps.Feedser.Storage.Mongo.csproj" />
    <ProjectReference Include="..\..\__Libraries\StellaOps.Plugin\StellaOps.Plugin.csproj" />
    <ProjectReference Include="..\..\__Libraries\StellaOps.DependencyInjection\StellaOps.DependencyInjection.csproj" />
  </ItemGroup>

  <ItemGroup>
    <PackageReference Include="Microsoft.Extensions.Logging.Abstractions" Version="8.0.0" />
    <PackageReference Include="Microsoft.Extensions.Options" Version="8.0.0" />
    <PackageReference Include="Microsoft.Extensions.Options.ConfigurationExtensions" Version="8.0.0" />
  </ItemGroup>

</Project>
|
||||
|
||||
|
||||
@@ -0,0 +1,11 @@
|
||||
# TASKS
|
||||
| Task | Owner(s) | Depends on | Notes |
|
||||
|---|---|---|---|
|
||||
|Directory layout strategy (vuln-list mirror)|BE-Export|Models|DONE – `VulnListJsonExportPathResolver` maps CVE, GHSA, distro, and vendor identifiers into vuln-list style paths.|
|
||||
|Deterministic serializer|BE-Export|Models|DONE – Canonical serializer + snapshot builder emit stable JSON across runs.|
|
||||
|ExportState read/write|BE-Export|Storage.Mongo|DONE – `JsonFeedExporter` reads prior state, stores digests/cursors, and skips unchanged exports.|
|
||||
|JsonExportJob wiring|BE-Export|Core|DONE – Job scheduler options now configurable via DI; JSON job registered with scheduler.|
|
||||
|Snapshot tests for file tree|QA|Exporters|DONE – Added resolver/exporter tests asserting tree layout and deterministic behavior.|
|
||||
|Parity smoke vs upstream vuln-list|QA|Exporters|DONE – `JsonExporterParitySmokeTests` covers common ecosystems against vuln-list layout.|
|
||||
|Stream advisories during export|BE-Export|Storage.Mongo|DONE – exporter + streaming-only test ensures single enumeration and per-file digest capture.|
|
||||
|Emit export manifest with digest metadata|BE-Export|Exporters|DONE – manifest now includes per-file digests/sizes alongside tree digest.|
|
||||
@@ -0,0 +1,455 @@
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.IO;
|
||||
using System.Linq;
|
||||
using System.Text.RegularExpressions;
|
||||
using StellaOps.Feedser.Models;
|
||||
using StellaOps.Feedser.Normalization.Identifiers;
|
||||
|
||||
namespace StellaOps.Feedser.Exporter.Json;
|
||||
|
||||
/// <summary>
/// Path resolver approximating the directory layout used by aquasecurity/vuln-list.
/// Handles common vendor, distro, and ecosystem shapes with deterministic fallbacks.
/// </summary>
public sealed class VulnListJsonExportPathResolver : IJsonExportPathResolver
{
    // Identifier shapes that map to dedicated directory layouts. All patterns are
    // anchored, compiled, culture-invariant, and case-insensitive.
    private static readonly Regex CvePattern = new("^CVE-(?<year>\\d{4})-(?<id>\\d{4,})$", RegexOptions.Compiled | RegexOptions.CultureInvariant | RegexOptions.IgnoreCase);
    private static readonly Regex GhsaPattern = new("^GHSA-[0-9a-z]{4}-[0-9a-z]{4}-[0-9a-z]{4}$", RegexOptions.Compiled | RegexOptions.CultureInvariant | RegexOptions.IgnoreCase);
    private static readonly Regex UsnPattern = new("^USN-(?<id>\\d+-\\d+)(?<suffix>[a-z])?$", RegexOptions.Compiled | RegexOptions.CultureInvariant | RegexOptions.IgnoreCase);
    private static readonly Regex DebianPattern = new("^(?<prefix>DLA|DSA|ELA)-(?<id>\\d+-\\d+)$", RegexOptions.Compiled | RegexOptions.CultureInvariant | RegexOptions.IgnoreCase);
    private static readonly Regex RedHatPattern = new("^RH(?<type>SA|BA|EA)-(?<rest>[0-9:.-]+)$", RegexOptions.Compiled | RegexOptions.CultureInvariant | RegexOptions.IgnoreCase);
    private static readonly Regex AmazonPattern = new("^ALAS(?<channel>2|2022|2023)?-(?<rest>[0-9A-Za-z:._-]+)$", RegexOptions.Compiled | RegexOptions.CultureInvariant | RegexOptions.IgnoreCase);
    private static readonly Regex OraclePattern = new("^(?<kind>ELSA|ELBA|ELSA-OCI|ELBA-OCI)-(?<rest>[0-9A-Za-z:._-]+)$", RegexOptions.Compiled | RegexOptions.CultureInvariant | RegexOptions.IgnoreCase);
    private static readonly Regex PhotonPattern = new("^PHSA-(?<rest>[0-9A-Za-z:._-]+)$", RegexOptions.Compiled | RegexOptions.CultureInvariant | RegexOptions.IgnoreCase);
    private static readonly Regex RockyPattern = new("^RLSA-(?<rest>[0-9A-Za-z:._-]+)$", RegexOptions.Compiled | RegexOptions.CultureInvariant | RegexOptions.IgnoreCase);
    private static readonly Regex SusePattern = new("^SUSE-(?<kind>SU|RU|OU|SB)-(?<rest>[0-9A-Za-z:._-]+)$", RegexOptions.Compiled | RegexOptions.CultureInvariant | RegexOptions.IgnoreCase);

    // Fallback mapping from a provenance source name to directory segments, used
    // when the identifier itself does not match any known pattern.
    private static readonly Dictionary<string, string[]> SourceDirectoryMap = new(StringComparer.OrdinalIgnoreCase)
    {
        ["nvd"] = new[] { "nvd" },
        ["ghsa"] = new[] { "ghsa" },
        ["github"] = new[] { "ghsa" },
        ["osv"] = new[] { "osv" },
        ["redhat"] = new[] { "redhat", "oval" },
        ["ubuntu"] = new[] { "ubuntu" },
        ["debian"] = new[] { "debian" },
        ["oracle"] = new[] { "oracle" },
        ["photon"] = new[] { "photon" },
        ["rocky"] = new[] { "rocky" },
        ["suse"] = new[] { "suse" },
        ["amazon"] = new[] { "amazon" },
        ["aws"] = new[] { "amazon" },
        ["alpine"] = new[] { "alpine" },
        ["wolfi"] = new[] { "wolfi" },
        ["chainguard"] = new[] { "chainguard" },
        ["cert-fr"] = new[] { "cert", "fr" },
        ["cert-in"] = new[] { "cert", "in" },
        ["cert-cc"] = new[] { "cert", "cc" },
        ["cert-bund"] = new[] { "cert", "bund" },
        ["cisa"] = new[] { "ics", "cisa" },
        ["ics-cisa"] = new[] { "ics", "cisa" },
        ["ics-kaspersky"] = new[] { "ics", "kaspersky" },
        ["kaspersky"] = new[] { "ics", "kaspersky" },
    };

    // Maps package-URL types to the ecosystem directory names used under ghsa/.
    private static readonly Dictionary<string, string> GhsaEcosystemMap = new(StringComparer.OrdinalIgnoreCase)
    {
        ["go"] = "go",
        ["golang"] = "go",
        ["npm"] = "npm",
        ["maven"] = "maven",
        ["pypi"] = "pip",
        ["pip"] = "pip",
        ["nuget"] = "nuget",
        ["composer"] = "composer",
        ["packagist"] = "composer",
        ["rubygems"] = "rubygems",
        ["gem"] = "rubygems",
        ["swift"] = "swift",
        ["cargo"] = "cargo",
        ["hex"] = "hex",
        ["pub"] = "pub",
        ["github"] = "github",
        ["docker"] = "container",
    };

    /// <summary>
    /// Returns the vuln-list-style relative path for the advisory: the layout's
    /// directory segments plus a sanitized "&lt;IDENTIFIER&gt;.json" file name.
    /// </summary>
    /// <exception cref="ArgumentNullException">Thrown when <paramref name="advisory"/> is null.</exception>
    /// <exception cref="InvalidOperationException">Thrown when no identifier can be derived.</exception>
    public string GetRelativePath(Advisory advisory)
    {
        if (advisory is null)
        {
            throw new ArgumentNullException(nameof(advisory));
        }

        var identifier = SelectPreferredIdentifier(advisory);
        if (identifier.Length == 0)
        {
            throw new InvalidOperationException("Unable to derive identifier for advisory.");
        }

        var layout = ResolveLayout(advisory, identifier);
        // Directory segments followed by the file name in the last slot.
        var segments = new string[layout.Segments.Length + 1];
        for (var i = 0; i < layout.Segments.Length; i++)
        {
            segments[i] = layout.Segments[i];
        }
        segments[^1] = layout.FileName;
        return Path.Combine(segments);
    }

    /// <summary>
    /// Tries each identifier pattern in priority order (CVE, GHSA, distro advisories),
    /// then falls back to provenance-source mapping, and finally to "misc/".
    /// </summary>
    private static Layout ResolveLayout(Advisory advisory, string identifier)
    {
        if (TryResolveCve(identifier, out var layout))
        {
            return layout;
        }

        if (TryResolveGhsa(advisory, identifier, out layout))
        {
            return layout;
        }

        if (TryResolveUsn(identifier, out layout) ||
            TryResolveDebian(identifier, out layout) ||
            TryResolveRedHat(identifier, out layout) ||
            TryResolveAmazon(identifier, out layout) ||
            TryResolveOracle(identifier, out layout) ||
            TryResolvePhoton(identifier, out layout) ||
            TryResolveRocky(identifier, out layout) ||
            TryResolveSuse(identifier, out layout))
        {
            return layout;
        }

        if (TryResolveByProvenance(advisory, identifier, out layout))
        {
            return layout;
        }

        // Deterministic catch-all for unrecognized identifiers.
        return new Layout(new[] { "misc" }, CreateFileName(identifier));
    }

    /// <summary>CVE-YYYY-NNNN → nvd/&lt;year&gt;/CVE-....json.</summary>
    private static bool TryResolveCve(string identifier, out Layout layout)
    {
        var match = CvePattern.Match(identifier);
        if (!match.Success)
        {
            layout = default;
            return false;
        }

        var year = match.Groups["year"].Value;
        layout = new Layout(new[] { "nvd", year }, CreateFileName(identifier, uppercase: true));
        return true;
    }

    /// <summary>
    /// GHSA ids go under ghsa/&lt;ecosystem&gt;/&lt;package&gt;/ when an affected package URL
    /// is available, else under github/advisories/.
    /// </summary>
    private static bool TryResolveGhsa(Advisory advisory, string identifier, out Layout layout)
    {
        if (!GhsaPattern.IsMatch(identifier))
        {
            layout = default;
            return false;
        }

        if (TryGetGhsaPackage(advisory, out var ecosystem, out var packagePath))
        {
            layout = new Layout(new[] { "ghsa", ecosystem, packagePath }, CreateFileName(identifier, uppercase: true));
            return true;
        }

        layout = new Layout(new[] { "github", "advisories" }, CreateFileName(identifier, uppercase: true));
        return true;
    }

    /// <summary>Ubuntu Security Notices (USN-*) → ubuntu/.</summary>
    private static bool TryResolveUsn(string identifier, out Layout layout)
    {
        if (!UsnPattern.IsMatch(identifier))
        {
            layout = default;
            return false;
        }

        layout = new Layout(new[] { "ubuntu" }, CreateFileName(identifier, uppercase: true));
        return true;
    }

    /// <summary>Debian DLA/DSA/ELA advisories → debian/.</summary>
    private static bool TryResolveDebian(string identifier, out Layout layout)
    {
        var match = DebianPattern.Match(identifier);
        if (!match.Success)
        {
            layout = default;
            return false;
        }

        layout = new Layout(new[] { "debian" }, CreateFileName(identifier, uppercase: true));
        return true;
    }

    /// <summary>Red Hat RHSA/RHBA/RHEA advisories → redhat/oval/.</summary>
    private static bool TryResolveRedHat(string identifier, out Layout layout)
    {
        if (!RedHatPattern.IsMatch(identifier))
        {
            layout = default;
            return false;
        }

        layout = new Layout(new[] { "redhat", "oval" }, CreateFileName(identifier, uppercase: true));
        return true;
    }

    /// <summary>
    /// Amazon Linux ALAS advisories → amazon/&lt;channel&gt;/ where the channel subdirectory
    /// is "2", "2022", or "2023" from the id prefix, defaulting to "1".
    /// </summary>
    private static bool TryResolveAmazon(string identifier, out Layout layout)
    {
        var match = AmazonPattern.Match(identifier);
        if (!match.Success)
        {
            layout = default;
            return false;
        }

        var channel = match.Groups["channel"].Value;
        var subdirectory = channel switch
        {
            "2" => "2",
            "2023" => "2023",
            "2022" => "2022",
            _ => "1",
        };

        layout = new Layout(new[] { "amazon", subdirectory }, CreateFileName(identifier, uppercase: true));
        return true;
    }

    /// <summary>Oracle ELSA/ELBA advisories → oracle/linux/.</summary>
    private static bool TryResolveOracle(string identifier, out Layout layout)
    {
        if (!OraclePattern.IsMatch(identifier))
        {
            layout = default;
            return false;
        }

        layout = new Layout(new[] { "oracle", "linux" }, CreateFileName(identifier, uppercase: true));
        return true;
    }

    /// <summary>VMware Photon PHSA advisories → photon/.</summary>
    private static bool TryResolvePhoton(string identifier, out Layout layout)
    {
        if (!PhotonPattern.IsMatch(identifier))
        {
            layout = default;
            return false;
        }

        layout = new Layout(new[] { "photon" }, CreateFileName(identifier, uppercase: true));
        return true;
    }

    /// <summary>Rocky Linux RLSA advisories → rocky/.</summary>
    private static bool TryResolveRocky(string identifier, out Layout layout)
    {
        if (!RockyPattern.IsMatch(identifier))
        {
            layout = default;
            return false;
        }

        layout = new Layout(new[] { "rocky" }, CreateFileName(identifier, uppercase: true));
        return true;
    }

    /// <summary>SUSE SU/RU/OU/SB advisories → suse/.</summary>
    private static bool TryResolveSuse(string identifier, out Layout layout)
    {
        if (!SusePattern.IsMatch(identifier))
        {
            layout = default;
            return false;
        }

        layout = new Layout(new[] { "suse" }, CreateFileName(identifier, uppercase: true));
        return true;
    }

    /// <summary>
    /// Maps the first recognized provenance source (in enumeration order) to its
    /// directory segments; directory choice therefore depends on provenance ordering.
    /// </summary>
    private static bool TryResolveByProvenance(Advisory advisory, string identifier, out Layout layout)
    {
        foreach (var source in EnumerateDistinctProvenanceSources(advisory))
        {
            if (SourceDirectoryMap.TryGetValue(source, out var segments))
            {
                layout = new Layout(segments, CreateFileName(identifier));
                return true;
            }
        }

        layout = default;
        return false;
    }

    /// <summary>
    /// Finds the first affected package whose identifier parses as a package URL and
    /// returns its mapped ecosystem plus a "%2F"-joined package path.
    /// </summary>
    private static bool TryGetGhsaPackage(Advisory advisory, out string ecosystem, out string packagePath)
    {
        foreach (var package in advisory.AffectedPackages)
        {
            if (!TryParsePackageUrl(package.Identifier, out var type, out var encodedPath))
            {
                continue;
            }

            if (GhsaEcosystemMap.TryGetValue(type, out var mapped))
            {
                ecosystem = mapped;
            }
            else
            {
                // Unmapped purl types fall through as their lowercased type name.
                ecosystem = type.ToLowerInvariant();
            }

            packagePath = encodedPath;
            return true;
        }

        // Out values are still assigned on failure to satisfy definite assignment.
        ecosystem = "advisories";
        packagePath = "_";
        return false;
    }

    /// <summary>
    /// Normalizes a package URL; on success yields its type and the namespace+name
    /// joined with the literal "%2F" (an encoded '/' kept inside a single path segment).
    /// </summary>
    private static bool TryParsePackageUrl(string identifier, out string type, out string encodedPath)
    {
        type = string.Empty;
        encodedPath = string.Empty;

        if (!IdentifierNormalizer.TryNormalizePackageUrl(identifier, out _, out var packageUrl))
        {
            return false;
        }

        var segments = packageUrl!.NamespaceSegments.IsDefaultOrEmpty
            ? new[] { packageUrl.Name }
            : packageUrl.NamespaceSegments.Append(packageUrl.Name).ToArray();

        type = packageUrl.Type;
        encodedPath = string.Join("%2F", segments);
        return true;
    }

    /// <summary>Builds "&lt;identifier&gt;.json", optionally upper-casing the identifier first.</summary>
    private static string CreateFileName(string identifier, bool uppercase = false)
    {
        var candidate = uppercase ? identifier.ToUpperInvariant() : identifier;
        return $"{SanitizeFileName(candidate)}.json";
    }

    /// <summary>
    /// Yields each distinct (case-insensitive) provenance source across the advisory,
    /// its references, affected packages, version ranges, and CVSS metrics, in order.
    /// </summary>
    private static IEnumerable<string> EnumerateDistinctProvenanceSources(Advisory advisory)
    {
        var seen = new HashSet<string>(StringComparer.OrdinalIgnoreCase);

        foreach (var source in advisory.Provenance)
        {
            if (TryAddSource(source.Source))
            {
                yield return source.Source;
            }
        }

        foreach (var reference in advisory.References)
        {
            if (TryAddSource(reference.Provenance.Source))
            {
                yield return reference.Provenance.Source;
            }
        }

        foreach (var package in advisory.AffectedPackages)
        {
            foreach (var source in package.Provenance)
            {
                if (TryAddSource(source.Source))
                {
                    yield return source.Source;
                }
            }

            foreach (var range in package.VersionRanges)
            {
                if (TryAddSource(range.Provenance.Source))
                {
                    yield return range.Provenance.Source;
                }
            }
        }

        foreach (var metric in advisory.CvssMetrics)
        {
            if (TryAddSource(metric.Provenance.Source))
            {
                yield return metric.Provenance.Source;
            }
        }

        // Local de-duplication helper; returns true only the first time a
        // non-blank source name is seen.
        bool TryAddSource(string? value)
        {
            if (string.IsNullOrWhiteSpace(value))
            {
                return false;
            }

            return seen.Add(value);
        }
    }

    /// <summary>
    /// Prefers a CVE/GHSA-shaped identifier: first the advisory key, then each alias,
    /// finally falling back to the trimmed advisory key as-is.
    /// </summary>
    private static string SelectPreferredIdentifier(Advisory advisory)
    {
        if (TrySelectIdentifier(advisory.AdvisoryKey, out var preferred))
        {
            return preferred;
        }

        foreach (var alias in advisory.Aliases)
        {
            if (TrySelectIdentifier(alias, out preferred))
            {
                return preferred;
            }
        }

        return advisory.AdvisoryKey.Trim();
    }

    /// <summary>
    /// Returns true only for CVE/GHSA-shaped values; the out parameter carries the
    /// trimmed value in either case (ignored by callers when false).
    /// </summary>
    private static bool TrySelectIdentifier(string value, out string identifier)
    {
        identifier = string.Empty;
        if (string.IsNullOrWhiteSpace(value))
        {
            return false;
        }

        var trimmed = value.Trim();
        if (CvePattern.IsMatch(trimmed) || GhsaPattern.IsMatch(trimmed))
        {
            identifier = trimmed;
            return true;
        }

        identifier = trimmed;
        return false;
    }

    /// <summary>
    /// Replaces path separators and filesystem-invalid characters with '_', trims,
    /// and falls back to "advisory" so the name is never empty.
    /// </summary>
    private static string SanitizeFileName(string name)
    {
        var invalid = Path.GetInvalidFileNameChars();
        Span<char> buffer = stackalloc char[name.Length];
        var count = 0;
        foreach (var ch in name)
        {
            if (ch == '/' || ch == '\\' || Array.IndexOf(invalid, ch) >= 0)
            {
                buffer[count++] = '_';
            }
            else
            {
                buffer[count++] = ch;
            }
        }

        var sanitized = new string(buffer[..count]).Trim();
        return string.IsNullOrEmpty(sanitized) ? "advisory" : sanitized;
    }

    // Resolved directory segments plus the terminal file name.
    private readonly record struct Layout(string[] Segments, string FileName);
}
|
||||
@@ -0,0 +1,24 @@
|
||||
# AGENTS
|
||||
## Role
|
||||
Exporter producing a Trivy-compatible database artifact for self-hosting or offline use. v0: JSON list + metadata; v1: integrate official trivy-db builder or write BoltDB directly; pack and optionally push via ORAS.
|
||||
## Scope
|
||||
- Read canonical advisories; serialize payload for builder or intermediate; write metadata.json (generatedAt, counts).
|
||||
- Output root: exports/trivy/<yyyyMMddHHmmss>; deterministic path components.
|
||||
- OCI/Trivy expectations: layer media type application/vnd.aquasec.trivy.db.layer.v1.tar+gzip; config media type application/vnd.aquasec.trivy.config.v1+json; tag (e.g., 2).
|
||||
- Optional ORAS push; optional offline bundle (db.tar.gz + metadata.json).
|
||||
- DI: TrivyExporter + Jobs.TrivyExportJob registered by TrivyExporterDependencyInjectionRoutine.
|
||||
- Export_state recording: capture digests, counts, start/end timestamps for idempotent reruns and incremental packaging.
|
||||
## Participants
|
||||
- Storage.Mongo.AdvisoryStore as input.
|
||||
- Core scheduler runs export job; WebService/Plugins trigger it.
|
||||
- JSON exporter (optional precursor) if choosing the builder path.
|
||||
## Interfaces & contracts
|
||||
- IFeedExporter.Name = "trivy-db"; ExportAsync(IServiceProvider, CancellationToken).
|
||||
- FeedserOptions.packaging.trivy governs repo/tag/publish/offline_bundle.
|
||||
- Deterministic sorting and timestamp discipline (UTC; consider build reproducibility knobs).
|
||||
## In/Out of scope
|
||||
In: assembling builder inputs, packing tar.gz, pushing to registry when configured.
|
||||
Out: signing (external pipeline), scanner behavior.
|
||||
## Observability & security expectations
|
||||
- Metrics: export.trivy.records, size_bytes, duration, oras.push.success/fail.
|
||||
- Logs: export path, repo/tag, digest; redact credentials; backoff on push errors.
|
||||
@@ -1,25 +0,0 @@
|
||||
using System;
|
||||
using System.Threading;
|
||||
using System.Threading.Tasks;
|
||||
using StellaOps.Plugin;
|
||||
|
||||
namespace StellaOps.Feedser.Exporter.TrivyDb;
|
||||
|
||||
/// <summary>
/// Placeholder Trivy DB exporter plugin whose exporter performs no work.
/// NOTE(review): this hunk removes the stub — presumably superseded by the real
/// TrivyDb exporter interfaces added alongside it; confirm before reintroducing.
/// </summary>
public sealed class TrivyDbExporterPlugin : IExporterPlugin
{
    public string Name => "trivydb";

    // Always reports available; the stub has no dependencies to probe.
    public bool IsAvailable(IServiceProvider services) => true;

    public IFeedExporter Create(IServiceProvider services) => new StubExporter(Name);

    // No-op exporter: ExportAsync completes immediately without side effects.
    private sealed class StubExporter : IFeedExporter
    {
        public StubExporter(string name) => Name = name;

        public string Name { get; }

        public Task ExportAsync(IServiceProvider services, CancellationToken cancellationToken) => Task.CompletedTask;
    }
}
|
||||
|
||||
@@ -0,0 +1,15 @@
|
||||
using System;
|
||||
using System.Threading;
|
||||
using System.Threading.Tasks;
|
||||
using StellaOps.Feedser.Exporter.Json;
|
||||
|
||||
namespace StellaOps.Feedser.Exporter.TrivyDb;
|
||||
|
||||
/// <summary>
/// Builds a Trivy database artifact from a previously written JSON export tree.
/// </summary>
public interface ITrivyDbBuilder
{
    /// <param name="jsonTree">Result of the JSON export the database is derived from.</param>
    /// <param name="exportedAt">Timestamp of the originating export run.</param>
    /// <param name="exportId">Identifier of the originating export run.</param>
    Task<TrivyDbBuilderResult> BuildAsync(
        JsonExportResult jsonTree,
        DateTimeOffset exportedAt,
        string exportId,
        CancellationToken cancellationToken);
}
|
||||
@@ -0,0 +1,9 @@
|
||||
using System.Threading;
|
||||
using System.Threading.Tasks;
|
||||
|
||||
namespace StellaOps.Feedser.Exporter.TrivyDb;
|
||||
|
||||
/// <summary>
/// Pushes a built OCI layout to a registry (implemented by shelling out to the ORAS CLI).
/// </summary>
public interface ITrivyDbOrasPusher
{
    /// <summary>
    /// Pushes the OCI layout at <paramref name="layoutPath"/> to <paramref name="reference"/>,
    /// tagging/logging the operation under <paramref name="exportId"/>.
    /// </summary>
    Task PushAsync(string layoutPath, string reference, string exportId, CancellationToken cancellationToken);
}
|
||||
@@ -0,0 +1,10 @@
|
||||
using System.Collections.Generic;
|
||||
using System.Text.Json.Serialization;
|
||||
|
||||
namespace StellaOps.Feedser.Exporter.TrivyDb;
|
||||
|
||||
/// <summary>
/// OCI content descriptor (media type, digest, size, optional annotations),
/// mirroring the OCI image-spec descriptor with spec-mandated JSON names.
/// </summary>
public sealed record OciDescriptor(
    [property: JsonPropertyName("mediaType")] string MediaType,
    [property: JsonPropertyName("digest")] string Digest,
    [property: JsonPropertyName("size")] long Size,
    [property: JsonPropertyName("annotations")] IReadOnlyDictionary<string, string>? Annotations = null);
|
||||
@@ -0,0 +1,8 @@
|
||||
using System.Collections.Generic;
|
||||
using System.Text.Json.Serialization;
|
||||
|
||||
namespace StellaOps.Feedser.Exporter.TrivyDb;
|
||||
|
||||
/// <summary>
/// Minimal OCI image index: schema version plus the list of manifest descriptors.
/// </summary>
public sealed record OciIndex(
    [property: JsonPropertyName("schemaVersion")] int SchemaVersion,
    [property: JsonPropertyName("manifests")] IReadOnlyList<OciDescriptor> Manifests);
|
||||
@@ -0,0 +1,10 @@
|
||||
using System.Collections.Generic;
|
||||
using System.Text.Json.Serialization;
|
||||
|
||||
namespace StellaOps.Feedser.Exporter.TrivyDb;
|
||||
|
||||
/// <summary>
/// Minimal OCI image manifest: schema version, media type, config descriptor
/// and layer descriptors, serialized with spec-mandated JSON names.
/// </summary>
public sealed record OciManifest(
    [property: JsonPropertyName("schemaVersion")] int SchemaVersion,
    [property: JsonPropertyName("mediaType")] string MediaType,
    [property: JsonPropertyName("config")] OciDescriptor Config,
    [property: JsonPropertyName("layers")] IReadOnlyList<OciDescriptor> Layers);
|
||||
@@ -1,14 +1,22 @@
|
||||
<?xml version='1.0' encoding='utf-8'?>
|
||||
<Project Sdk="Microsoft.NET.Sdk">
|
||||
|
||||
<PropertyGroup>
|
||||
<TargetFramework>net10.0</TargetFramework>
|
||||
<LangVersion>preview</LangVersion>
|
||||
<ImplicitUsings>enable</ImplicitUsings>
|
||||
<Nullable>enable</Nullable>
|
||||
<TreatWarningsAsErrors>true</TreatWarningsAsErrors>
|
||||
</PropertyGroup>
|
||||
|
||||
<ItemGroup>
|
||||
<ProjectReference Include="../../__Libraries/StellaOps.Plugin/StellaOps.Plugin.csproj" />
|
||||
<ProjectReference Include="../StellaOps.Feedser.Models/StellaOps.Feedser.Models.csproj" />
|
||||
<ProjectReference Include="..\StellaOps.Feedser.Exporter.Json\StellaOps.Feedser.Exporter.Json.csproj" />
|
||||
<ProjectReference Include="..\StellaOps.Feedser.Models\StellaOps.Feedser.Models.csproj" />
|
||||
<ProjectReference Include="..\StellaOps.Feedser.Storage.Mongo\StellaOps.Feedser.Storage.Mongo.csproj" />
|
||||
<ProjectReference Include="..\..\__Libraries\StellaOps.DependencyInjection\StellaOps.DependencyInjection.csproj" />
|
||||
<ProjectReference Include="..\..\__Libraries\StellaOps.Plugin\StellaOps.Plugin.csproj" />
|
||||
</ItemGroup>
|
||||
</Project>
|
||||
|
||||
<ItemGroup>
|
||||
<PackageReference Include="Microsoft.Extensions.Logging.Abstractions" Version="8.0.0" />
|
||||
<PackageReference Include="Microsoft.Extensions.Options" Version="8.0.0" />
|
||||
<PackageReference Include="Microsoft.Extensions.Options.ConfigurationExtensions" Version="8.0.0" />
|
||||
</ItemGroup>
|
||||
</Project>
|
||||
@@ -0,0 +1,13 @@
|
||||
# TASKS
|
||||
| Task | Owner(s) | Depends on | Notes |
|
||||
|---|---|---|---|
|
||||
|Fix method name typo `GetExportRoot'` -> `GetExportRoot`|BE-Export|Exporters|DONE – `TrivyDbExportOptions.GetExportRoot` helper added with unit coverage.|
|
||||
|Implement BoltDB builder integration (v0 via trivy-db CLI)|BE-Export|Env|DONE – `TrivyDbBoltBuilder` shells `trivy-db build` against our JSON tree with deterministic packaging.|
|
||||
|Pack db.tar.gz + metadata.json|BE-Export|Exporters|DONE – Builder output re-packed with fixed timestamps and zeroed gzip mtime.|
|
||||
|ORAS push support|BE-Export|Exporters|DONE – Optional `TrivyDbOrasPusher` shells `oras cp --from-oci-layout` with configurable args/env.|
|
||||
|Offline bundle toggle|BE-Export|Exporters|DONE – Deterministic OCI layout bundle emitted when enabled.|
|
||||
|Deterministic ordering of advisories|BE-Export|Models|TODO – Sort by advisoryKey; stable array orders.|
|
||||
|End-to-end tests with small dataset|QA|Exporters|TODO – Assert media types and reproducible digests across runs.|
|
||||
|ExportState persistence & idempotence|BE-Export|Storage.Mongo|DOING – `ExportStateManager` keeps stable base export metadata; delta reset remains pending.|
|
||||
|Streamed package building to avoid large copies|BE-Export|Exporters|TODO – refactor package writer to stream without double-buffering metadata/archive payloads.|
|
||||
|Plan incremental/delta exports|BE-Export|Exporters|TODO – design reuse of existing blobs/layers when inputs unchanged instead of rewriting full trees each run.|
|
||||
@@ -0,0 +1,11 @@
|
||||
using System;
|
||||
using System.Text.Json.Serialization;
|
||||
|
||||
namespace StellaOps.Feedser.Exporter.TrivyDb;
|
||||
|
||||
/// <summary>
/// Config blob embedded in the Trivy DB OCI artifact describing the packaged
/// database: generation time, version string, and digest/size of db.tar.gz.
/// </summary>
public sealed record TrivyConfigDocument(
    [property: JsonPropertyName("mediaType")] string MediaType,
    [property: JsonPropertyName("generatedAt")] DateTimeOffset GeneratedAt,
    [property: JsonPropertyName("databaseVersion")] string DatabaseVersion,
    [property: JsonPropertyName("databaseDigest")] string DatabaseDigest,
    [property: JsonPropertyName("databaseSize")] long DatabaseSize);
|
||||
@@ -0,0 +1,62 @@
|
||||
using System;
|
||||
using System.IO;
|
||||
using System.Threading;
|
||||
using System.Threading.Tasks;
|
||||
|
||||
namespace StellaOps.Feedser.Exporter.TrivyDb;
|
||||
|
||||
/// <summary>
/// A lazily opened, re-readable binary payload (archive layer, config blob)
/// with a known length. Each <see cref="OpenReadAsync"/> call yields a fresh stream.
/// </summary>
public sealed class TrivyDbBlob
{
    private readonly Func<CancellationToken, ValueTask<Stream>> _openReadAsync;

    private TrivyDbBlob(Func<CancellationToken, ValueTask<Stream>> openReadAsync, long length)
    {
        _openReadAsync = openReadAsync ?? throw new ArgumentNullException(nameof(openReadAsync));
        if (length < 0)
        {
            throw new ArgumentOutOfRangeException(nameof(length));
        }

        Length = length;
    }

    /// <summary>Size of the payload in bytes.</summary>
    public long Length { get; }

    /// <summary>Opens a new read-only stream over the payload; the caller disposes it.</summary>
    public ValueTask<Stream> OpenReadAsync(CancellationToken cancellationToken)
        => _openReadAsync(cancellationToken);

    /// <summary>
    /// Wraps an in-memory payload. The bytes are copied exactly once, so later
    /// mutation of the caller's buffer cannot affect readers, and repeated opens
    /// no longer allocate a fresh copy each time (the original called
    /// <c>payload.ToArray()</c> inside the open delegate).
    /// </summary>
    public static TrivyDbBlob FromBytes(ReadOnlyMemory<byte> payload)
    {
        if (payload.IsEmpty)
        {
            return new TrivyDbBlob(static _ => ValueTask.FromResult<Stream>(Stream.Null), 0);
        }

        // Single defensive copy; all readers share it via non-writable MemoryStreams.
        var buffer = payload.ToArray();
        return new TrivyDbBlob(
            _ => ValueTask.FromResult<Stream>(new MemoryStream(buffer, writable: false)),
            buffer.Length);
    }

    /// <summary>
    /// Wraps an on-disk payload of a known length; the file is opened on demand
    /// for asynchronous, sequential reads. The length is trusted, not verified
    /// against the file on disk.
    /// </summary>
    public static TrivyDbBlob FromFile(string path, long length)
    {
        if (string.IsNullOrWhiteSpace(path))
        {
            throw new ArgumentException("File path must be provided.", nameof(path));
        }

        if (length < 0)
        {
            throw new ArgumentOutOfRangeException(nameof(length));
        }

        return new TrivyDbBlob(
            _ => ValueTask.FromResult<Stream>(new FileStream(
                path,
                FileMode.Open,
                FileAccess.Read,
                FileShare.Read,
                bufferSize: 81920,
                options: FileOptions.Asynchronous | FileOptions.SequentialScan)),
            length);
    }
}
|
||||
@@ -0,0 +1,376 @@
|
||||
using System;
|
||||
using System.Diagnostics;
|
||||
using System.Globalization;
|
||||
using System.IO;
|
||||
using System.IO.Compression;
|
||||
using System.Linq;
|
||||
using System.Security.Cryptography;
|
||||
using System.Text;
|
||||
using System.Threading;
|
||||
using System.Threading.Tasks;
|
||||
using System.Formats.Tar;
|
||||
using Microsoft.Extensions.Logging;
|
||||
using Microsoft.Extensions.Options;
|
||||
using StellaOps.Feedser.Exporter.Json;
|
||||
|
||||
namespace StellaOps.Feedser.Exporter.TrivyDb;
|
||||
|
||||
/// <summary>
/// <see cref="ITrivyDbBuilder"/> that shells out to the external <c>trivy-db build</c>
/// CLI against the JSON export tree, then re-packs <c>metadata.json</c> + <c>trivy.db</c>
/// into a deterministic <c>db.tar.gz</c> (fixed entry timestamps, zeroed gzip mtime).
/// </summary>
public sealed class TrivyDbBoltBuilder : ITrivyDbBuilder
{
    private readonly TrivyDbExportOptions _options;
    private readonly ILogger<TrivyDbBoltBuilder> _logger;

    public TrivyDbBoltBuilder(IOptions<TrivyDbExportOptions> options, ILogger<TrivyDbBoltBuilder> logger)
    {
        _options = options?.Value ?? throw new ArgumentNullException(nameof(options));
        _logger = logger ?? throw new ArgumentNullException(nameof(logger));
    }

    /// <summary>
    /// Runs the CLI into a scratch directory beside the JSON tree, validates the
    /// expected outputs exist, packs them, and returns archive path/digest/metadata.
    /// The scratch directory is cleaned up when the CLI or validation fails;
    /// on success it is returned as <c>WorkingDirectory</c> for the caller to manage.
    /// </summary>
    public async Task<TrivyDbBuilderResult> BuildAsync(
        JsonExportResult jsonTree,
        DateTimeOffset exportedAt,
        string exportId,
        CancellationToken cancellationToken)
    {
        ArgumentNullException.ThrowIfNull(jsonTree);
        ArgumentException.ThrowIfNullOrEmpty(exportId);

        var builderRoot = PrepareBuilderRoot(jsonTree.ExportDirectory, exportId);
        var outputDir = Path.Combine(builderRoot, "out");
        Directory.CreateDirectory(outputDir);

        try
        {
            await RunCliAsync(jsonTree.ExportDirectory, outputDir, cancellationToken).ConfigureAwait(false);
        }
        catch
        {
            TryDeleteDirectory(builderRoot);
            throw;
        }

        var metadataPath = Path.Combine(outputDir, "metadata.json");
        var dbPath = Path.Combine(outputDir, "trivy.db");

        if (!File.Exists(metadataPath))
        {
            TryDeleteDirectory(builderRoot);
            throw new InvalidOperationException($"trivy-db metadata not found at '{metadataPath}'.");
        }

        if (!File.Exists(dbPath))
        {
            TryDeleteDirectory(builderRoot);
            throw new InvalidOperationException($"trivy.db not found at '{dbPath}'.");
        }

        var archivePath = Path.Combine(builderRoot, "db.tar.gz");
        await CreateArchiveAsync(archivePath, exportedAt, metadataPath, dbPath, cancellationToken).ConfigureAwait(false);

        var digest = await ComputeDigestAsync(archivePath, cancellationToken).ConfigureAwait(false);
        var length = new FileInfo(archivePath).Length;
        var builderMetadata = await File.ReadAllBytesAsync(metadataPath, cancellationToken).ConfigureAwait(false);

        return new TrivyDbBuilderResult(
            archivePath,
            digest,
            length,
            builderMetadata,
            builderRoot);
    }

    // Creates (recreating if stale) the per-export scratch directory next to the JSON tree.
    private static string PrepareBuilderRoot(string exportDirectory, string exportId)
    {
        var root = Path.Combine(exportDirectory, $".builder-{exportId}");
        if (Directory.Exists(root))
        {
            Directory.Delete(root, recursive: true);
        }

        Directory.CreateDirectory(root);
        return root;
    }

    private static void TryDeleteDirectory(string directory)
    {
        try
        {
            if (Directory.Exists(directory))
            {
                Directory.Delete(directory, recursive: true);
            }
        }
        catch
        {
            // Best-effort cleanup of the scratch tree; failures are non-fatal.
        }
    }

    // Invokes `trivy-db build --cache-dir <tree> --output-dir <out>` with the
    // configured interval/targets/environment, surfacing CLI output via logging.
    private async Task RunCliAsync(string cacheDir, string outputDir, CancellationToken cancellationToken)
    {
        var builderOptions = _options.Builder ?? new TrivyDbBuilderOptions();
        var executable = string.IsNullOrWhiteSpace(builderOptions.ExecutablePath)
            ? "trivy-db"
            : builderOptions.ExecutablePath;

        var targets = builderOptions.OnlyUpdateTargets ?? new List<string>();
        var environment = builderOptions.Environment ?? new Dictionary<string, string>(StringComparer.OrdinalIgnoreCase);

        var startInfo = new ProcessStartInfo
        {
            FileName = executable,
            RedirectStandardOutput = true,
            RedirectStandardError = true,
            UseShellExecute = false,
        };

        startInfo.ArgumentList.Add("build");
        startInfo.ArgumentList.Add("--cache-dir");
        startInfo.ArgumentList.Add(cacheDir);
        startInfo.ArgumentList.Add("--output-dir");
        startInfo.ArgumentList.Add(outputDir);

        if (builderOptions.UpdateInterval != default)
        {
            startInfo.ArgumentList.Add("--update-interval");
            startInfo.ArgumentList.Add(ToGoDuration(builderOptions.UpdateInterval));
        }

        foreach (var target in targets.Where(static t => !string.IsNullOrWhiteSpace(t)))
        {
            startInfo.ArgumentList.Add("--only-update");
            startInfo.ArgumentList.Add(target);
        }

        if (!string.IsNullOrWhiteSpace(builderOptions.WorkingDirectory))
        {
            startInfo.WorkingDirectory = builderOptions.WorkingDirectory;
        }

        if (!builderOptions.InheritEnvironment)
        {
            startInfo.Environment.Clear();
        }

        foreach (var kvp in environment)
        {
            startInfo.Environment[kvp.Key] = kvp.Value;
        }

        using var process = new Process { StartInfo = startInfo, EnableRaisingEvents = false };

        var stdOut = new StringBuilder();
        var stdErr = new StringBuilder();

        // RunContinuationsAsynchronously keeps awaiting continuations off the
        // Process event-callback thread (see "create TCS with RunContinuationsAsynchronously").
        var stdoutCompletion = new TaskCompletionSource<object?>(TaskCreationOptions.RunContinuationsAsynchronously);
        var stderrCompletion = new TaskCompletionSource<object?>(TaskCreationOptions.RunContinuationsAsynchronously);

        process.OutputDataReceived += (_, e) =>
        {
            if (e.Data is null)
            {
                // Null Data signals end of the stream.
                stdoutCompletion.TrySetResult(null);
            }
            else
            {
                stdOut.AppendLine(e.Data);
            }
        };

        process.ErrorDataReceived += (_, e) =>
        {
            if (e.Data is null)
            {
                stderrCompletion.TrySetResult(null);
            }
            else
            {
                stdErr.AppendLine(e.Data);
            }
        };

        _logger.LogInformation("Running {Executable} to build Trivy DB", executable);

        try
        {
            if (!process.Start())
            {
                throw new InvalidOperationException($"Failed to start '{executable}'.");
            }
        }
        catch (Exception ex) when (ex is not InvalidOperationException)
        {
            // Filter prevents re-wrapping the InvalidOperationException thrown just above.
            throw new InvalidOperationException($"Failed to start '{executable}'.", ex);
        }

        process.BeginOutputReadLine();
        process.BeginErrorReadLine();

        // Kill the whole CLI process tree if the export is cancelled mid-build.
        using var registration = cancellationToken.Register(() =>
        {
            try
            {
                if (!process.HasExited)
                {
                    process.Kill(entireProcessTree: true);
                }
            }
            catch
            {
                // Ignore kill failures.
            }
        });

#if NET8_0_OR_GREATER
        await process.WaitForExitAsync(cancellationToken).ConfigureAwait(false);
#else
        await Task.Run(() => process.WaitForExit(), cancellationToken).ConfigureAwait(false);
#endif

        // Wait for both stream-end callbacks so stdOut/stdErr are fully populated.
        await Task.WhenAll(stdoutCompletion.Task, stderrCompletion.Task).ConfigureAwait(false);

        if (process.ExitCode != 0)
        {
            _logger.LogError("trivy-db exited with code {ExitCode}. stderr: {Stderr}", process.ExitCode, stdErr.ToString());
            throw new InvalidOperationException($"'{executable}' exited with code {process.ExitCode}.");
        }

        if (stdOut.Length > 0)
        {
            _logger.LogDebug("trivy-db output: {StdOut}", stdOut.ToString());
        }

        if (stdErr.Length > 0)
        {
            _logger.LogWarning("trivy-db warnings: {StdErr}", stdErr.ToString());
        }
    }

    // Packs metadata.json + trivy.db into db.tar.gz deterministically: PAX entries
    // carry the export timestamp and fixed 0644-style permissions, and the gzip
    // header mtime is zeroed afterwards so identical inputs hash identically.
    private static async Task CreateArchiveAsync(
        string archivePath,
        DateTimeOffset exportedAt,
        string metadataPath,
        string dbPath,
        CancellationToken cancellationToken)
    {
        // BUG FIX: scope the streams so every handle on archivePath is closed
        // BEFORE ZeroGzipMtimeAsync re-opens the file with FileShare.None.
        // The original kept the outer FileStream (also opened FileShare.None)
        // alive across that call, so the second open failed with a sharing violation.
        {
            await using var archiveStream = new FileStream(
                archivePath,
                FileMode.Create,
                FileAccess.Write,
                FileShare.None,
                bufferSize: 81920,
                options: FileOptions.Asynchronous | FileOptions.SequentialScan);
            await using var gzip = new GZipStream(archiveStream, CompressionLevel.SmallestSize, leaveOpen: true);
            await using var writer = new TarWriter(gzip, TarEntryFormat.Pax, leaveOpen: false);

            var timestamp = exportedAt.UtcDateTime;
            foreach (var file in EnumerateArchiveEntries(metadataPath, dbPath))
            {
                cancellationToken.ThrowIfCancellationRequested();

                var entry = new PaxTarEntry(TarEntryType.RegularFile, file.Name)
                {
                    ModificationTime = timestamp,
                    Mode = UnixFileMode.UserRead | UnixFileMode.UserWrite | UnixFileMode.GroupRead | UnixFileMode.OtherRead,
                };

                await using var source = new FileStream(
                    file.Path,
                    FileMode.Open,
                    FileAccess.Read,
                    FileShare.Read,
                    bufferSize: 81920,
                    options: FileOptions.Asynchronous | FileOptions.SequentialScan);
                entry.DataStream = source;
                // Async write instead of the original synchronous WriteEntry.
                await writer.WriteEntryAsync(entry, cancellationToken).ConfigureAwait(false);
            }
        }

        await ZeroGzipMtimeAsync(archivePath, cancellationToken).ConfigureAwait(false);
    }

    private static IEnumerable<(string Name, string Path)> EnumerateArchiveEntries(string metadataPath, string dbPath)
    {
        // Fixed entry order keeps the archive byte-stable across runs.
        yield return ("metadata.json", metadataPath);
        yield return ("trivy.db", dbPath);
    }

    // Streams the archive through SHA-256 and formats the result as an OCI-style digest.
    private static async Task<string> ComputeDigestAsync(string archivePath, CancellationToken cancellationToken)
    {
        await using var stream = new FileStream(
            archivePath,
            FileMode.Open,
            FileAccess.Read,
            FileShare.Read,
            bufferSize: 81920,
            options: FileOptions.Asynchronous | FileOptions.SequentialScan);
        var hash = await SHA256.HashDataAsync(stream, cancellationToken).ConfigureAwait(false);
        return $"sha256:{Convert.ToHexString(hash).ToLowerInvariant()}";
    }

    // RFC 1952: gzip header bytes 4..7 hold MTIME; zero them so the compressed
    // archive is byte-reproducible regardless of when it was written.
    private static async Task ZeroGzipMtimeAsync(string archivePath, CancellationToken cancellationToken)
    {
        await using var stream = new FileStream(
            archivePath,
            FileMode.Open,
            FileAccess.ReadWrite,
            FileShare.None,
            bufferSize: 8,
            options: FileOptions.Asynchronous);

        if (stream.Length < 10)
        {
            // Shorter than a gzip header; nothing to patch.
            return;
        }

        stream.Position = 4;
        var zeros = new byte[4];
        await stream.WriteAsync(zeros, cancellationToken).ConfigureAwait(false);
        await stream.FlushAsync(cancellationToken).ConfigureAwait(false);
    }

    // Formats a TimeSpan in Go duration syntax (e.g. "24h", "1h30m", "2.5s")
    // as expected by trivy-db's --update-interval flag.
    private static string ToGoDuration(TimeSpan span)
    {
        if (span <= TimeSpan.Zero)
        {
            return "0s";
        }

        span = span.Duration();
        var builder = new StringBuilder();

        var totalHours = (int)span.TotalHours;
        if (totalHours > 0)
        {
            builder.Append(totalHours);
            builder.Append('h');
        }

        var minutes = span.Minutes;
        if (minutes > 0)
        {
            builder.Append(minutes);
            builder.Append('m');
        }

        var seconds = span.Seconds + span.Milliseconds / 1000.0;
        if (seconds > 0 || builder.Length == 0)
        {
            if (span.Milliseconds == 0)
            {
                builder.Append(span.Seconds);
            }
            else
            {
                builder.Append(seconds.ToString("0.###", CultureInfo.InvariantCulture));
            }
            builder.Append('s');
        }

        return builder.ToString();
    }
}
|
||||
@@ -0,0 +1,10 @@
|
||||
using System;
|
||||
|
||||
namespace StellaOps.Feedser.Exporter.TrivyDb;
|
||||
|
||||
/// <summary>
/// Output of a Trivy DB build: the packaged db.tar.gz (path, sha256 digest,
/// byte length), the raw builder metadata.json bytes, and the scratch directory
/// containing the archive (lifetime managed by the caller).
/// </summary>
public sealed record TrivyDbBuilderResult(
    string ArchivePath,
    string ArchiveDigest,
    long ArchiveLength,
    ReadOnlyMemory<byte> BuilderMetadata,
    string WorkingDirectory);
|
||||
@@ -0,0 +1,30 @@
|
||||
using System;
|
||||
using System.Threading;
|
||||
using System.Threading.Tasks;
|
||||
using Microsoft.Extensions.Logging;
|
||||
using StellaOps.Feedser.Core.Jobs;
|
||||
|
||||
namespace StellaOps.Feedser.Exporter.TrivyDb;
|
||||
|
||||
/// <summary>
/// Job-scheduler entry point that runs the Trivy DB exporter under the
/// "export:trivy-db" job kind.
/// </summary>
public sealed class TrivyDbExportJob : IJob
{
    /// <summary>Job kind identifier used to register and trigger this job.</summary>
    public const string JobKind = "export:trivy-db";
    // Defaults used when the job definition is registered (see the DI routine in this file).
    public static readonly TimeSpan DefaultTimeout = TimeSpan.FromMinutes(20);
    public static readonly TimeSpan DefaultLeaseDuration = TimeSpan.FromMinutes(10);

    private readonly TrivyDbFeedExporter _exporter;
    private readonly ILogger<TrivyDbExportJob> _logger;

    public TrivyDbExportJob(TrivyDbFeedExporter exporter, ILogger<TrivyDbExportJob> logger)
    {
        _exporter = exporter ?? throw new ArgumentNullException(nameof(exporter));
        _logger = logger ?? throw new ArgumentNullException(nameof(logger));
    }

    /// <summary>
    /// Delegates the run to <see cref="TrivyDbFeedExporter"/>, logging start and
    /// completion with the scheduler's run id.
    /// </summary>
    public async Task ExecuteAsync(JobExecutionContext context, CancellationToken cancellationToken)
    {
        _logger.LogInformation("Executing Trivy DB export job {RunId}", context.RunId);
        await _exporter.ExportAsync(context.Services, cancellationToken).ConfigureAwait(false);
        _logger.LogInformation("Completed Trivy DB export job {RunId}", context.RunId);
    }
}
|
||||
@@ -0,0 +1,8 @@
|
||||
namespace StellaOps.Feedser.Exporter.TrivyDb;
|
||||
|
||||
/// <summary>
/// Outcome of export planning: rebuild everything, reuse prior output (future),
/// or skip packaging because nothing changed.
/// </summary>
public enum TrivyDbExportMode
{
    /// <summary>Rebuild and package the full database.</summary>
    Full,
    /// <summary>Reserved for incremental exports; not produced by the current planner.</summary>
    Delta,
    /// <summary>Tree digest matches the stored cursor; no packaging needed.</summary>
    Skip,
}
|
||||
@@ -0,0 +1,80 @@
|
||||
using System;
|
||||
using System.IO;
|
||||
using System.Collections.Generic;
|
||||
using StellaOps.Feedser.Exporter.Json;
|
||||
|
||||
namespace StellaOps.Feedser.Exporter.TrivyDb;
|
||||
|
||||
/// <summary>
/// Configuration for the Trivy DB exporter: output locations, OCI reference
/// naming, and nested builder / ORAS / offline-bundle settings.
/// </summary>
public sealed class TrivyDbExportOptions
{
    /// <summary>Root directory under which per-export folders are created.</summary>
    public string OutputRoot { get; set; } = Path.Combine("exports", "trivy");

    /// <summary>Repository part of the OCI reference; final reference is "prefix:exportId".</summary>
    public string ReferencePrefix { get; set; } = "feedser/trivy";

    /// <summary>DateTimeOffset format string that derives the export id / image tag (UTC).</summary>
    public string TagFormat { get; set; } = "yyyyMMdd'T'HHmmss'Z'";

    /// <summary>Format string for the database version stamped into metadata.</summary>
    public string DatabaseVersionFormat { get; set; } = "yyyyMMdd'T'HHmmss'Z'";

    /// <summary>When true, intermediate working trees are kept after export instead of deleted.</summary>
    public bool KeepWorkingTree { get; set; }

    /// <summary>Optional registry repository target; push behavior depends on Oras settings.</summary>
    public string? TargetRepository { get; set; }

    /// <summary>Options for the intermediate JSON tree export feeding the builder.</summary>
    public JsonExportOptions Json { get; set; } = new()
    {
        OutputRoot = Path.Combine("exports", "trivy", "tree")
    };

    /// <summary>Settings for the external trivy-db build CLI.</summary>
    public TrivyDbBuilderOptions Builder { get; set; } = new();

    /// <summary>Settings for the optional ORAS push step.</summary>
    public TrivyDbOrasOptions Oras { get; set; } = new();

    /// <summary>Settings for the optional offline OCI-layout bundle.</summary>
    public TrivyDbOfflineBundleOptions OfflineBundle { get; set; } = new();

    /// <summary>
    /// Returns the absolute per-export directory: full path of
    /// <see cref="OutputRoot"/> combined with <paramref name="exportId"/>.
    /// </summary>
    public string GetExportRoot(string exportId)
    {
        ArgumentException.ThrowIfNullOrEmpty(exportId);
        var root = Path.GetFullPath(OutputRoot);
        return Path.Combine(root, exportId);
    }
}
|
||||
|
||||
/// <summary>
/// Settings controlling how the external trivy-db CLI is invoked.
/// </summary>
public sealed class TrivyDbBuilderOptions
{
    /// <summary>Path or command name of the trivy-db executable.</summary>
    public string ExecutablePath { get; set; } = "trivy-db";

    /// <summary>Optional working directory for the CLI process.</summary>
    public string? WorkingDirectory { get; set; }

    /// <summary>Value passed as --update-interval (formatted as a Go duration).</summary>
    public TimeSpan UpdateInterval { get; set; } = TimeSpan.FromHours(24);

    /// <summary>Targets passed as repeated --only-update flags; empty means all.</summary>
    public List<string> OnlyUpdateTargets { get; set; } = new();

    /// <summary>Extra environment variables applied to the CLI process.</summary>
    public Dictionary<string, string> Environment { get; set; } = new(StringComparer.OrdinalIgnoreCase);

    /// <summary>When false, the parent process environment is cleared before applying Environment.</summary>
    public bool InheritEnvironment { get; set; } = true;
}
|
||||
|
||||
/// <summary>
/// Settings for the optional ORAS push step that copies the OCI layout to a registry.
/// </summary>
public sealed class TrivyDbOrasOptions
{
    /// <summary>Enables the ORAS push step.</summary>
    public bool Enabled { get; set; }

    /// <summary>Path or command name of the oras executable.</summary>
    public string ExecutablePath { get; set; } = "oras";

    /// <summary>Optional working directory for the oras process.</summary>
    public string? WorkingDirectory { get; set; }

    /// <summary>When false, the parent process environment is cleared before applying Environment.</summary>
    public bool InheritEnvironment { get; set; } = true;

    /// <summary>Extra command-line arguments appended to the oras invocation.</summary>
    public List<string> AdditionalArguments { get; set; } = new();

    /// <summary>Extra environment variables applied to the oras process.</summary>
    public Dictionary<string, string> Environment { get; set; } = new(StringComparer.OrdinalIgnoreCase);

    /// <summary>Disables TLS certificate verification for the registry (insecure; test use only).</summary>
    public bool SkipTlsVerify { get; set; }

    /// <summary>Uses plain HTTP instead of HTTPS for the registry (insecure; test use only).</summary>
    public bool UseHttp { get; set; }
}
|
||||
|
||||
/// <summary>
/// Settings for emitting a deterministic offline OCI-layout bundle alongside the export.
/// </summary>
public sealed class TrivyDbOfflineBundleOptions
{
    /// <summary>Enables bundle creation.</summary>
    public bool Enabled { get; set; }

    /// <summary>Optional bundle file name; a default is used when unset.</summary>
    public string? FileName { get; set; }
}
|
||||
@@ -0,0 +1,7 @@
|
||||
namespace StellaOps.Feedser.Exporter.TrivyDb;
|
||||
|
||||
/// <summary>
/// Result of export planning: the chosen mode, the digest of the JSON tree that
/// was evaluated, and the base export (if any) the decision was made against.
/// </summary>
public sealed record TrivyDbExportPlan(
    TrivyDbExportMode Mode,
    string TreeDigest,
    string? BaseExportId,
    string? BaseManifestDigest);
|
||||
@@ -0,0 +1,33 @@
|
||||
using System;
|
||||
using StellaOps.Feedser.Storage.Mongo.Exporting;
|
||||
|
||||
namespace StellaOps.Feedser.Exporter.TrivyDb;
|
||||
|
||||
/// <summary>
/// Decides whether a Trivy DB export run needs a full rebuild or can be skipped,
/// based on the persisted export state and the digest of the fresh JSON tree.
/// </summary>
public sealed class TrivyDbExportPlanner
{
    /// <summary>
    /// Returns a Skip plan when the tree digest matches the stored cursor,
    /// otherwise a Full plan. Delta mode is reserved for future incremental support.
    /// </summary>
    public TrivyDbExportPlan CreatePlan(ExportStateRecord? existingState, string treeDigest)
    {
        ArgumentException.ThrowIfNullOrEmpty(treeDigest);

        if (existingState is null)
        {
            // First-ever export: nothing to compare against, rebuild from scratch.
            return new TrivyDbExportPlan(TrivyDbExportMode.Full, treeDigest, BaseExportId: null, BaseManifestDigest: null);
        }

        var unchanged = string.Equals(existingState.ExportCursor, treeDigest, StringComparison.Ordinal);

        // Delta exports are not implemented yet: any tree change triggers a full rebuild.
        var mode = unchanged ? TrivyDbExportMode.Skip : TrivyDbExportMode.Full;
        return new TrivyDbExportPlan(mode, treeDigest, existingState.BaseExportId, existingState.LastFullDigest);
    }
}
|
||||
@@ -0,0 +1,64 @@
|
||||
using System;
|
||||
using System.IO;
|
||||
using Microsoft.Extensions.Configuration;
|
||||
using Microsoft.Extensions.DependencyInjection;
|
||||
using Microsoft.Extensions.DependencyInjection.Extensions;
|
||||
using Microsoft.Extensions.Options;
|
||||
using StellaOps.DependencyInjection;
|
||||
using StellaOps.Feedser.Core.Jobs;
|
||||
using StellaOps.Feedser.Exporter.Json;
|
||||
using StellaOps.Feedser.Storage.Mongo.Exporting;
|
||||
|
||||
namespace StellaOps.Feedser.Exporter.TrivyDb;
|
||||
|
||||
/// <summary>
/// Registers the Trivy DB exporter services, binds <see cref="TrivyDbExportOptions"/>
/// from the "feedser:exporters:trivyDb" configuration section, and registers the
/// export job definition with the scheduler.
/// </summary>
public sealed class TrivyDbExporterDependencyInjectionRoutine : IDependencyInjectionRoutine
{
    private const string ConfigurationSection = "feedser:exporters:trivyDb";

    public IServiceCollection Register(IServiceCollection services, IConfiguration configuration)
    {
        ArgumentNullException.ThrowIfNull(services);
        ArgumentNullException.ThrowIfNull(configuration);

        // TryAdd so a host that already provides these implementations wins.
        services.TryAddSingleton<IJsonExportPathResolver, VulnListJsonExportPathResolver>();
        services.TryAddSingleton<ExportStateManager>();

        services.AddOptions<TrivyDbExportOptions>()
            .Bind(configuration.GetSection(ConfigurationSection))
            .PostConfigure(static options =>
            {
                // Backfill defaults for any value left null/blank by configuration.
                options.OutputRoot = Normalize(options.OutputRoot, Path.Combine("exports", "trivy"));
                options.Json.OutputRoot = Normalize(options.Json.OutputRoot, Path.Combine("exports", "trivy", "tree"));
                options.TagFormat = string.IsNullOrWhiteSpace(options.TagFormat) ? "yyyyMMdd'T'HHmmss'Z'" : options.TagFormat;
                options.DatabaseVersionFormat = string.IsNullOrWhiteSpace(options.DatabaseVersionFormat) ? "yyyyMMdd'T'HHmmss'Z'" : options.DatabaseVersionFormat;
                options.ReferencePrefix = string.IsNullOrWhiteSpace(options.ReferencePrefix) ? "feedser/trivy" : options.ReferencePrefix;
            });

        services.AddSingleton<TrivyDbPackageBuilder>();
        services.AddSingleton<TrivyDbOciWriter>();
        services.AddSingleton<TrivyDbExportPlanner>();
        services.AddSingleton<ITrivyDbBuilder, TrivyDbBoltBuilder>();
        services.AddSingleton<ITrivyDbOrasPusher, TrivyDbOrasPusher>();
        services.AddSingleton<TrivyDbFeedExporter>();
        services.AddTransient<TrivyDbExportJob>();

        // Register the job definition only if the host has not already configured one.
        services.PostConfigure<JobSchedulerOptions>(options =>
        {
            if (!options.Definitions.ContainsKey(TrivyDbExportJob.JobKind))
            {
                options.Definitions[TrivyDbExportJob.JobKind] = new JobDefinition(
                    TrivyDbExportJob.JobKind,
                    typeof(TrivyDbExportJob),
                    TrivyDbExportJob.DefaultTimeout,
                    TrivyDbExportJob.DefaultLeaseDuration,
                    null,
                    true); // NOTE(review): positional args — presumably cron=null, enabled=true; confirm against JobDefinition.
            }
        });

        return services;
    }

    // Treats null/whitespace as "unset" and substitutes the fallback.
    private static string Normalize(string? value, string fallback)
        => string.IsNullOrWhiteSpace(value) ? fallback : value;
}
|
||||
@@ -0,0 +1,23 @@
|
||||
using System;
|
||||
using Microsoft.Extensions.DependencyInjection;
|
||||
using StellaOps.Feedser.Storage.Mongo.Advisories;
|
||||
using StellaOps.Plugin;
|
||||
|
||||
namespace StellaOps.Feedser.Exporter.TrivyDb;
|
||||
|
||||
/// <summary>
/// Plugin surface for the Trivy DB exporter. The exporter is considered
/// available only when an <see cref="IAdvisoryStore"/> is registered in the host
/// container; instances are created through the container so all collaborators
/// are resolved from it.
/// </summary>
public sealed class TrivyDbExporterPlugin : IExporterPlugin
{
    /// <summary>Stable exporter name, shared with <see cref="TrivyDbFeedExporter"/>.</summary>
    public string Name => TrivyDbFeedExporter.ExporterName;

    /// <summary>True when the advisory store the exporter reads from is registered.</summary>
    public bool IsAvailable(IServiceProvider services)
        => services is null
            ? throw new ArgumentNullException(nameof(services))
            : services.GetService<IAdvisoryStore>() is not null;

    /// <summary>Builds the exporter with constructor dependencies resolved from the container.</summary>
    public IFeedExporter Create(IServiceProvider services)
        => services is null
            ? throw new ArgumentNullException(nameof(services))
            : ActivatorUtilities.CreateInstance<TrivyDbFeedExporter>(services);
}
|
||||
@@ -0,0 +1,365 @@
|
||||
using System;
|
||||
using System.Globalization;
|
||||
using System.IO;
|
||||
using System.IO.Compression;
|
||||
using System.Linq;
|
||||
using System.Security.Cryptography;
|
||||
using System.Text.Json;
|
||||
using System.Text.Json.Serialization;
|
||||
using System.Threading;
|
||||
using System.Threading.Tasks;
|
||||
using System.Formats.Tar;
|
||||
using Microsoft.Extensions.Logging;
|
||||
using Microsoft.Extensions.Options;
|
||||
using StellaOps.Feedser.Exporter.Json;
|
||||
using StellaOps.Feedser.Storage.Mongo.Advisories;
|
||||
using StellaOps.Feedser.Storage.Mongo.Exporting;
|
||||
using StellaOps.Plugin;
|
||||
|
||||
namespace StellaOps.Feedser.Exporter.TrivyDb;
|
||||
|
||||
public sealed class TrivyDbFeedExporter : IFeedExporter
|
||||
{
|
||||
public const string ExporterName = "trivy-db";
|
||||
public const string ExporterId = "export:trivy-db";
|
||||
|
||||
private readonly IAdvisoryStore _advisoryStore;
|
||||
private readonly IJsonExportPathResolver _pathResolver;
|
||||
private readonly TrivyDbExportOptions _options;
|
||||
private readonly TrivyDbPackageBuilder _packageBuilder;
|
||||
private readonly TrivyDbOciWriter _ociWriter;
|
||||
private readonly ExportStateManager _stateManager;
|
||||
private readonly TrivyDbExportPlanner _exportPlanner;
|
||||
private readonly ITrivyDbBuilder _builder;
|
||||
private readonly ITrivyDbOrasPusher _orasPusher;
|
||||
private readonly ILogger<TrivyDbFeedExporter> _logger;
|
||||
private readonly TimeProvider _timeProvider;
|
||||
private readonly string _exporterVersion;
|
||||
|
||||
public TrivyDbFeedExporter(
|
||||
IAdvisoryStore advisoryStore,
|
||||
IJsonExportPathResolver pathResolver,
|
||||
IOptions<TrivyDbExportOptions> options,
|
||||
TrivyDbPackageBuilder packageBuilder,
|
||||
TrivyDbOciWriter ociWriter,
|
||||
ExportStateManager stateManager,
|
||||
TrivyDbExportPlanner exportPlanner,
|
||||
ITrivyDbBuilder builder,
|
||||
ITrivyDbOrasPusher orasPusher,
|
||||
ILogger<TrivyDbFeedExporter> logger,
|
||||
TimeProvider? timeProvider = null)
|
||||
{
|
||||
_advisoryStore = advisoryStore ?? throw new ArgumentNullException(nameof(advisoryStore));
|
||||
_pathResolver = pathResolver ?? throw new ArgumentNullException(nameof(pathResolver));
|
||||
_options = options?.Value ?? throw new ArgumentNullException(nameof(options));
|
||||
_packageBuilder = packageBuilder ?? throw new ArgumentNullException(nameof(packageBuilder));
|
||||
_ociWriter = ociWriter ?? throw new ArgumentNullException(nameof(ociWriter));
|
||||
_stateManager = stateManager ?? throw new ArgumentNullException(nameof(stateManager));
|
||||
_exportPlanner = exportPlanner ?? throw new ArgumentNullException(nameof(exportPlanner));
|
||||
_builder = builder ?? throw new ArgumentNullException(nameof(builder));
|
||||
_orasPusher = orasPusher ?? throw new ArgumentNullException(nameof(orasPusher));
|
||||
_logger = logger ?? throw new ArgumentNullException(nameof(logger));
|
||||
_timeProvider = timeProvider ?? TimeProvider.System;
|
||||
_exporterVersion = ExporterVersion.GetVersion(typeof(TrivyDbFeedExporter));
|
||||
}
|
||||
|
||||
/// <summary>Name of this exporter; returns the <c>ExporterName</c> constant.</summary>
public string Name => ExporterName;
|
||||
|
||||
/// <summary>
/// Runs one full Trivy DB export: writes the JSON tree, plans against prior export state,
/// builds and packages the database, writes the OCI layout, optionally pushes via ORAS,
/// records the new state, and emits the offline bundle.
/// </summary>
/// <param name="services">Service provider supplied by the job host (not used directly here).</param>
/// <param name="cancellationToken">Cancels the long-running I/O stages.</param>
public async Task ExportAsync(IServiceProvider services, CancellationToken cancellationToken)
{
    // Export identity derives from the current clock: one id shared by the tree, tag and reference.
    var exportedAt = _timeProvider.GetUtcNow();
    var exportId = exportedAt.ToString(_options.TagFormat, CultureInfo.InvariantCulture);
    var reference = $"{_options.ReferencePrefix}:{exportId}";

    _logger.LogInformation("Starting Trivy DB export {ExportId}", exportId);

    // Stage 1: materialize the JSON advisory tree from the store.
    var jsonBuilder = new JsonExportSnapshotBuilder(_options.Json, _pathResolver);
    var advisoryStream = _advisoryStore.StreamAsync(cancellationToken);
    var jsonResult = await jsonBuilder.WriteAsync(advisoryStream, exportedAt, exportId, cancellationToken).ConfigureAwait(false);

    _logger.LogInformation(
        "Prepared Trivy JSON tree {ExportId} with {AdvisoryCount} advisories ({Bytes} bytes)",
        exportId,
        jsonResult.AdvisoryCount,
        jsonResult.TotalBytes);

    // Stage 2: compare the tree digest against the previously recorded export.
    var treeDigest = ExportDigestCalculator.ComputeTreeDigest(jsonResult);
    var existingState = await _stateManager.GetAsync(ExporterId, cancellationToken).ConfigureAwait(false);
    var plan = _exportPlanner.CreatePlan(existingState, treeDigest);

    if (plan.Mode == TrivyDbExportMode.Skip)
    {
        // Nothing changed: skip packaging entirely, optionally cleaning the working tree.
        _logger.LogInformation(
            "Trivy DB export {ExportId} unchanged from base {BaseExport}; skipping OCI packaging.",
            exportId,
            plan.BaseExportId ?? "(none)");

        if (!_options.KeepWorkingTree)
        {
            TryDeleteDirectory(jsonResult.ExportDirectory);
        }

        return;
    }

    // Stage 3: build the database archive and assemble the metadata document.
    var builderResult = await _builder.BuildAsync(jsonResult, exportedAt, exportId, cancellationToken).ConfigureAwait(false);
    var metadataBytes = CreateMetadataJson(builderResult.BuilderMetadata, treeDigest, jsonResult, exportedAt);

    try
    {
        // Stage 4: package config + archive into an OCI manifest and write the layout.
        var package = _packageBuilder.BuildPackage(new TrivyDbPackageRequest(
            metadataBytes,
            builderResult.ArchivePath,
            builderResult.ArchiveDigest,
            builderResult.ArchiveLength,
            exportedAt,
            exportedAt.ToString(_options.DatabaseVersionFormat, CultureInfo.InvariantCulture)));

        var destination = _options.GetExportRoot(exportId);
        var ociResult = await _ociWriter.WriteAsync(package, destination, reference, cancellationToken).ConfigureAwait(false);

        // Stage 5 (optional): push the layout to a registry via ORAS.
        if (_options.Oras.Enabled)
        {
            await _orasPusher.PushAsync(destination, reference, exportId, cancellationToken).ConfigureAwait(false);
        }

        _logger.LogInformation(
            "Trivy DB export {ExportId} wrote manifest {ManifestDigest}",
            exportId,
            ociResult.ManifestDigest);

        // Stage 6: record the export so subsequent runs can plan a skip against it.
        await _stateManager.StoreFullExportAsync(
            ExporterId,
            exportId,
            ociResult.ManifestDigest,
            cursor: treeDigest,
            targetRepository: _options.TargetRepository,
            exporterVersion: _exporterVersion,
            cancellationToken: cancellationToken).ConfigureAwait(false);

        // Stage 7 (optional): write the deterministic offline bundle tarball.
        await CreateOfflineBundleAsync(destination, exportId, exportedAt, cancellationToken).ConfigureAwait(false);
    }
    finally
    {
        // Builder scratch space is removed even when packaging or pushing fails.
        TryDeleteDirectory(builderResult.WorkingDirectory);
    }

    if (!_options.KeepWorkingTree)
    {
        TryDeleteDirectory(jsonResult.ExportDirectory);
    }
}
|
||||
|
||||
// Cached serializer settings: allocating a JsonSerializerOptions per call defeats
// System.Text.Json's internal metadata caching (CA1869).
private static readonly JsonSerializerOptions MetadataSerializerOptions = new()
{
    PropertyNamingPolicy = JsonNamingPolicy.CamelCase,
    DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingNull,
    WriteIndented = false,
};

/// <summary>
/// Builds the metadata.json payload describing the export: tree digest, advisory count,
/// total bytes, exporter version, and any builder-reported metadata.
/// </summary>
/// <param name="builderMetadata">Raw builder metadata payload (may be empty).</param>
/// <param name="treeDigest">Digest of the canonical JSON tree.</param>
/// <param name="result">Outcome of the JSON tree write (counts/sizes).</param>
/// <param name="exportedAt">Export timestamp; serialized as UTC.</param>
/// <returns>UTF-8 JSON bytes, camelCase, nulls omitted.</returns>
private byte[] CreateMetadataJson(
    ReadOnlyMemory<byte> builderMetadata,
    string treeDigest,
    JsonExportResult result,
    DateTimeOffset exportedAt)
{
    var metadata = new TrivyMetadata
    {
        GeneratedAt = exportedAt.UtcDateTime,
        AdvisoryCount = result.AdvisoryCount,
        TreeDigest = treeDigest,
        TreeBytes = result.TotalBytes,
        ExporterVersion = _exporterVersion,
        Builder = ParseBuilderMetadata(builderMetadata.Span),
    };

    return JsonSerializer.SerializeToUtf8Bytes(metadata, MetadataSerializerOptions);
}
|
||||
|
||||
// Cached case-insensitive deserializer options; creating JsonSerializerOptions on
// every call is expensive (CA1869).
private static readonly JsonSerializerOptions BuilderMetadataSerializerOptions = new()
{
    PropertyNameCaseInsensitive = true,
};

/// <summary>
/// Deserializes builder-emitted metadata. Returns null for an empty or unparseable
/// payload — builder metadata is optional and must never fail the export.
/// </summary>
private static BuilderMetadata? ParseBuilderMetadata(ReadOnlySpan<byte> payload)
{
    if (payload.IsEmpty)
    {
        return null;
    }

    try
    {
        return JsonSerializer.Deserialize<BuilderMetadata>(payload, BuilderMetadataSerializerOptions);
    }
    catch
    {
        // Deliberate best-effort: malformed builder metadata degrades to null, never throws.
        return null;
    }
}
|
||||
|
||||
/// <summary>
/// Packages the OCI layout directory into a deterministic .tar.gz offline bundle,
/// placed next to the layout (or at a configured absolute path), then logs its
/// size and sha256 digest.
/// </summary>
/// <param name="layoutPath">Root of the OCI layout to bundle.</param>
/// <param name="exportId">Export id used for the default bundle file name.</param>
/// <param name="exportedAt">Timestamp applied uniformly to every tar entry.</param>
/// <param name="cancellationToken">Cancels digest/header post-processing.</param>
private async Task CreateOfflineBundleAsync(string layoutPath, string exportId, DateTimeOffset exportedAt, CancellationToken cancellationToken)
{
    if (!_options.OfflineBundle.Enabled)
    {
        return;
    }

    // Default name is "<exportId>.offline.tar.gz"; a configured name may embed "{exportId}".
    var parent = Path.GetDirectoryName(layoutPath) ?? layoutPath;
    var fileName = string.IsNullOrWhiteSpace(_options.OfflineBundle.FileName)
        ? $"{exportId}.offline.tar.gz"
        : _options.OfflineBundle.FileName.Replace("{exportId}", exportId, StringComparison.Ordinal);

    var bundlePath = Path.IsPathRooted(fileName) ? fileName : Path.Combine(parent, fileName);
    Directory.CreateDirectory(Path.GetDirectoryName(bundlePath)!);

    if (File.Exists(bundlePath))
    {
        File.Delete(bundlePath);
    }

    // Entries are sorted ordinally so archive layout is reproducible run-to-run.
    var normalizedRoot = Path.GetFullPath(layoutPath);
    var directories = Directory.GetDirectories(normalizedRoot, "*", SearchOption.AllDirectories)
        .Select(dir => NormalizeTarPath(normalizedRoot, dir) + "/")
        .OrderBy(static path => path, StringComparer.Ordinal)
        .ToArray();

    var files = Directory.GetFiles(normalizedRoot, "*", SearchOption.AllDirectories)
        .Select(file => NormalizeTarPath(normalizedRoot, file))
        .OrderBy(static path => path, StringComparer.Ordinal)
        .ToArray();

    await using (var archiveStream = new FileStream(
        bundlePath,
        FileMode.Create,
        FileAccess.Write,
        FileShare.None,
        bufferSize: 81920,
        options: FileOptions.Asynchronous | FileOptions.SequentialScan))
    await using (var gzip = new GZipStream(archiveStream, CompressionLevel.SmallestSize, leaveOpen: true))
    await using (var writer = new TarWriter(gzip, TarEntryFormat.Pax, leaveOpen: false))
    {
        // A single fixed timestamp keeps tar headers deterministic.
        var timestamp = exportedAt.UtcDateTime;

        foreach (var directory in directories)
        {
            // Directory entries: rwxr-xr-x.
            var entry = new PaxTarEntry(TarEntryType.Directory, directory)
            {
                ModificationTime = timestamp,
                Mode = UnixFileMode.UserRead | UnixFileMode.UserWrite | UnixFileMode.UserExecute |
                       UnixFileMode.GroupRead | UnixFileMode.GroupExecute |
                       UnixFileMode.OtherRead | UnixFileMode.OtherExecute,
            };

            writer.WriteEntry(entry);
        }

        foreach (var relativePath in files)
        {
            var fullPath = Path.Combine(normalizedRoot, relativePath.Replace('/', Path.DirectorySeparatorChar));
            // File entries: rw-r--r--.
            var entry = new PaxTarEntry(TarEntryType.RegularFile, relativePath)
            {
                ModificationTime = timestamp,
                Mode = UnixFileMode.UserRead | UnixFileMode.UserWrite |
                       UnixFileMode.GroupRead |
                       UnixFileMode.OtherRead,
            };

            // NOTE(review): WriteEntry is synchronous; TarWriter.WriteEntryAsync with the
            // token would honor cancellation while streaming file contents — confirm intent.
            await using var source = new FileStream(
                fullPath,
                FileMode.Open,
                FileAccess.Read,
                FileShare.Read,
                bufferSize: 81920,
                options: FileOptions.Asynchronous | FileOptions.SequentialScan);
            entry.DataStream = source;
            writer.WriteEntry(entry);
        }
    }

    // Zero the gzip MTIME header field so identical content yields identical bytes.
    await ZeroGzipMtimeAsync(bundlePath, cancellationToken).ConfigureAwait(false);

    var digest = await ComputeSha256Async(bundlePath, cancellationToken).ConfigureAwait(false);
    var length = new FileInfo(bundlePath).Length;
    _logger.LogInformation("Wrote offline bundle {BundlePath} ({Length} bytes, digest {Digest})", bundlePath, length, digest);
}
|
||||
|
||||
/// <summary>
/// Removes a directory tree if it exists. Failures are deliberately swallowed:
/// this is best-effort cleanup of scratch space.
/// </summary>
private static void TryDeleteDirectory(string directory)
{
    if (!Directory.Exists(directory))
    {
        return;
    }

    try
    {
        Directory.Delete(directory, recursive: true);
    }
    catch
    {
        // A locked or already-removed directory is not fatal here.
    }
}
|
||||
|
||||
/// <summary>
/// Overwrites the 4-byte gzip MTIME field (bytes 4-7 of the member header, RFC 1952)
/// with zeros so archive bytes do not depend on creation time. Files shorter than a
/// minimal gzip header (10 bytes) are left untouched.
/// </summary>
private static async Task ZeroGzipMtimeAsync(string archivePath, CancellationToken cancellationToken)
{
    const int MtimeOffset = 4;
    const int MtimeLength = 4;

    var stream = new FileStream(
        archivePath,
        FileMode.Open,
        FileAccess.ReadWrite,
        FileShare.None,
        bufferSize: 8,
        options: FileOptions.Asynchronous);
    await using (stream)
    {
        if (stream.Length < 10)
        {
            return;
        }

        stream.Seek(MtimeOffset, SeekOrigin.Begin);
        await stream.WriteAsync(new byte[MtimeLength], cancellationToken).ConfigureAwait(false);
        await stream.FlushAsync(cancellationToken).ConfigureAwait(false);
    }
}
|
||||
|
||||
/// <summary>
/// Streams a file through SHA-256 and returns the digest in canonical
/// lowercase "sha256:&lt;hex&gt;" form.
/// </summary>
private static async Task<string> ComputeSha256Async(string path, CancellationToken cancellationToken)
{
    var stream = new FileStream(
        path,
        FileMode.Open,
        FileAccess.Read,
        FileShare.Read,
        bufferSize: 81920,
        options: FileOptions.Asynchronous | FileOptions.SequentialScan);
    await using (stream)
    {
        var digestBytes = await SHA256.HashDataAsync(stream, cancellationToken).ConfigureAwait(false);
        return string.Concat("sha256:", Convert.ToHexString(digestBytes).ToLowerInvariant());
    }
}
|
||||
|
||||
/// <summary>
/// Converts an absolute path under <paramref name="root"/> into a forward-slash
/// relative tar entry name; the root itself maps to ".".
/// </summary>
private static string NormalizeTarPath(string root, string fullPath)
{
    var slashed = Path.GetRelativePath(root, fullPath)
        .Replace(Path.DirectorySeparatorChar, '/');
    return slashed.Length == 0 ? "." : slashed;
}
|
||||
|
||||
// Serialized shape of metadata.json (camelCase, nulls omitted — see CreateMetadataJson).
private sealed class TrivyMetadata
{
    // UTC instant the export was generated.
    public DateTime GeneratedAt { get; set; }

    // Number of advisories written into the JSON tree.
    public int AdvisoryCount { get; set; }

    // Digest of the canonical JSON tree for change detection.
    public string TreeDigest { get; set; } = string.Empty;

    // Total size in bytes of the JSON tree.
    public long TreeBytes { get; set; }

    // Version of this exporter assembly.
    public string ExporterVersion { get; set; } = string.Empty;

    // Optional builder-reported metadata; null when absent or unparseable.
    public BuilderMetadata? Builder { get; set; }
}
|
||||
|
||||
// Subset of the builder-emitted metadata payload deserialized in ParseBuilderMetadata
// (property matching is case-insensitive there).
private sealed class BuilderMetadata
{
    // Explicit name pins the JSON key regardless of naming policy.
    [JsonPropertyName("Version")]
    public int Version { get; set; }

    // When the database is next expected to be updated.
    public DateTime NextUpdate { get; set; }

    // When the database content was last updated.
    public DateTime UpdatedAt { get; set; }

    // When the source data was downloaded; optional in the payload.
    public DateTime? DownloadedAt { get; set; }
}
|
||||
}
|
||||
@@ -0,0 +1,9 @@
|
||||
namespace StellaOps.Feedser.Exporter.TrivyDb;
|
||||
|
||||
/// <summary>
/// Media type constants used when packaging Trivy DB exports as OCI artifacts.
/// </summary>
public static class TrivyDbMediaTypes
{
    /// <summary>Standard OCI image manifest media type.</summary>
    public const string OciManifest = "application/vnd.oci.image.manifest.v1+json";
    /// <summary>Standard OCI image index media type.</summary>
    public const string OciImageIndex = "application/vnd.oci.image.index.v1+json";
    /// <summary>Trivy DB config blob media type.</summary>
    public const string TrivyConfig = "application/vnd.aquasec.trivy.config.v1+json";
    /// <summary>Trivy DB archive layer media type (gzipped tar).</summary>
    public const string TrivyLayer = "application/vnd.aquasec.trivy.db.layer.v1.tar+gzip";
}
|
||||
@@ -0,0 +1,8 @@
|
||||
using System.Collections.Generic;
|
||||
|
||||
namespace StellaOps.Feedser.Exporter.TrivyDb;
|
||||
|
||||
/// <summary>
/// Outcome of writing an OCI layout: the layout root directory, the manifest digest,
/// and the full set of blob digests that were written.
/// </summary>
public sealed record TrivyDbOciWriteResult(
    string RootDirectory,
    string ManifestDigest,
    IReadOnlyCollection<string> BlobDigests);
|
||||
@@ -0,0 +1,172 @@
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.IO;
|
||||
using System.Linq;
|
||||
using System.Text;
|
||||
using System.Text.Json;
|
||||
using System.Threading;
|
||||
using System.Threading.Tasks;
|
||||
|
||||
namespace StellaOps.Feedser.Exporter.TrivyDb;
|
||||
|
||||
/// <summary>
/// Writes a Trivy DB package to an OCI image layout directory with deterministic content:
/// fixed serializer settings, ordinal-sorted digests, and a single timestamp applied to
/// every file and directory.
/// </summary>
public sealed class TrivyDbOciWriter
{
    // Cached serializer settings so manifest/index bytes are reproducible.
    private static readonly JsonSerializerOptions SerializerOptions = new()
    {
        PropertyNamingPolicy = JsonNamingPolicy.CamelCase,
        DefaultIgnoreCondition = System.Text.Json.Serialization.JsonIgnoreCondition.WhenWritingNull,
        WriteIndented = false,
    };

    // Canonical OCI layout marker file content (imageLayoutVersion 1.0.0).
    private static readonly byte[] OciLayoutBytes = Encoding.UTF8.GetBytes("{\"imageLayoutVersion\":\"1.0.0\"}");

    /// <summary>
    /// Materializes <paramref name="package"/> as an OCI image layout rooted at
    /// <paramref name="destination"/>. Any existing directory there is deleted first
    /// so output is fully reproducible.
    /// </summary>
    /// <param name="package">Manifest, config, blobs, and metadata to write.</param>
    /// <param name="destination">Target directory (recreated from scratch).</param>
    /// <param name="reference">Tag recorded under org.opencontainers.image.ref.name.</param>
    /// <param name="cancellationToken">Cancels file writes.</param>
    /// <returns>Layout root, manifest digest, and sorted blob digests.</returns>
    public async Task<TrivyDbOciWriteResult> WriteAsync(
        TrivyDbPackage package,
        string destination,
        string reference,
        CancellationToken cancellationToken)
    {
        if (package is null)
        {
            throw new ArgumentNullException(nameof(package));
        }

        if (string.IsNullOrWhiteSpace(destination))
        {
            throw new ArgumentException("Destination directory must be provided.", nameof(destination));
        }

        if (string.IsNullOrWhiteSpace(reference))
        {
            throw new ArgumentException("Reference tag must be provided.", nameof(reference));
        }

        // Start from a clean slate so stale blobs never leak into the layout.
        var root = Path.GetFullPath(destination);
        if (Directory.Exists(root))
        {
            Directory.Delete(root, recursive: true);
        }

        Directory.CreateDirectory(root);
        // One timestamp for every file keeps the tree byte-stable for a given export.
        var timestamp = package.Config.GeneratedAt.UtcDateTime;

        await WriteFileAsync(Path.Combine(root, "metadata.json"), package.MetadataJson.ToArray(), timestamp, cancellationToken).ConfigureAwait(false);
        await WriteFileAsync(Path.Combine(root, "oci-layout"), OciLayoutBytes, timestamp, cancellationToken).ConfigureAwait(false);

        var blobsRoot = Path.Combine(root, "blobs", "sha256");
        Directory.CreateDirectory(blobsRoot);
        Directory.SetLastWriteTimeUtc(Path.GetDirectoryName(blobsRoot)!, timestamp);
        Directory.SetLastWriteTimeUtc(blobsRoot, timestamp);

        // Write each blob exactly once even if the package lists duplicates.
        var writtenDigests = new HashSet<string>(StringComparer.Ordinal);
        foreach (var pair in package.Blobs)
        {
            if (writtenDigests.Add(pair.Key))
            {
                await WriteBlobAsync(blobsRoot, pair.Key, pair.Value, timestamp, cancellationToken).ConfigureAwait(false);
            }
        }

        // The manifest itself is stored as a blob and referenced from index.json.
        var manifestBytes = JsonSerializer.SerializeToUtf8Bytes(package.Manifest, SerializerOptions);
        var manifestDigest = ComputeDigest(manifestBytes);
        if (writtenDigests.Add(manifestDigest))
        {
            await WriteBlobAsync(blobsRoot, manifestDigest, TrivyDbBlob.FromBytes(manifestBytes), timestamp, cancellationToken).ConfigureAwait(false);
        }

        var manifestDescriptor = new OciDescriptor(
            TrivyDbMediaTypes.OciManifest,
            manifestDigest,
            manifestBytes.LongLength,
            new Dictionary<string, string>
            {
                ["org.opencontainers.image.ref.name"] = reference,
            });
        var index = new OciIndex(2, new[] { manifestDescriptor });
        var indexBytes = JsonSerializer.SerializeToUtf8Bytes(index, SerializerOptions);
        await WriteFileAsync(Path.Combine(root, "index.json"), indexBytes, timestamp, cancellationToken).ConfigureAwait(false);

        Directory.SetLastWriteTimeUtc(root, timestamp);

        // Sorted digests give callers a deterministic inventory of what was written.
        var blobDigests = writtenDigests.ToArray();
        Array.Sort(blobDigests, StringComparer.Ordinal);
        return new TrivyDbOciWriteResult(root, manifestDigest, blobDigests);
    }

    /// <summary>
    /// Writes <paramref name="bytes"/> to <paramref name="path"/>, creating the parent
    /// directory and pinning both to <paramref name="utcTimestamp"/>.
    /// </summary>
    private static async Task WriteFileAsync(string path, byte[] bytes, DateTime utcTimestamp, CancellationToken cancellationToken)
    {
        var directory = Path.GetDirectoryName(path);
        if (!string.IsNullOrEmpty(directory))
        {
            Directory.CreateDirectory(directory);
            Directory.SetLastWriteTimeUtc(directory, utcTimestamp);
        }

        await File.WriteAllBytesAsync(path, bytes, cancellationToken).ConfigureAwait(false);
        File.SetLastWriteTimeUtc(path, utcTimestamp);
    }

    /// <summary>
    /// Streams a blob's content into blobs/sha256/&lt;hex&gt; with a pinned timestamp.
    /// </summary>
    private static async Task WriteBlobAsync(string blobsRoot, string digest, TrivyDbBlob blob, DateTime utcTimestamp, CancellationToken cancellationToken)
    {
        var fileName = ResolveDigestFileName(digest);
        var path = Path.Combine(blobsRoot, fileName);
        var directory = Path.GetDirectoryName(path);
        if (!string.IsNullOrEmpty(directory))
        {
            Directory.CreateDirectory(directory);
            Directory.SetLastWriteTimeUtc(directory, utcTimestamp);
        }

        await using var source = await blob.OpenReadAsync(cancellationToken).ConfigureAwait(false);
        await using var destination = new FileStream(
            path,
            FileMode.Create,
            FileAccess.Write,
            FileShare.None,
            bufferSize: 81920,
            options: FileOptions.Asynchronous | FileOptions.SequentialScan);

        await source.CopyToAsync(destination, cancellationToken).ConfigureAwait(false);
        await destination.FlushAsync(cancellationToken).ConfigureAwait(false);
        File.SetLastWriteTimeUtc(path, utcTimestamp);
    }

    /// <summary>
    /// Extracts the hex component of a "sha256:&lt;hex&gt;" digest for use as the blob
    /// file name; rejects other algorithms and empty digests.
    /// </summary>
    private static string ResolveDigestFileName(string digest)
    {
        if (!digest.StartsWith("sha256:", StringComparison.Ordinal))
        {
            throw new InvalidOperationException($"Only sha256 digests are supported. Received '{digest}'.");
        }

        var hex = digest[7..];
        if (hex.Length == 0)
        {
            throw new InvalidOperationException("Digest hex component cannot be empty.");
        }

        return hex;
    }

    /// <summary>
    /// Computes the canonical lowercase "sha256:&lt;hex&gt;" digest of a payload.
    /// Replaces the previous hand-rolled stackalloc lowercase loop with the same
    /// Convert.ToHexString idiom used elsewhere in this exporter.
    /// </summary>
    private static string ComputeDigest(ReadOnlySpan<byte> payload)
    {
        var hash = System.Security.Cryptography.SHA256.HashData(payload);
        return $"sha256:{Convert.ToHexString(hash).ToLowerInvariant()}";
    }
}
|
||||
@@ -0,0 +1,209 @@
|
||||
using System;
|
||||
using System.Diagnostics;
|
||||
using System.IO;
|
||||
using System.Linq;
|
||||
using System.Text;
|
||||
using System.Threading;
|
||||
using System.Threading.Tasks;
|
||||
using Microsoft.Extensions.Logging;
|
||||
using Microsoft.Extensions.Options;
|
||||
|
||||
namespace StellaOps.Feedser.Exporter.TrivyDb;
|
||||
|
||||
/// <summary>
/// Pushes a locally written OCI layout to a registry by shelling out to the ORAS CLI
/// (`oras cp --from-oci-layout ...`).
/// </summary>
public sealed class TrivyDbOrasPusher : ITrivyDbOrasPusher
{
    private readonly TrivyDbExportOptions _options;
    private readonly ILogger<TrivyDbOrasPusher> _logger;

    public TrivyDbOrasPusher(IOptions<TrivyDbExportOptions> options, ILogger<TrivyDbOrasPusher> logger)
    {
        _options = options?.Value ?? throw new ArgumentNullException(nameof(options));
        _logger = logger ?? throw new ArgumentNullException(nameof(logger));
    }

    /// <summary>
    /// Copies the layout at <paramref name="layoutPath"/> to <paramref name="reference"/>.
    /// No-op when ORAS is disabled in options.
    /// </summary>
    /// <param name="layoutPath">Root of the OCI layout on disk.</param>
    /// <param name="reference">Target registry reference (repo:tag).</param>
    /// <param name="exportId">Fallback tag when the reference carries none.</param>
    /// <param name="cancellationToken">Kills the CLI process tree when cancelled.</param>
    /// <exception cref="InvalidOperationException">Empty reference, start failure, or non-zero exit code.</exception>
    /// <exception cref="DirectoryNotFoundException">Layout directory does not exist.</exception>
    public async Task PushAsync(string layoutPath, string reference, string exportId, CancellationToken cancellationToken)
    {
        var orasOptions = _options.Oras;
        if (!orasOptions.Enabled)
        {
            return;
        }

        if (string.IsNullOrWhiteSpace(reference))
        {
            throw new InvalidOperationException("ORAS push requested but reference is empty.");
        }

        if (!Directory.Exists(layoutPath))
        {
            throw new DirectoryNotFoundException($"OCI layout directory '{layoutPath}' does not exist.");
        }

        var executable = string.IsNullOrWhiteSpace(orasOptions.ExecutablePath) ? "oras" : orasOptions.ExecutablePath;
        var tag = ResolveTag(reference, exportId);
        var layoutReference = $"{layoutPath}:{tag}";

        var startInfo = new ProcessStartInfo
        {
            FileName = executable,
            RedirectStandardOutput = true,
            RedirectStandardError = true,
            UseShellExecute = false,
        };

        // `oras cp --from-oci-layout <layout>:<tag> [flags...] <reference>`
        startInfo.ArgumentList.Add("cp");
        startInfo.ArgumentList.Add("--from-oci-layout");
        startInfo.ArgumentList.Add(layoutReference);
        if (orasOptions.SkipTlsVerify)
        {
            startInfo.ArgumentList.Add("--insecure");
        }
        if (orasOptions.UseHttp)
        {
            startInfo.ArgumentList.Add("--plain-http");
        }

        if (orasOptions.AdditionalArguments is { Count: > 0 })
        {
            foreach (var arg in orasOptions.AdditionalArguments)
            {
                if (!string.IsNullOrWhiteSpace(arg))
                {
                    startInfo.ArgumentList.Add(arg);
                }
            }
        }

        startInfo.ArgumentList.Add(reference);

        if (!string.IsNullOrWhiteSpace(orasOptions.WorkingDirectory))
        {
            startInfo.WorkingDirectory = orasOptions.WorkingDirectory;
        }

        if (!orasOptions.InheritEnvironment)
        {
            startInfo.Environment.Clear();
        }

        if (orasOptions.Environment is { Count: > 0 })
        {
            foreach (var kvp in orasOptions.Environment)
            {
                if (!string.IsNullOrEmpty(kvp.Key))
                {
                    startInfo.Environment[kvp.Key] = kvp.Value;
                }
            }
        }

        using var process = new Process { StartInfo = startInfo };
        var stdout = new StringBuilder();
        var stderr = new StringBuilder();
        // Fix: RunContinuationsAsynchronously prevents awaiter continuations from running
        // inline on the process event-callback threads (deadlock/starvation hazard).
        var stdoutCompletion = new TaskCompletionSource<object?>(TaskCreationOptions.RunContinuationsAsynchronously);
        var stderrCompletion = new TaskCompletionSource<object?>(TaskCreationOptions.RunContinuationsAsynchronously);

        // A null Data event signals end-of-stream for the async readers.
        process.OutputDataReceived += (_, e) =>
        {
            if (e.Data is null)
            {
                stdoutCompletion.TrySetResult(null);
            }
            else
            {
                stdout.AppendLine(e.Data);
            }
        };

        process.ErrorDataReceived += (_, e) =>
        {
            if (e.Data is null)
            {
                stderrCompletion.TrySetResult(null);
            }
            else
            {
                stderr.AppendLine(e.Data);
            }
        };

        _logger.LogInformation("Pushing Trivy DB export {ExportId} to {Reference} using {Executable}", exportId, reference, executable);

        // Fix: the "Start() returned false" throw used to sit inside the try block and was
        // immediately caught and re-wrapped by its own catch; keep the two failure paths distinct.
        bool started;
        try
        {
            started = process.Start();
        }
        catch (Exception ex)
        {
            throw new InvalidOperationException($"Failed to start '{executable}'.", ex);
        }

        if (!started)
        {
            throw new InvalidOperationException($"Failed to start '{executable}'.");
        }

        process.BeginOutputReadLine();
        process.BeginErrorReadLine();

        // On cancellation, tear down the whole CLI process tree (best effort).
        using var registration = cancellationToken.Register(() =>
        {
            try
            {
                if (!process.HasExited)
                {
                    process.Kill(entireProcessTree: true);
                }
            }
            catch
            {
                // The process may already have exited; nothing to do.
            }
        });

#if NET8_0_OR_GREATER
        await process.WaitForExitAsync(cancellationToken).ConfigureAwait(false);
#else
        await Task.Run(() => process.WaitForExit(), cancellationToken).ConfigureAwait(false);
#endif

        // Drain both streams fully before inspecting the exit code.
        await Task.WhenAll(stdoutCompletion.Task, stderrCompletion.Task).ConfigureAwait(false);

        if (process.ExitCode != 0)
        {
            _logger.LogError("ORAS push for {Reference} failed with code {Code}. stderr: {Stderr}", reference, process.ExitCode, stderr.ToString());
            throw new InvalidOperationException($"'{executable}' exited with code {process.ExitCode}.");
        }

        if (stdout.Length > 0)
        {
            _logger.LogDebug("ORAS push output: {Stdout}", stdout.ToString());
        }

        if (stderr.Length > 0)
        {
            _logger.LogWarning("ORAS push warnings: {Stderr}", stderr.ToString());
        }
    }

    /// <summary>
    /// Extracts the tag from an OCI reference ("repo:tag"), falling back to
    /// <paramref name="fallback"/> when no tag is present. Any "@digest" suffix is
    /// stripped first; a colon before the last slash is treated as a registry port.
    /// </summary>
    private static string ResolveTag(string reference, string fallback)
    {
        if (string.IsNullOrWhiteSpace(reference))
        {
            return fallback;
        }

        var atIndex = reference.IndexOf('@');
        if (atIndex >= 0)
        {
            reference = reference[..atIndex];
        }

        var slashIndex = reference.LastIndexOf('/');
        var colonIndex = reference.LastIndexOf(':');
        if (colonIndex > slashIndex && colonIndex >= 0)
        {
            return reference[(colonIndex + 1)..];
        }

        return fallback;
    }
}
|
||||
@@ -0,0 +1,9 @@
|
||||
using System.Collections.Generic;
|
||||
|
||||
namespace StellaOps.Feedser.Exporter.TrivyDb;
|
||||
|
||||
/// <summary>
/// Fully assembled Trivy DB OCI package: the manifest, its config document,
/// blob contents keyed by digest, and the raw metadata.json payload.
/// </summary>
public sealed record TrivyDbPackage(
    OciManifest Manifest,
    TrivyConfigDocument Config,
    IReadOnlyDictionary<string, TrivyDbBlob> Blobs,
    ReadOnlyMemory<byte> MetadataJson);
|
||||
@@ -0,0 +1,116 @@
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.Collections.Immutable;
|
||||
using System.IO;
|
||||
using System.Security.Cryptography;
|
||||
using System.Text.Json;
|
||||
using System.Text.Json.Serialization;
|
||||
|
||||
namespace StellaOps.Feedser.Exporter.TrivyDb;
|
||||
|
||||
/// <summary>
/// Assembles a <see cref="TrivyDbPackage"/> (OCI manifest, config document, blob map)
/// from a built database archive and its metadata payload.
/// </summary>
public sealed class TrivyDbPackageBuilder
{
    // Cached serializer settings so config serialization is deterministic and avoids
    // per-call JsonSerializerOptions allocation.
    private static readonly JsonSerializerOptions SerializerOptions = new()
    {
        PropertyNamingPolicy = JsonNamingPolicy.CamelCase,
        DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingNull,
        WriteIndented = false,
    };

    /// <summary>
    /// Validates the request and builds the package: a Trivy config blob plus the
    /// database archive layer, wrapped in an OCI manifest.
    /// </summary>
    /// <exception cref="ArgumentNullException">Request is null.</exception>
    /// <exception cref="ArgumentException">A required field is missing/blank.</exception>
    /// <exception cref="FileNotFoundException">The archive file does not exist.</exception>
    /// <exception cref="ArgumentOutOfRangeException">The archive length is negative.</exception>
    public TrivyDbPackage BuildPackage(TrivyDbPackageRequest request)
    {
        ArgumentNullException.ThrowIfNull(request);

        if (request.MetadataJson.IsEmpty)
        {
            throw new ArgumentException("Metadata JSON payload must be provided.", nameof(request));
        }

        if (string.IsNullOrWhiteSpace(request.DatabaseArchivePath))
        {
            throw new ArgumentException("Database archive path must be provided.", nameof(request));
        }

        if (!File.Exists(request.DatabaseArchivePath))
        {
            throw new FileNotFoundException("Database archive path not found.", request.DatabaseArchivePath);
        }

        if (string.IsNullOrWhiteSpace(request.DatabaseDigest))
        {
            throw new ArgumentException("Database archive digest must be provided.", nameof(request));
        }

        if (request.DatabaseLength < 0)
        {
            // Fix: report the actual parameter name. nameof(request.DatabaseLength)
            // yielded "DatabaseLength", which is not a parameter of this method.
            throw new ArgumentOutOfRangeException(nameof(request), "Database archive length cannot be negative.");
        }

        var metadataBytes = request.MetadataJson;
        // Normalize to UTC so the generated-at instant serializes consistently.
        var generatedAt = request.GeneratedAt.ToUniversalTime();
        var configDocument = new TrivyConfigDocument(
            TrivyDbMediaTypes.TrivyConfig,
            generatedAt,
            request.DatabaseVersion,
            request.DatabaseDigest,
            request.DatabaseLength);

        var configBytes = JsonSerializer.SerializeToUtf8Bytes(configDocument, SerializerOptions);
        var configDigest = ComputeDigest(configBytes);

        // Descriptor for the config blob; the title annotation names the file in the layout.
        var configDescriptor = new OciDescriptor(
            TrivyDbMediaTypes.TrivyConfig,
            configDigest,
            configBytes.LongLength,
            new Dictionary<string, string>
            {
                ["org.opencontainers.image.title"] = "config.json",
            });

        // Descriptor for the database archive layer (digest/length supplied by the caller).
        var layerDescriptor = new OciDescriptor(
            TrivyDbMediaTypes.TrivyLayer,
            request.DatabaseDigest,
            request.DatabaseLength,
            new Dictionary<string, string>
            {
                ["org.opencontainers.image.title"] = "db.tar.gz",
            });

        var manifest = new OciManifest(
            2,
            TrivyDbMediaTypes.OciManifest,
            configDescriptor,
            ImmutableArray.Create(layerDescriptor));

        var blobs = new Dictionary<string, TrivyDbBlob>(StringComparer.Ordinal)
        {
            [configDigest] = TrivyDbBlob.FromBytes(configBytes),
            [request.DatabaseDigest] = TrivyDbBlob.FromFile(request.DatabaseArchivePath, request.DatabaseLength),
        };

        return new TrivyDbPackage(manifest, configDocument, blobs, metadataBytes);
    }

    /// <summary>
    /// Computes the canonical lowercase "sha256:&lt;hex&gt;" digest of a payload.
    /// Replaces the previous hand-rolled stackalloc lowercase loop with the same
    /// Convert.ToHexString idiom used elsewhere in this exporter.
    /// </summary>
    private static string ComputeDigest(ReadOnlySpan<byte> payload)
    {
        var hash = SHA256.HashData(payload);
        return $"sha256:{Convert.ToHexString(hash).ToLowerInvariant()}";
    }
}
|
||||
@@ -0,0 +1,11 @@
|
||||
using System;
|
||||
|
||||
namespace StellaOps.Feedser.Exporter.TrivyDb;
|
||||
|
||||
/// <summary>
/// Inputs required to package a built Trivy database archive as an OCI artifact.
/// </summary>
/// <param name="MetadataJson">Raw metadata.json payload (must be non-empty).</param>
/// <param name="DatabaseArchivePath">Path to the database archive on disk.</param>
/// <param name="DatabaseDigest">Precomputed "sha256:..." digest of the archive.</param>
/// <param name="DatabaseLength">Archive size in bytes (must be non-negative).</param>
/// <param name="GeneratedAt">Generation instant (normalized to UTC by the builder).</param>
/// <param name="DatabaseVersion">Version string recorded in the config document.</param>
public sealed record TrivyDbPackageRequest(
    ReadOnlyMemory<byte> MetadataJson,
    string DatabaseArchivePath,
    string DatabaseDigest,
    long DatabaseLength,
    DateTimeOffset GeneratedAt,
    string DatabaseVersion);
|
||||
25
src/StellaOps.Feedser/StellaOps.Feedser.Merge/AGENTS.md
Normal file
25
src/StellaOps.Feedser/StellaOps.Feedser.Merge/AGENTS.md
Normal file
@@ -0,0 +1,25 @@
|
||||
# AGENTS
|
||||
## Role
|
||||
Deterministic merge and reconciliation engine; builds identity graph via aliases; applies precedence (PSIRT/OVAL > NVD; KEV flag only; regional feeds enrich); produces canonical advisory JSON and merge_event audit trail.
|
||||
## Scope
|
||||
- Identity: resolve advisory_key (prefer CVE, else PSIRT/Distro/JVN/BDU/GHSA/ICSA); unify aliases; detect collisions.
|
||||
- Precedence: override rules for affected ranges (vendor PSIRT/OVAL over registry), enrichment-only feeds (CERTs/JVN/RU-CERT), KEV toggles exploitKnown only.
|
||||
- Range comparers: RPM NEVRA comparer (epoch:version-release), Debian EVR comparer, SemVer range resolver; platform-aware selection.
|
||||
- Merge algorithm: stable ordering, pure functions, idempotence; compute beforeHash/afterHash over canonical form; write merge_event.
|
||||
- Conflict reporting: counters and logs for identity conflicts, reference merges, range overrides.
|
||||
## Participants
|
||||
- Storage.Mongo (reads raw mapped advisories, writes merged docs plus merge_event).
|
||||
- Models (canonical types).
|
||||
- Exporters (consume merged canonical).
|
||||
- Core/WebService (jobs: merge:run, with optional per-kind merge variants).
|
||||
## Interfaces & contracts
|
||||
- AdvisoryMergeService.MergeAsync(ids or byKind): returns summary {processed, merged, overrides, conflicts}.
|
||||
- Precedence table configurable but with sane defaults: RedHat/Ubuntu/Debian/SUSE > Vendor PSIRT > GHSA/OSV > NVD; CERTs enrich; KEV sets flags.
|
||||
- Range selection uses comparers: NevraComparer, DebEvrComparer, SemVerRange; deterministic tie-breakers.
|
||||
- Provenance propagation merges unique entries; references deduped by (url, type).
|
||||
## In/Out of scope
|
||||
In: merge logic, precedence policy, hashing, event records, comparers.
|
||||
Out: fetching/parsing, exporter packaging, signing.
|
||||
## Observability & security expectations
|
||||
- Metrics: merge.delta.count, merge.identity.conflicts, merge.range.overrides, merge.duration_ms.
|
||||
- Logs: decisions (why replaced), keys involved, hashes; avoid dumping large blobs; redact secrets (none expected).
|
||||
@@ -1,6 +1 @@
|
||||
namespace StellaOps.Feedser.Merge;
|
||||
|
||||
public class Class1
|
||||
{
|
||||
|
||||
}
|
||||
// Intentionally left blank; types moved into dedicated files.
|
||||
|
||||
@@ -0,0 +1,232 @@
|
||||
namespace StellaOps.Feedser.Merge.Comparers;
|
||||
|
||||
using System;
|
||||
using StellaOps.Feedser.Normalization.Distro;
|
||||
|
||||
public sealed class DebianEvrComparer : IComparer<DebianEvr>, IComparer<string>
|
||||
{
|
||||
/// <summary>Shared singleton instance; the comparer holds no state.</summary>
public static DebianEvrComparer Instance { get; } = new();

// Private constructor enforces use of the Instance singleton.
private DebianEvrComparer()
{
}
|
||||
|
||||
/// <summary>
/// Compares two raw EVR strings. Values that parse as Debian EVR are ordered by EVR
/// semantics; a parseable value sorts after an unparseable one; two unparseable values
/// fall back to ordinal string comparison. Null sorts first.
/// </summary>
public int Compare(string? x, string? y)
{
    if (ReferenceEquals(x, y))
    {
        return 0;
    }

    if (x is null || y is null)
    {
        return x is null ? -1 : 1;
    }

    var leftParsed = DebianEvr.TryParse(x, out var left);
    var rightParsed = DebianEvr.TryParse(y, out var right);

    return (leftParsed, rightParsed) switch
    {
        (true, true) => Compare(left, right),
        (true, false) => 1,
        (false, true) => -1,
        _ => string.Compare(x, y, StringComparison.Ordinal),
    };
}
|
||||
|
||||
public int Compare(DebianEvr? x, DebianEvr? y)
|
||||
{
|
||||
if (ReferenceEquals(x, y))
|
||||
{
|
||||
return 0;
|
||||
}
|
||||
|
||||
if (x is null)
|
||||
{
|
||||
return -1;
|
||||
}
|
||||
|
||||
if (y is null)
|
||||
{
|
||||
return 1;
|
||||
}
|
||||
|
||||
var compare = x.Epoch.CompareTo(y.Epoch);
|
||||
if (compare != 0)
|
||||
{
|
||||
return compare;
|
||||
}
|
||||
|
||||
compare = CompareSegment(x.Version, y.Version);
|
||||
if (compare != 0)
|
||||
{
|
||||
return compare;
|
||||
}
|
||||
|
||||
compare = CompareSegment(x.Revision, y.Revision);
|
||||
if (compare != 0)
|
||||
{
|
||||
return compare;
|
||||
}
|
||||
|
||||
return string.Compare(x.Original, y.Original, StringComparison.Ordinal);
|
||||
}
|
||||
|
||||
private static int CompareSegment(string left, string right)
|
||||
{
|
||||
var i = 0;
|
||||
var j = 0;
|
||||
|
||||
while (i < left.Length || j < right.Length)
|
||||
{
|
||||
while (i < left.Length && !IsAlphaNumeric(left[i]) && left[i] != '~')
|
||||
{
|
||||
i++;
|
||||
}
|
||||
|
||||
while (j < right.Length && !IsAlphaNumeric(right[j]) && right[j] != '~')
|
||||
{
|
||||
j++;
|
||||
}
|
||||
|
||||
var leftChar = i < left.Length ? left[i] : '\0';
|
||||
var rightChar = j < right.Length ? right[j] : '\0';
|
||||
|
||||
if (leftChar == '~' || rightChar == '~')
|
||||
{
|
||||
if (leftChar != rightChar)
|
||||
{
|
||||
return leftChar == '~' ? -1 : 1;
|
||||
}
|
||||
|
||||
i += leftChar == '~' ? 1 : 0;
|
||||
j += rightChar == '~' ? 1 : 0;
|
||||
continue;
|
||||
}
|
||||
|
||||
var leftIsDigit = char.IsDigit(leftChar);
|
||||
var rightIsDigit = char.IsDigit(rightChar);
|
||||
|
||||
if (leftIsDigit && rightIsDigit)
|
||||
{
|
||||
var leftStart = i;
|
||||
while (i < left.Length && char.IsDigit(left[i]))
|
||||
{
|
||||
i++;
|
||||
}
|
||||
|
||||
var rightStart = j;
|
||||
while (j < right.Length && char.IsDigit(right[j]))
|
||||
{
|
||||
j++;
|
||||
}
|
||||
|
||||
var leftTrimmed = leftStart;
|
||||
while (leftTrimmed < i && left[leftTrimmed] == '0')
|
||||
{
|
||||
leftTrimmed++;
|
||||
}
|
||||
|
||||
var rightTrimmed = rightStart;
|
||||
while (rightTrimmed < j && right[rightTrimmed] == '0')
|
||||
{
|
||||
rightTrimmed++;
|
||||
}
|
||||
|
||||
var leftLength = i - leftTrimmed;
|
||||
var rightLength = j - rightTrimmed;
|
||||
|
||||
if (leftLength != rightLength)
|
||||
{
|
||||
return leftLength.CompareTo(rightLength);
|
||||
}
|
||||
|
||||
var comparison = left.AsSpan(leftTrimmed, leftLength)
|
||||
.CompareTo(right.AsSpan(rightTrimmed, rightLength), StringComparison.Ordinal);
|
||||
if (comparison != 0)
|
||||
{
|
||||
return comparison;
|
||||
}
|
||||
|
||||
continue;
|
||||
}
|
||||
|
||||
if (leftIsDigit)
|
||||
{
|
||||
return 1;
|
||||
}
|
||||
|
||||
if (rightIsDigit)
|
||||
{
|
||||
return -1;
|
||||
}
|
||||
|
||||
var leftOrder = CharOrder(leftChar);
|
||||
var rightOrder = CharOrder(rightChar);
|
||||
|
||||
var orderComparison = leftOrder.CompareTo(rightOrder);
|
||||
if (orderComparison != 0)
|
||||
{
|
||||
return orderComparison;
|
||||
}
|
||||
|
||||
if (leftChar != rightChar)
|
||||
{
|
||||
return leftChar.CompareTo(rightChar);
|
||||
}
|
||||
|
||||
if (leftChar == '\0')
|
||||
{
|
||||
return 0;
|
||||
}
|
||||
|
||||
i++;
|
||||
j++;
|
||||
}
|
||||
|
||||
return 0;
|
||||
}
|
||||
|
||||
private static bool IsAlphaNumeric(char value)
|
||||
=> char.IsLetterOrDigit(value);
|
||||
|
||||
private static int CharOrder(char value)
|
||||
{
|
||||
if (value == '\0')
|
||||
{
|
||||
return 0;
|
||||
}
|
||||
|
||||
if (value == '~')
|
||||
{
|
||||
return -1;
|
||||
}
|
||||
|
||||
if (char.IsDigit(value))
|
||||
{
|
||||
return 0;
|
||||
}
|
||||
|
||||
if (char.IsLetter(value))
|
||||
{
|
||||
return value;
|
||||
}
|
||||
|
||||
return value + 256;
|
||||
}
|
||||
}
|
||||
264
src/StellaOps.Feedser/StellaOps.Feedser.Merge/Comparers/Nevra.cs
Normal file
264
src/StellaOps.Feedser/StellaOps.Feedser.Merge/Comparers/Nevra.cs
Normal file
@@ -0,0 +1,264 @@
|
||||
namespace StellaOps.Feedser.Merge.Comparers;

using System;
using StellaOps.Feedser.Normalization.Distro;

/// <summary>
/// Orders RPM NEVRA identifiers (name-epoch:version-release.arch). Name and architecture
/// compare ordinally, epoch numerically, and version/release via rpm's tilde-aware rules.
/// Strings that fail to parse sort below parsed values and compare ordinally among themselves.
/// </summary>
public sealed class NevraComparer : IComparer<Nevra>, IComparer<string>
{
    /// <summary>Shared singleton; the comparer is stateless.</summary>
    public static NevraComparer Instance { get; } = new();

    private NevraComparer()
    {
    }

    /// <summary>Compares two raw NEVRA strings; null sorts first.</summary>
    public int Compare(string? x, string? y)
    {
        if (ReferenceEquals(x, y))
        {
            return 0;
        }

        if (x is null)
        {
            return -1;
        }

        if (y is null)
        {
            return 1;
        }

        var xOk = Nevra.TryParse(x, out var xNevra);
        var yOk = Nevra.TryParse(y, out var yNevra);

        return (xOk, yOk) switch
        {
            (true, true) => Compare(xNevra, yNevra),
            (true, false) => 1,  // a parsed NEVRA outranks raw text
            (false, true) => -1,
            _ => string.Compare(x, y, StringComparison.Ordinal),
        };
    }

    /// <summary>Compares parsed NEVRA values, most significant field first.</summary>
    public int Compare(Nevra? x, Nevra? y)
    {
        if (ReferenceEquals(x, y))
        {
            return 0;
        }

        if (x is null)
        {
            return -1;
        }

        if (y is null)
        {
            return 1;
        }

        // Accumulate into a single result; later comparisons only run on ties.
        var result = string.Compare(x.Name, y.Name, StringComparison.Ordinal);

        if (result == 0)
        {
            result = string.Compare(x.Architecture ?? string.Empty, y.Architecture ?? string.Empty, StringComparison.Ordinal);
        }

        if (result == 0)
        {
            result = x.Epoch.CompareTo(y.Epoch);
        }

        if (result == 0)
        {
            result = RpmVersionComparer.Compare(x.Version, y.Version);
        }

        if (result == 0)
        {
            result = RpmVersionComparer.Compare(x.Release, y.Release);
        }

        // The original string is the final deterministic tie-breaker.
        return result == 0
            ? string.Compare(x.Original, y.Original, StringComparison.Ordinal)
            : result;
    }
}
|
||||
|
||||
internal static class RpmVersionComparer
{
    /// <summary>
    /// Compares two rpm version/release strings following rpmvercmp semantics:
    /// '~' sorts before everything (pre-releases), a numeric segment always beats an
    /// alphabetic one, numeric segments compare by magnitude after stripping leading
    /// zeros, and the string with segments remaining is the newer one. Null is
    /// treated as the empty string.
    /// </summary>
    public static int Compare(string? left, string? right)
    {
        left ??= string.Empty;
        right ??= string.Empty;

        var leftIndex = 0;
        var rightIndex = 0;

        while (true)
        {
            var leftTilde = SkipSeparators(left, ref leftIndex);
            var rightTilde = SkipSeparators(right, ref rightIndex);

            // Tilde sorts older than anything, including end-of-string.
            if (leftTilde || rightTilde)
            {
                if (leftTilde != rightTilde)
                {
                    return leftTilde ? -1 : 1;
                }

                continue;
            }

            var leftDone = leftIndex >= left.Length;
            var rightDone = rightIndex >= right.Length;
            if (leftDone || rightDone)
            {
                // Whichever side still has segments is the newer version.
                return (leftDone, rightDone) switch
                {
                    (true, true) => 0,
                    (true, false) => -1,
                    _ => 1,
                };
            }

            var leftNumeric = char.IsDigit(left[leftIndex]);
            var rightNumeric = char.IsDigit(right[rightIndex]);

            // A numeric segment always outranks an alphabetic one.
            if (leftNumeric != rightNumeric)
            {
                return leftNumeric ? 1 : -1;
            }

            var segmentResult = leftNumeric
                ? CompareNumericSegment(left, ref leftIndex, right, ref rightIndex)
                : CompareAlphaSegment(left, ref leftIndex, right, ref rightIndex);

            if (segmentResult != 0)
            {
                return segmentResult;
            }
        }
    }

    // Advances past separator characters (anything non-alphanumeric). Returns true when
    // a '~' was consumed, which the caller treats as "sorts before everything".
    private static bool SkipSeparators(string value, ref int index)
    {
        while (index < value.Length)
        {
            var current = value[index];
            if (current == '~')
            {
                index++;
                return true;
            }

            if (char.IsLetterOrDigit(current))
            {
                return false;
            }

            index++;
        }

        return false;
    }

    // Compares the digit runs starting at each index, advancing both indices past them.
    private static int CompareNumericSegment(string value, ref int index, string other, ref int otherIndex)
    {
        var start = index;
        while (index < value.Length && char.IsDigit(value[index]))
        {
            index++;
        }

        var otherStart = otherIndex;
        while (otherIndex < other.Length && char.IsDigit(other[otherIndex]))
        {
            otherIndex++;
        }

        // Strip leading zeros so "007" and "7" compare equal.
        while (start < index && value[start] == '0')
        {
            start++;
        }

        while (otherStart < otherIndex && other[otherStart] == '0')
        {
            otherStart++;
        }

        var length = index - start;
        var otherLength = otherIndex - otherStart;

        // After zero-stripping, more digits means a strictly larger number.
        if (length != otherLength)
        {
            return length.CompareTo(otherLength);
        }

        return value.AsSpan(start, length)
            .CompareTo(other.AsSpan(otherStart, otherLength), StringComparison.Ordinal);
    }

    // Compares the letter runs starting at each index, advancing both indices past them.
    private static int CompareAlphaSegment(string value, ref int index, string other, ref int otherIndex)
    {
        var start = index;
        while (index < value.Length && char.IsLetter(value[index]))
        {
            index++;
        }

        var otherStart = otherIndex;
        while (otherIndex < other.Length && char.IsLetter(other[otherIndex]))
        {
            otherIndex++;
        }

        return value.AsSpan(start, index - start)
            .CompareTo(other.AsSpan(otherStart, otherIndex - otherStart), StringComparison.Ordinal);
    }
}
|
||||
@@ -0,0 +1,73 @@
|
||||
namespace StellaOps.Feedser.Merge.Comparers;

using System.Diagnostics.CodeAnalysis;
using Semver;

/// <summary>
/// Provides helpers to interpret introduced/fixed/lastAffected SemVer ranges and compare versions.
/// </summary>
public static class SemanticVersionRangeResolver
{
    /// <summary>
    /// Lenient SemVer parse (SemVersionStyles.Any); returns false for null or malformed input.
    /// </summary>
    public static bool TryParse(string? value, [NotNullWhen(true)] out SemVersion? result)
        => SemVersion.TryParse(value, SemVersionStyles.Any, out result);

    /// <summary>Lenient SemVer parse; throws when the input is malformed.</summary>
    public static SemVersion Parse(string value)
        => SemVersion.Parse(value, SemVersionStyles.Any);

    /// <summary>
    /// Resolves the effective start and end versions using introduced/fixed/lastAffected semantics.
    /// A parseable "fixed" version wins and yields an exclusive upper bound; otherwise a
    /// parseable "lastAffected" yields an inclusive upper bound plus the next patch as a
    /// synthetic exclusive bound. Unparseable inputs are treated as absent (null).
    /// </summary>
    // NOTE(review): the synthetic exclusive bound drops prerelease/build metadata and also
    // admits prereleases of the next patch (e.g. lastAffected 1.2.3 -> exclusive 1.2.4
    // still allows 1.2.4-alpha); prefer the inclusive bound when exactness matters — confirm intended.
    public static (SemVersion? introduced, SemVersion? exclusiveUpperBound, SemVersion? inclusiveUpperBound) ResolveWindows(
        string? introduced,
        string? fixedVersion,
        string? lastAffected)
    {
        var introducedVersion = TryParse(introduced, out var parsedIntroduced) ? parsedIntroduced : null;
        var fixedVersionParsed = TryParse(fixedVersion, out var parsedFixed) ? parsedFixed : null;
        var lastAffectedVersion = TryParse(lastAffected, out var parsedLast) ? parsedLast : null;

        SemVersion? exclusiveUpper = null;
        SemVersion? inclusiveUpper = null;

        if (fixedVersionParsed is not null)
        {
            exclusiveUpper = fixedVersionParsed;
        }
        else if (lastAffectedVersion is not null)
        {
            inclusiveUpper = lastAffectedVersion;
            exclusiveUpper = NextPatch(lastAffectedVersion);
        }

        return (introducedVersion, exclusiveUpper, inclusiveUpper);
    }

    /// <summary>
    /// Compares two version strings: parseable SemVers order by SemVer sort order and rank
    /// above unparseable strings; two unparseable strings compare ordinally.
    /// </summary>
    public static int Compare(string? left, string? right)
    {
        var leftParsed = TryParse(left, out var leftSemver);
        var rightParsed = TryParse(right, out var rightSemver);

        if (leftParsed && rightParsed)
        {
            return SemVersion.CompareSortOrder(leftSemver, rightSemver);
        }

        if (leftParsed)
        {
            return 1;
        }

        if (rightParsed)
        {
            return -1;
        }

        return string.Compare(left, right, StringComparison.Ordinal);
    }

    // Bumps the patch component, discarding any prerelease/build metadata on the input.
    private static SemVersion NextPatch(SemVersion version)
    {
        return new SemVersion(version.Major, version.Minor, version.Patch + 1);
    }
}
|
||||
@@ -0,0 +1,15 @@
|
||||
using System;
using System.Collections.Generic;

namespace StellaOps.Feedser.Merge.Options;

/// <summary>
/// Configurable precedence overrides for advisory sources. Entries supplement or replace
/// the built-in default ranks used when merging advisories from multiple feeds.
/// </summary>
public sealed class AdvisoryPrecedenceOptions
{
    /// <summary>
    /// Mapping of provenance source identifiers to precedence ranks. Lower numbers take precedence.
    /// Keys are matched case-insensitively against provenance source names.
    /// </summary>
    public IDictionary<string, int> Ranks { get; init; } = new Dictionary<string, int>(StringComparer.OrdinalIgnoreCase);
}
|
||||
@@ -0,0 +1,296 @@
|
||||
using System;
using System.Collections.Generic;
using System.Diagnostics.Metrics;
using System.Linq;
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Logging.Abstractions;
using StellaOps.Feedser.Merge.Options;
using StellaOps.Feedser.Models;

namespace StellaOps.Feedser.Merge.Services;

/// <summary>
/// Merges canonical advisories emitted by different sources into a single precedence-resolved advisory.
/// Lower rank values win: distro feeds override vendor PSIRTs, which override CERT enrichers, NVD and KEV.
/// </summary>
public sealed class AdvisoryPrecedenceMerger
{
    // Built-in source ranks; lower number = higher precedence. Overridable via AdvisoryPrecedenceOptions.
    private static readonly IReadOnlyDictionary<string, int> DefaultPrecedence = new Dictionary<string, int>(StringComparer.OrdinalIgnoreCase)
    {
        ["redhat"] = 0,
        ["ubuntu"] = 0,
        ["debian"] = 0,
        ["suse"] = 0,
        ["msrc"] = 1,
        ["oracle"] = 1,
        ["adobe"] = 1,
        ["chromium"] = 1,
        ["jvn"] = 2,
        ["certfr"] = 2,
        ["certin"] = 2,
        ["ics-kaspersky"] = 2,
        ["kev"] = 6,
        ["nvd"] = 5,
    };

    private static readonly Meter MergeMeter = new("StellaOps.Feedser.Merge");

    // Counts merges where a lower-precedence source's data was suppressed.
    private static readonly Counter<long> OverridesCounter = MergeMeter.CreateCounter<long>(
        "feedser.merge.overrides",
        unit: "count",
        description: "Number of times lower-precedence advisories were overridden by higher-precedence sources.");

    private readonly AffectedPackagePrecedenceResolver _packageResolver;
    private readonly IReadOnlyDictionary<string, int> _precedence;
    private readonly int _fallbackRank; // rank assigned to sources absent from the precedence table
    private readonly TimeProvider _timeProvider;
    private readonly ILogger<AdvisoryPrecedenceMerger> _logger;

    public AdvisoryPrecedenceMerger()
        : this(new AffectedPackagePrecedenceResolver(), DefaultPrecedence, TimeProvider.System, NullLogger<AdvisoryPrecedenceMerger>.Instance)
    {
    }

    public AdvisoryPrecedenceMerger(AffectedPackagePrecedenceResolver packageResolver, TimeProvider? timeProvider = null)
        : this(packageResolver, DefaultPrecedence, timeProvider ?? TimeProvider.System, NullLogger<AdvisoryPrecedenceMerger>.Instance)
    {
    }

    public AdvisoryPrecedenceMerger(
        AffectedPackagePrecedenceResolver packageResolver,
        IReadOnlyDictionary<string, int> precedence,
        TimeProvider timeProvider)
        : this(packageResolver, precedence, timeProvider, NullLogger<AdvisoryPrecedenceMerger>.Instance)
    {
    }

    public AdvisoryPrecedenceMerger(
        AffectedPackagePrecedenceResolver packageResolver,
        AdvisoryPrecedenceOptions? options,
        TimeProvider timeProvider,
        ILogger<AdvisoryPrecedenceMerger>? logger = null)
        : this(packageResolver, MergePrecedence(DefaultPrecedence, options), timeProvider, logger)
    {
    }

    public AdvisoryPrecedenceMerger(
        AffectedPackagePrecedenceResolver packageResolver,
        IReadOnlyDictionary<string, int> precedence,
        TimeProvider timeProvider,
        ILogger<AdvisoryPrecedenceMerger>? logger)
    {
        _packageResolver = packageResolver ?? throw new ArgumentNullException(nameof(packageResolver));
        _precedence = precedence ?? throw new ArgumentNullException(nameof(precedence));
        _fallbackRank = _precedence.Count == 0 ? 10 : _precedence.Values.Max() + 1;
        _timeProvider = timeProvider ?? TimeProvider.System;
        _logger = logger ?? NullLogger<AdvisoryPrecedenceMerger>.Instance;
    }

    /// <summary>
    /// Combines advisories that share a single advisory key into one precedence-resolved advisory.
    /// Scalar fields come from the highest-precedence source that supplies them; collections are
    /// unioned and deduplicated; a synthetic "merge" provenance entry records the participants.
    /// </summary>
    /// <exception cref="ArgumentNullException">The sequence is null.</exception>
    /// <exception cref="ArgumentException">The sequence is empty or contains differing advisory keys.</exception>
    public Advisory Merge(IEnumerable<Advisory> advisories)
    {
        if (advisories is null)
        {
            throw new ArgumentNullException(nameof(advisories));
        }

        var list = advisories.Where(static a => a is not null).ToList();
        if (list.Count == 0)
        {
            throw new ArgumentException("At least one advisory is required for merge.", nameof(advisories));
        }

        var advisoryKey = list[0].AdvisoryKey;
        if (list.Any(advisory => !string.Equals(advisory.AdvisoryKey, advisoryKey, StringComparison.Ordinal)))
        {
            throw new ArgumentException("All advisories must share the same advisory key.", nameof(advisories));
        }

        // Highest precedence (lowest rank) first; richer provenance breaks ties.
        var ordered = list
            .Select(advisory => new AdvisoryEntry(advisory, GetRank(advisory)))
            .OrderBy(entry => entry.Rank)
            .ThenByDescending(entry => entry.Advisory.Provenance.Length)
            .ToArray();

        // Scalars: first non-blank value in precedence order wins.
        var title = PickString(ordered, advisory => advisory.Title) ?? advisoryKey;
        var summary = PickString(ordered, advisory => advisory.Summary);
        var language = PickString(ordered, advisory => advisory.Language);
        var severity = PickString(ordered, advisory => advisory.Severity);

        var aliases = ordered
            .SelectMany(entry => entry.Advisory.Aliases)
            .Where(static alias => !string.IsNullOrWhiteSpace(alias))
            .Distinct(StringComparer.OrdinalIgnoreCase)
            .ToArray();

        var references = ordered
            .SelectMany(entry => entry.Advisory.References)
            .Distinct()
            .ToArray();

        var affectedPackages = _packageResolver.Merge(ordered.SelectMany(entry => entry.Advisory.AffectedPackages));
        var cvssMetrics = ordered
            .SelectMany(entry => entry.Advisory.CvssMetrics)
            .Distinct()
            .ToArray();

        var published = PickDateTime(ordered, static advisory => advisory.Published);
        var modified = PickDateTime(ordered, static advisory => advisory.Modified) ?? published;

        var provenance = ordered
            .SelectMany(entry => entry.Advisory.Provenance)
            .Distinct()
            .ToList();

        // Record every participating source so the precedence decision is auditable.
        var precedenceTrace = ordered
            .SelectMany(entry => entry.Sources)
            .Distinct(StringComparer.OrdinalIgnoreCase)
            .OrderBy(static source => source, StringComparer.OrdinalIgnoreCase)
            .ToArray();

        var mergeProvenance = new AdvisoryProvenance(
            source: "merge",
            kind: "precedence",
            value: string.Join("|", precedenceTrace),
            recordedAt: _timeProvider.GetUtcNow());

        provenance.Add(mergeProvenance);

        // Exploitation knowledge is additive: any source claiming it wins (e.g. KEV flag).
        var exploitKnown = ordered.Any(entry => entry.Advisory.ExploitKnown);

        LogOverrides(advisoryKey, ordered);

        return new Advisory(
            advisoryKey,
            title,
            summary,
            language,
            published,
            modified,
            severity,
            exploitKnown,
            aliases,
            references,
            affectedPackages,
            cvssMetrics,
            provenance);
    }

    // First non-blank value in precedence order, trimmed; null when no source supplies one.
    // Static: uses no instance state (was an instance method).
    private static string? PickString(IEnumerable<AdvisoryEntry> ordered, Func<Advisory, string?> selector)
    {
        foreach (var entry in ordered)
        {
            var value = selector(entry.Advisory);
            if (!string.IsNullOrWhiteSpace(value))
            {
                return value.Trim();
            }
        }

        return null;
    }

    // First timestamp in precedence order, normalized to UTC. Static: uses no instance state.
    private static DateTimeOffset? PickDateTime(IEnumerable<AdvisoryEntry> ordered, Func<Advisory, DateTimeOffset?> selector)
    {
        foreach (var entry in ordered)
        {
            var value = selector(entry.Advisory);
            if (value.HasValue)
            {
                return value.Value.ToUniversalTime();
            }
        }

        return null;
    }

    // Best (lowest) rank across an advisory's provenance sources; unknown sources get the fallback rank.
    private int GetRank(Advisory advisory)
    {
        var best = _fallbackRank;
        foreach (var provenance in advisory.Provenance)
        {
            if (string.IsNullOrWhiteSpace(provenance.Source))
            {
                continue;
            }

            if (_precedence.TryGetValue(provenance.Source, out var rank) && rank < best)
            {
                best = rank;
            }
        }

        return best;
    }

    // Overlays operator-supplied ranks on the defaults; blank keys are skipped, keys trimmed.
    private static IReadOnlyDictionary<string, int> MergePrecedence(
        IReadOnlyDictionary<string, int> defaults,
        AdvisoryPrecedenceOptions? options)
    {
        if (options?.Ranks is null || options.Ranks.Count == 0)
        {
            return defaults;
        }

        var merged = new Dictionary<string, int>(defaults, StringComparer.OrdinalIgnoreCase);
        foreach (var kvp in options.Ranks)
        {
            if (string.IsNullOrWhiteSpace(kvp.Key))
            {
                continue;
            }

            merged[kvp.Key.Trim()] = kvp.Value;
        }

        return merged;
    }

    // Emits a counter and a structured log line for every entry whose data lost to a
    // strictly higher-precedence source.
    private void LogOverrides(string advisoryKey, IReadOnlyList<AdvisoryEntry> ordered)
    {
        if (ordered.Count <= 1)
        {
            return;
        }

        var primary = ordered[0];
        var primaryRank = primary.Rank;
        var primarySources = string.Join(',', primary.Sources);

        for (var i = 1; i < ordered.Count; i++)
        {
            var candidate = ordered[i];
            if (candidate.Rank <= primaryRank)
            {
                // Equal-rank entries are complementary, not overridden.
                continue;
            }

            var suppressedSources = string.Join(',', candidate.Sources);

            OverridesCounter.Add(
                1,
                new KeyValuePair<string, object?>[]
                {
                    new("advisory", advisoryKey),
                    new("primary_sources", primarySources),
                    new("suppressed_sources", suppressedSources),
                });

            _logger.LogInformation(
                "Advisory precedence override for {AdvisoryKey}: kept {PrimarySources} (rank {PrimaryRank}) over {SuppressedSources} (rank {SuppressedRank})",
                advisoryKey,
                primarySources,
                primaryRank,
                suppressedSources,
                candidate.Rank);
        }
    }

    // Pairs an advisory with its computed rank and caches its distinct provenance source names.
    private readonly record struct AdvisoryEntry(Advisory Advisory, int Rank)
    {
        public IReadOnlyCollection<string> Sources { get; } = Advisory.Provenance
            .Select(static p => p.Source)
            .Where(static source => !string.IsNullOrWhiteSpace(source))
            .Distinct(StringComparer.OrdinalIgnoreCase)
            .ToArray();
    }
}
|
||||
@@ -0,0 +1,105 @@
|
||||
using System;
using System.Collections.Generic;
using System.Collections.Immutable;
using System.Linq;
using StellaOps.Feedser.Models;

namespace StellaOps.Feedser.Merge.Services;

/// <summary>
/// Applies source precedence rules to affected package sets so authoritative distro ranges override generic registry data.
/// </summary>
public sealed class AffectedPackagePrecedenceResolver
{
    // Built-in source ranks; lower number = higher precedence.
    private static readonly IReadOnlyDictionary<string, int> DefaultPrecedence = new Dictionary<string, int>(StringComparer.OrdinalIgnoreCase)
    {
        ["redhat"] = 0,
        ["ubuntu"] = 0,
        ["debian"] = 0,
        ["suse"] = 0,
        ["msrc"] = 1,
        ["oracle"] = 1,
        ["adobe"] = 1,
        ["chromium"] = 1,
        ["nvd"] = 5,
    };

    private readonly IReadOnlyDictionary<string, int> _precedence;
    private readonly int _fallbackRank; // rank assigned to sources absent from the table

    public AffectedPackagePrecedenceResolver()
        : this(DefaultPrecedence)
    {
    }

    public AffectedPackagePrecedenceResolver(IReadOnlyDictionary<string, int> precedence)
    {
        _precedence = precedence ?? throw new ArgumentNullException(nameof(precedence));
        _fallbackRank = precedence.Count == 0 ? 10 : precedence.Values.Max() + 1;
    }

    /// <summary>
    /// Collapses packages sharing (type, identifier, platform) into one entry: the
    /// highest-precedence package contributes the version ranges, while statuses and
    /// provenance are unioned across all duplicates. The result is sorted deterministically.
    /// </summary>
    /// <exception cref="ArgumentNullException"><paramref name="packages"/> is null.</exception>
    public IReadOnlyList<AffectedPackage> Merge(IEnumerable<AffectedPackage> packages)
    {
        ArgumentNullException.ThrowIfNull(packages);

        var grouped = packages
            .Where(static pkg => pkg is not null)
            .GroupBy(pkg => (pkg.Type, pkg.Identifier, pkg.Platform ?? string.Empty));

        var resolved = new List<AffectedPackage>();
        foreach (var group in grouped)
        {
            // Materialize the ordering once: it was previously an IOrderedEnumerable that
            // re-sorted the group on each of the three enumerations below.
            var ordered = group
                .OrderBy(GetPrecedence)
                .ThenByDescending(static pkg => pkg.Provenance.Length)
                .ThenByDescending(static pkg => pkg.VersionRanges.Length)
                .ToArray();

            var primary = ordered[0];

            var provenance = ordered
                .SelectMany(static pkg => pkg.Provenance)
                .Where(static p => p is not null)
                .Distinct()
                .ToImmutableArray();

            var statuses = ordered
                .SelectMany(static pkg => pkg.Statuses)
                .Distinct(AffectedPackageStatusEqualityComparer.Instance)
                .ToImmutableArray();

            var merged = new AffectedPackage(
                primary.Type,
                primary.Identifier,
                string.IsNullOrWhiteSpace(primary.Platform) ? null : primary.Platform,
                primary.VersionRanges,
                statuses,
                provenance);

            resolved.Add(merged);
        }

        // Deterministic output order keeps canonical serialization and hashing stable.
        return resolved
            .OrderBy(static pkg => pkg.Type, StringComparer.Ordinal)
            .ThenBy(static pkg => pkg.Identifier, StringComparer.Ordinal)
            .ThenBy(static pkg => pkg.Platform, StringComparer.Ordinal)
            .ToImmutableArray();
    }

    // Best (lowest) rank across the package's provenance sources; unknown sources get the fallback rank.
    private int GetPrecedence(AffectedPackage package)
    {
        var bestRank = _fallbackRank;
        foreach (var provenance in package.Provenance)
        {
            if (provenance is null || string.IsNullOrWhiteSpace(provenance.Source))
            {
                continue;
            }

            if (_precedence.TryGetValue(provenance.Source, out var rank) && rank < bestRank)
            {
                bestRank = rank;
            }
        }

        return bestRank;
    }
}
|
||||
@@ -0,0 +1,25 @@
|
||||
namespace StellaOps.Feedser.Merge.Services;

using System.Security.Cryptography;
using System.Text;
using StellaOps.Feedser.Models;

/// <summary>
/// Computes deterministic hashes over canonical advisory JSON payloads.
/// </summary>
public sealed class CanonicalHashCalculator
{
    // BOM-free encoder keeps the digest input byte-stable across platforms.
    private static readonly UTF8Encoding Utf8NoBom = new(false);

    /// <summary>
    /// Returns the SHA-256 digest of the advisory's normalized canonical JSON,
    /// or an empty array when the advisory is null.
    /// </summary>
    public byte[] ComputeHash(Advisory? advisory)
    {
        if (advisory is null)
        {
            return Array.Empty<byte>();
        }

        var normalized = CanonicalJsonSerializer.Normalize(advisory);
        return SHA256.HashData(Utf8NoBom.GetBytes(CanonicalJsonSerializer.Serialize(normalized)));
    }
}
|
||||
@@ -0,0 +1,70 @@
|
||||
namespace StellaOps.Feedser.Merge.Services;

using System.Linq;
using System.Security.Cryptography;
using Microsoft.Extensions.Logging;
using StellaOps.Feedser.Models;
using StellaOps.Feedser.Storage.Mongo.MergeEvents;

/// <summary>
/// Persists merge events with canonical before/after hashes for auditability.
/// </summary>
public sealed class MergeEventWriter
{
    private readonly IMergeEventStore _mergeEventStore;
    private readonly CanonicalHashCalculator _hashCalculator;
    private readonly TimeProvider _timeProvider;
    private readonly ILogger<MergeEventWriter> _logger;

    public MergeEventWriter(
        IMergeEventStore mergeEventStore,
        CanonicalHashCalculator hashCalculator,
        TimeProvider timeProvider,
        ILogger<MergeEventWriter> logger)
    {
        _mergeEventStore = mergeEventStore ?? throw new ArgumentNullException(nameof(mergeEventStore));
        _hashCalculator = hashCalculator ?? throw new ArgumentNullException(nameof(hashCalculator));
        _timeProvider = timeProvider ?? throw new ArgumentNullException(nameof(timeProvider));
        _logger = logger ?? throw new ArgumentNullException(nameof(logger));
    }

    /// <summary>
    /// Computes canonical hashes for the advisory before and after the merge, logs whether
    /// the payload changed, and appends an immutable merge event record to the store.
    /// </summary>
    /// <param name="before">Prior advisory snapshot; null for a first-time merge (yields an empty hash).</param>
    /// <returns>The persisted record, including both hashes and the input document ids.</returns>
    public async Task<MergeEventRecord> AppendAsync(
        string advisoryKey,
        Advisory? before,
        Advisory after,
        IReadOnlyList<Guid> inputDocumentIds,
        CancellationToken cancellationToken)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(advisoryKey);
        ArgumentNullException.ThrowIfNull(after);

        var beforeHash = _hashCalculator.ComputeHash(before);
        var afterHash = _hashCalculator.ComputeHash(after);
        var recordedAt = _timeProvider.GetUtcNow();
        var documents = inputDocumentIds?.ToArray() ?? Array.Empty<Guid>();

        var record = new MergeEventRecord(
            Guid.NewGuid(),
            advisoryKey,
            beforeHash,
            afterHash,
            recordedAt,
            documents);

        if (CryptographicOperations.FixedTimeEquals(beforeHash, afterHash))
        {
            _logger.LogInformation("Merge event for {AdvisoryKey} recorded without hash change", advisoryKey);
        }
        else
        {
            _logger.LogInformation(
                "Merge event for {AdvisoryKey} changed hash {BeforeHash} -> {AfterHash}",
                advisoryKey,
                Convert.ToHexString(beforeHash),
                Convert.ToHexString(afterHash));
        }

        await _mergeEventStore.AppendAsync(record, cancellationToken).ConfigureAwait(false);
        return record;
    }
}
|
||||
@@ -1,4 +1,5 @@
|
||||
<Project Sdk="Microsoft.NET.Sdk">
|
||||
<?xml version="1.0" encoding="utf-8"?>
|
||||
<Project Sdk="Microsoft.NET.Sdk">
|
||||
|
||||
<PropertyGroup>
|
||||
<TargetFramework>net10.0</TargetFramework>
|
||||
@@ -7,7 +8,9 @@
|
||||
</PropertyGroup>
|
||||
|
||||
<ItemGroup>
|
||||
<PackageReference Include="Semver" Version="2.3.0" />
|
||||
<ProjectReference Include="../StellaOps.Feedser.Models/StellaOps.Feedser.Models.csproj" />
|
||||
<ProjectReference Include="../StellaOps.Feedser.Normalization/StellaOps.Feedser.Normalization.csproj" />
|
||||
<ProjectReference Include="../StellaOps.Feedser.Storage.Mongo/StellaOps.Feedser.Storage.Mongo.csproj" />
|
||||
</ItemGroup>
|
||||
</Project>
|
||||
|
||||
13
src/StellaOps.Feedser/StellaOps.Feedser.Merge/TASKS.md
Normal file
13
src/StellaOps.Feedser/StellaOps.Feedser.Merge/TASKS.md
Normal file
@@ -0,0 +1,13 @@
|
||||
# TASKS
|
||||
| Task | Owner(s) | Depends on | Notes |
|
||||
|---|---|---|---|
|
||||
|Identity graph and alias resolver|BE-Merge|Models, Storage.Mongo|Deterministic key choice; cycle-safe.|
|
||||
|Precedence policy engine|BE-Merge|Architecture|PSIRT/OVAL > NVD; CERTs enrich; KEV flag.|
|
||||
|NEVRA comparer plus tests|BE-Merge (Distro WG)|Source.Distro fixtures|DONE – Added Nevra parser/comparer with tilde-aware rpm ordering and unit coverage.|
|
||||
|Debian EVR comparer plus tests|BE-Merge (Distro WG)|Debian fixtures|DONE – DebianEvr comparer mirrors dpkg ordering with tilde/epoch handling and unit coverage.|
|
||||
|SemVer range resolver plus tests|BE-Merge (OSS WG)|OSV/GHSA fixtures|DONE – SemanticVersionRangeResolver covers introduced/fixed/lastAffected semantics with SemVer ordering tests.|
|
||||
|Canonical hash and merge_event writer|BE-Merge|Models, Storage.Mongo|DONE – Hash calculator + MergeEventWriter compute canonical SHA-256 digests and persist merge events.|
|
||||
|Conflict detection and metrics|BE-Merge|Core|Counters; structured logs; traces.|
|
||||
|End-to-end determinism test|QA|Merge, key connectors|Same inputs -> same hashes.|
|
||||
|Override audit logging|BE-Merge|Observability|DOING – structured override logging and metrics emitted; await production telemetry review.|
|
||||
|Configurable precedence table|BE-Merge|Architecture|DOING – precedence overrides now accepted via options; document operator workflow.|
|
||||
25
src/StellaOps.Feedser/StellaOps.Feedser.Models/AGENTS.md
Normal file
25
src/StellaOps.Feedser/StellaOps.Feedser.Models/AGENTS.md
Normal file
@@ -0,0 +1,25 @@
|
||||
# AGENTS
|
||||
## Role
|
||||
Canonical data model for normalized advisories and all downstream serialization. Source of truth for merge/export.
|
||||
## Scope
|
||||
- Canonical types: Advisory, AdvisoryReference, CvssMetric, AffectedPackage, AffectedVersionRange, AdvisoryProvenance.
|
||||
- Invariants: stable ordering, culture-invariant serialization, UTC timestamps, deterministic equality semantics.
|
||||
- Field semantics: preserve all aliases/references; ranges per ecosystem (NEVRA/EVR/SemVer); provenance on every mapped field.
|
||||
- Backward/forward compatibility: additive evolution; versioned DTOs where needed; no breaking field renames.
|
||||
- Detailed field coverage documented in `CANONICAL_RECORDS.md`; update alongside model changes.
|
||||
## Participants
|
||||
- Source connectors map external DTOs into these types.
|
||||
- Merge engine composes/overrides AffectedPackage sets and consolidates references/aliases.
|
||||
- Exporters serialize canonical documents deterministically.
|
||||
## Interfaces & contracts
|
||||
- Null-object statics: Advisory.Empty, AdvisoryReference.Empty, CvssMetric.Empty.
|
||||
- AffectedPackage.Type describes semantics (e.g., rpm, deb, cpe, semver). Identifier is stable (e.g., NEVRA, PURL, CPE).
|
||||
- Version ranges list is ordered by introduction then fix; provenance identifies source/kind/value/recordedAt.
|
||||
- Alias schemes must include CVE, GHSA, OSV, JVN/JVNDB, BDU, VU (CERT/CC), MSRC, CISCO-SA, ORACLE-CPU, APSB/APA, APPLE-HT, CHROMIUM-POST, VMSA, RHSA, USN, DSA, SUSE-SU, ICSA, CWE, CPE, PURL.
|
||||
## In/Out of scope
|
||||
In: data shapes, invariants, helpers for canonical serialization and comparison.
|
||||
Out: fetching/parsing external schemas, storage, HTTP.
|
||||
## Observability & security expectations
|
||||
- No secrets; purely in-memory types.
|
||||
- Provide debug renders for test snapshots (canonical JSON).
|
||||
- Emit model version identifiers in logs when canonical structures change; keep adapters for older readers until deprecated.
|
||||
145
src/StellaOps.Feedser/StellaOps.Feedser.Models/Advisory.cs
Normal file
145
src/StellaOps.Feedser/StellaOps.Feedser.Models/Advisory.cs
Normal file
@@ -0,0 +1,145 @@
|
||||
using System.Collections.Immutable;
|
||||
using System.Linq;
|
||||
using System.Text.Json.Serialization;
|
||||
|
||||
namespace StellaOps.Feedser.Models;
|
||||
|
||||
/// <summary>
|
||||
/// Canonical advisory document produced after merge. Collections are pre-sorted for deterministic serialization.
|
||||
/// </summary>
|
||||
public sealed record Advisory
|
||||
{
|
||||
public static Advisory Empty { get; } = new(
|
||||
advisoryKey: "unknown",
|
||||
title: "",
|
||||
summary: null,
|
||||
language: null,
|
||||
published: null,
|
||||
modified: null,
|
||||
severity: null,
|
||||
exploitKnown: false,
|
||||
aliases: Array.Empty<string>(),
|
||||
references: Array.Empty<AdvisoryReference>(),
|
||||
affectedPackages: Array.Empty<AffectedPackage>(),
|
||||
cvssMetrics: Array.Empty<CvssMetric>(),
|
||||
provenance: Array.Empty<AdvisoryProvenance>());
|
||||
|
||||
public Advisory(
|
||||
string advisoryKey,
|
||||
string title,
|
||||
string? summary,
|
||||
string? language,
|
||||
DateTimeOffset? published,
|
||||
DateTimeOffset? modified,
|
||||
string? severity,
|
||||
bool exploitKnown,
|
||||
IEnumerable<string>? aliases,
|
||||
IEnumerable<AdvisoryReference>? references,
|
||||
IEnumerable<AffectedPackage>? affectedPackages,
|
||||
IEnumerable<CvssMetric>? cvssMetrics,
|
||||
IEnumerable<AdvisoryProvenance>? provenance)
|
||||
{
|
||||
AdvisoryKey = Validation.EnsureNotNullOrWhiteSpace(advisoryKey, nameof(advisoryKey));
|
||||
Title = Validation.EnsureNotNullOrWhiteSpace(title, nameof(title));
|
||||
Summary = Validation.TrimToNull(summary);
|
||||
Language = Validation.TrimToNull(language)?.ToLowerInvariant();
|
||||
Published = published?.ToUniversalTime();
|
||||
Modified = modified?.ToUniversalTime();
|
||||
Severity = SeverityNormalization.Normalize(severity);
|
||||
ExploitKnown = exploitKnown;
|
||||
|
||||
Aliases = (aliases ?? Array.Empty<string>())
|
||||
.Select(static alias => Validation.TryNormalizeAlias(alias, out var normalized) ? normalized! : null)
|
||||
.Where(static alias => alias is not null)
|
||||
.Distinct(StringComparer.Ordinal)
|
||||
.OrderBy(static alias => alias, StringComparer.Ordinal)
|
||||
.Select(static alias => alias!)
|
||||
.ToImmutableArray();
|
||||
|
||||
References = (references ?? Array.Empty<AdvisoryReference>())
|
||||
.Where(static reference => reference is not null)
|
||||
.OrderBy(static reference => reference.Url, StringComparer.Ordinal)
|
||||
.ThenBy(static reference => reference.Kind, StringComparer.Ordinal)
|
||||
.ThenBy(static reference => reference.SourceTag, StringComparer.Ordinal)
|
||||
.ThenBy(static reference => reference.Provenance.RecordedAt)
|
||||
.ToImmutableArray();
|
||||
|
||||
AffectedPackages = (affectedPackages ?? Array.Empty<AffectedPackage>())
|
||||
.Where(static package => package is not null)
|
||||
.OrderBy(static package => package.Type, StringComparer.Ordinal)
|
||||
.ThenBy(static package => package.Identifier, StringComparer.Ordinal)
|
||||
.ThenBy(static package => package.Platform, StringComparer.Ordinal)
|
||||
.ToImmutableArray();
|
||||
|
||||
CvssMetrics = (cvssMetrics ?? Array.Empty<CvssMetric>())
|
||||
.Where(static metric => metric is not null)
|
||||
.OrderBy(static metric => metric.Version, StringComparer.Ordinal)
|
||||
.ThenBy(static metric => metric.Vector, StringComparer.Ordinal)
|
||||
.ToImmutableArray();
|
||||
|
||||
Provenance = (provenance ?? Array.Empty<AdvisoryProvenance>())
|
||||
.Where(static p => p is not null)
|
||||
.OrderBy(static p => p.Source, StringComparer.Ordinal)
|
||||
.ThenBy(static p => p.Kind, StringComparer.Ordinal)
|
||||
.ThenBy(static p => p.RecordedAt)
|
||||
.ToImmutableArray();
|
||||
}
|
||||
|
||||
[JsonConstructor]
|
||||
public Advisory(
|
||||
string advisoryKey,
|
||||
string title,
|
||||
string? summary,
|
||||
string? language,
|
||||
DateTimeOffset? published,
|
||||
DateTimeOffset? modified,
|
||||
string? severity,
|
||||
bool exploitKnown,
|
||||
ImmutableArray<string> aliases,
|
||||
ImmutableArray<AdvisoryReference> references,
|
||||
ImmutableArray<AffectedPackage> affectedPackages,
|
||||
ImmutableArray<CvssMetric> cvssMetrics,
|
||||
ImmutableArray<AdvisoryProvenance> provenance)
|
||||
: this(
|
||||
advisoryKey,
|
||||
title,
|
||||
summary,
|
||||
language,
|
||||
published,
|
||||
modified,
|
||||
severity,
|
||||
exploitKnown,
|
||||
aliases.IsDefault ? null : aliases.AsEnumerable(),
|
||||
references.IsDefault ? null : references.AsEnumerable(),
|
||||
affectedPackages.IsDefault ? null : affectedPackages.AsEnumerable(),
|
||||
cvssMetrics.IsDefault ? null : cvssMetrics.AsEnumerable(),
|
||||
provenance.IsDefault ? null : provenance.AsEnumerable())
|
||||
{
|
||||
}
|
||||
|
||||
public string AdvisoryKey { get; }
|
||||
|
||||
public string Title { get; }
|
||||
|
||||
public string? Summary { get; }
|
||||
|
||||
public string? Language { get; }
|
||||
|
||||
public DateTimeOffset? Published { get; }
|
||||
|
||||
public DateTimeOffset? Modified { get; }
|
||||
|
||||
public string? Severity { get; }
|
||||
|
||||
public bool ExploitKnown { get; }
|
||||
|
||||
public ImmutableArray<string> Aliases { get; }
|
||||
|
||||
public ImmutableArray<AdvisoryReference> References { get; }
|
||||
|
||||
public ImmutableArray<AffectedPackage> AffectedPackages { get; }
|
||||
|
||||
public ImmutableArray<CvssMetric> CvssMetrics { get; }
|
||||
|
||||
public ImmutableArray<AdvisoryProvenance> Provenance { get; }
|
||||
}
|
||||
@@ -0,0 +1,28 @@
|
||||
using System.Text.Json.Serialization;
|
||||
|
||||
namespace StellaOps.Feedser.Models;
|
||||
|
||||
/// <summary>
/// Describes the origin of a canonical field and how/when it was captured.
/// </summary>
public sealed record AdvisoryProvenance
{
    /// <summary>Null-object placeholder ("unknown"/"unspecified" at the Unix epoch, no value).</summary>
    public static AdvisoryProvenance Empty { get; } = new("unknown", "unspecified", string.Empty, DateTimeOffset.UnixEpoch);

    /// <summary>
    /// Validates source/kind, trims the value (empty becomes null), and stores the timestamp as UTC.
    /// </summary>
    /// <param name="source">Originating source identifier; must be non-empty.</param>
    /// <param name="kind">Category of the provenance record; must be non-empty.</param>
    /// <param name="value">Raw captured value; may be null or empty (normalized to null).</param>
    /// <param name="recordedAt">Capture time; normalized to UTC.</param>
    /// <exception cref="ArgumentException">When <paramref name="source"/> or <paramref name="kind"/> is null/whitespace.</exception>
    [JsonConstructor]
    public AdvisoryProvenance(string source, string kind, string? value, DateTimeOffset recordedAt)
    {
        Source = Validation.EnsureNotNullOrWhiteSpace(source, nameof(source));
        Kind = Validation.EnsureNotNullOrWhiteSpace(kind, nameof(kind));
        // The parameter is declared string? to match the nullable Value property: TrimToNull
        // already collapses empty/whitespace to null, and JSON payloads may omit "value".
        Value = Validation.TrimToNull(value);
        RecordedAt = recordedAt.ToUniversalTime();
    }

    public string Source { get; }

    public string Kind { get; }

    /// <summary>Captured value, or null when none was recorded.</summary>
    public string? Value { get; }

    /// <summary>UTC timestamp of capture.</summary>
    public DateTimeOffset RecordedAt { get; }
}
|
||||
@@ -0,0 +1,36 @@
|
||||
using System.Text.Json.Serialization;
|
||||
|
||||
namespace StellaOps.Feedser.Models;
|
||||
|
||||
/// <summary>
/// Canonical external reference associated with an advisory.
/// </summary>
public sealed record AdvisoryReference
{
    /// <summary>Null-object placeholder pointing at a deliberately invalid local URL.</summary>
    public static AdvisoryReference Empty { get; } = new("https://invalid.local/", kind: null, sourceTag: null, summary: null, provenance: AdvisoryProvenance.Empty);

    /// <summary>
    /// Validates the URL, trims optional descriptors (empty becomes null), and defaults missing provenance.
    /// </summary>
    /// <exception cref="ArgumentException">When <paramref name="url"/> is not an absolute http(s) URI.</exception>
    [JsonConstructor]
    public AdvisoryReference(string url, string? kind, string? sourceTag, string? summary, AdvisoryProvenance provenance)
    {
        // Reject anything that is not an absolute http(s) URI before populating other fields.
        Url = Validation.LooksLikeHttpUrl(url)
            ? url
            : throw new ArgumentException("Reference URL must be an absolute http(s) URI.", nameof(url));

        Kind = Validation.TrimToNull(kind);
        SourceTag = Validation.TrimToNull(sourceTag);
        Summary = Validation.TrimToNull(summary);
        Provenance = provenance ?? AdvisoryProvenance.Empty;
    }

    public string Url { get; }

    public string? Kind { get; }

    public string? SourceTag { get; }

    public string? Summary { get; }

    public AdvisoryProvenance Provenance { get; }
}
|
||||
@@ -0,0 +1,87 @@
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.Collections.Immutable;
|
||||
using System.Linq;
|
||||
using System.Text.Json.Serialization;
|
||||
|
||||
namespace StellaOps.Feedser.Models;
|
||||
|
||||
/// <summary>
/// Canonical affected package descriptor with deterministic ordering of ranges and provenance.
/// </summary>
public sealed record AffectedPackage
{
    /// <summary>Null-object placeholder (semver type, identifier "unknown").</summary>
    public static AffectedPackage Empty { get; } = new(
        AffectedPackageTypes.SemVer,
        identifier: "unknown",
        platform: null,
        versionRanges: Array.Empty<AffectedVersionRange>(),
        statuses: Array.Empty<AffectedPackageStatus>(),
        provenance: Array.Empty<AdvisoryProvenance>());

    /// <summary>
    /// Validates coordinates and canonicalizes the collections (de-duplicated, deterministically sorted).
    /// </summary>
    /// <param name="type">Semantic type of the coordinates; lower-cased on store.</param>
    /// <param name="identifier">Stable identifier (NEVRA, PURL, CPE, vendor slug, ...); must be non-empty.</param>
    /// <param name="platform">Optional platform qualifier; trimmed, empty becomes null.</param>
    /// <param name="versionRanges">Raw ranges; duplicates (provenance ignored) are removed.</param>
    /// <param name="statuses">Vendor status tags; null entries dropped, duplicates removed.</param>
    /// <param name="provenance">Provenance records; null entries dropped.</param>
    /// <exception cref="ArgumentException">When type or identifier is null/whitespace.</exception>
    [JsonConstructor]
    public AffectedPackage(
        string type,
        string identifier,
        string? platform = null,
        IEnumerable<AffectedVersionRange>? versionRanges = null,
        IEnumerable<AffectedPackageStatus>? statuses = null,
        IEnumerable<AdvisoryProvenance>? provenance = null)
    {
        Type = Validation.EnsureNotNullOrWhiteSpace(type, nameof(type)).ToLowerInvariant();
        Identifier = Validation.EnsureNotNullOrWhiteSpace(identifier, nameof(identifier));
        Platform = Validation.TrimToNull(platform);

        // Range equality ignores provenance, so identical ranges from different sources collapse to one.
        VersionRanges = (versionRanges ?? Array.Empty<AffectedVersionRange>())
            .Distinct(AffectedVersionRangeEqualityComparer.Instance)
            .OrderBy(static range => range, AffectedVersionRangeComparer.Instance)
            .ToImmutableArray();

        // Statuses: sorted by label, then by provenance fields for stable tie-breaking.
        Statuses = (statuses ?? Array.Empty<AffectedPackageStatus>())
            .Where(static status => status is not null)
            .Distinct(AffectedPackageStatusEqualityComparer.Instance)
            .OrderBy(static status => status.Status, StringComparer.Ordinal)
            .ThenBy(static status => status.Provenance.Source, StringComparer.Ordinal)
            .ThenBy(static status => status.Provenance.Kind, StringComparer.Ordinal)
            .ThenBy(static status => status.Provenance.RecordedAt)
            .ToImmutableArray();

        // Provenance: ordered by (source, kind, recordedAt).
        Provenance = (provenance ?? Array.Empty<AdvisoryProvenance>())
            .Where(static p => p is not null)
            .OrderBy(static p => p.Source, StringComparer.Ordinal)
            .ThenBy(static p => p.Kind, StringComparer.Ordinal)
            .ThenBy(static p => p.RecordedAt)
            .ToImmutableArray();
    }

    /// <summary>
    /// Semantic type of the coordinates (rpm, deb, cpe, semver, vendor, ics-vendor).
    /// </summary>
    public string Type { get; }

    /// <summary>
    /// Canonical identifier for the package (NEVRA, PackageURL, CPE string, vendor slug, etc.).
    /// </summary>
    public string Identifier { get; }

    public string? Platform { get; }

    /// <summary>Sorted, de-duplicated version ranges.</summary>
    public ImmutableArray<AffectedVersionRange> VersionRanges { get; }

    /// <summary>Sorted, de-duplicated vendor status tags.</summary>
    public ImmutableArray<AffectedPackageStatus> Statuses { get; }

    public ImmutableArray<AdvisoryProvenance> Provenance { get; }
}
|
||||
|
||||
/// <summary>
/// Known values for <see cref="AffectedPackage.Type"/>.
/// </summary>
public static class AffectedPackageTypes
{
    /// <summary>RPM package coordinates (NEVRA identifiers).</summary>
    public const string Rpm = "rpm";

    /// <summary>Debian package coordinates (EVR identifiers).</summary>
    public const string Deb = "deb";

    /// <summary>CPE string coordinates.</summary>
    public const string Cpe = "cpe";

    /// <summary>Semantic-version coordinates (e.g. PURL-identified ecosystems).</summary>
    public const string SemVer = "semver";

    /// <summary>Vendor-specific slug coordinates.</summary>
    public const string Vendor = "vendor";

    /// <summary>ICS vendor coordinates.</summary>
    public const string IcsVendor = "ics-vendor";
}
|
||||
@@ -0,0 +1,46 @@
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.Text.Json.Serialization;
|
||||
|
||||
namespace StellaOps.Feedser.Models;
|
||||
|
||||
/// <summary>
/// Represents a vendor-supplied status tag for an affected package when a concrete version range is unavailable or supplementary.
/// </summary>
public sealed record AffectedPackageStatus
{
    /// <summary>
    /// Normalizes the label through the shared status glossary and defaults missing provenance.
    /// </summary>
    /// <exception cref="ArgumentException">When the status is null/whitespace.</exception>
    /// <exception cref="ArgumentOutOfRangeException">When the status is not in the glossary.</exception>
    [JsonConstructor]
    public AffectedPackageStatus(string status, AdvisoryProvenance provenance)
    {
        Status = AffectedPackageStatusCatalog.Normalize(status);
        Provenance = provenance is null ? AdvisoryProvenance.Empty : provenance;
    }

    /// <summary>Canonical lower_snake_case status label.</summary>
    public string Status { get; }

    public AdvisoryProvenance Provenance { get; }
}
|
||||
|
||||
/// <summary>
/// Equality comparer over the status label (ordinal) and the full provenance record.
/// </summary>
public sealed class AffectedPackageStatusEqualityComparer : IEqualityComparer<AffectedPackageStatus>
{
    public static AffectedPackageStatusEqualityComparer Instance { get; } = new();

    public bool Equals(AffectedPackageStatus? x, AffectedPackageStatus? y)
        => ReferenceEquals(x, y)
            || (x is not null
                && y is not null
                && string.Equals(x.Status, y.Status, StringComparison.Ordinal)
                && EqualityComparer<AdvisoryProvenance>.Default.Equals(x.Provenance, y.Provenance));

    public int GetHashCode(AffectedPackageStatus obj)
        => HashCode.Combine(obj.Status, obj.Provenance);
}
|
||||
@@ -0,0 +1,55 @@
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
|
||||
namespace StellaOps.Feedser.Models;
|
||||
|
||||
/// <summary>
/// Central registry of allowed affected-package status labels to keep connectors consistent.
/// </summary>
public static class AffectedPackageStatusCatalog
{
    public const string KnownAffected = "known_affected";
    public const string KnownNotAffected = "known_not_affected";
    public const string UnderInvestigation = "under_investigation";
    public const string Fixed = "fixed";
    public const string FirstFixed = "first_fixed";
    public const string Mitigated = "mitigated";
    public const string NotApplicable = "not_applicable";
    public const string Affected = "affected";
    public const string NotAffected = "not_affected";
    public const string Pending = "pending";
    public const string Unknown = "unknown";

    // Backing glossary; lookups are case-insensitive by construction.
    private static readonly HashSet<string> Glossary = new(StringComparer.OrdinalIgnoreCase)
    {
        KnownAffected,
        KnownNotAffected,
        UnderInvestigation,
        Fixed,
        FirstFixed,
        Mitigated,
        NotApplicable,
        Affected,
        NotAffected,
        Pending,
        Unknown,
    };

    /// <summary>All accepted canonical status labels.</summary>
    public static IReadOnlyCollection<string> Allowed => Glossary;

    /// <summary>
    /// Canonicalizes a raw label: trim, lower-case, and map spaces/hyphens to underscores.
    /// </summary>
    /// <exception cref="ArgumentException">When <paramref name="status"/> is null or whitespace.</exception>
    /// <exception cref="ArgumentOutOfRangeException">When the canonical form is not in the glossary.</exception>
    public static string Normalize(string status)
    {
        if (string.IsNullOrWhiteSpace(status))
        {
            throw new ArgumentException("Status must be provided.", nameof(status));
        }

        var canonical = status
            .Trim()
            .ToLowerInvariant()
            .Replace(' ', '_')
            .Replace('-', '_');

        return Glossary.Contains(canonical)
            ? canonical
            : throw new ArgumentOutOfRangeException(nameof(status), status, "Status is not part of the allowed affected-package status glossary.");
    }
}
|
||||
@@ -0,0 +1,145 @@
|
||||
using System.Text.Json.Serialization;
|
||||
|
||||
namespace StellaOps.Feedser.Models;
|
||||
|
||||
/// <summary>
/// Describes a contiguous range of versions impacted by an advisory.
/// </summary>
public sealed record AffectedVersionRange
{
    /// <summary>
    /// Normalizes all textual fields (trim; empty becomes null), lower-cases the kind,
    /// and defaults missing provenance.
    /// </summary>
    /// <param name="rangeKind">Semantic kind of the range (e.g. semver, nevra, evr); must be non-empty.</param>
    /// <param name="introducedVersion">Inclusive lower bound, when known.</param>
    /// <param name="fixedVersion">Exclusive upper bound introduced by a fix, when known.</param>
    /// <param name="lastAffectedVersion">Inclusive upper bound when no fix exists, when known.</param>
    /// <param name="rangeExpression">Normalized textual fallback for the range.</param>
    /// <param name="provenance">Origin of the range; null falls back to <see cref="AdvisoryProvenance.Empty"/>.</param>
    /// <exception cref="ArgumentException">When rangeKind is null/whitespace.</exception>
    [JsonConstructor]
    public AffectedVersionRange(
        string rangeKind,
        string? introducedVersion,
        string? fixedVersion,
        string? lastAffectedVersion,
        string? rangeExpression,
        AdvisoryProvenance provenance)
    {
        RangeKind = Validation.EnsureNotNullOrWhiteSpace(rangeKind, nameof(rangeKind)).ToLowerInvariant();
        IntroducedVersion = Validation.TrimToNull(introducedVersion);
        FixedVersion = Validation.TrimToNull(fixedVersion);
        LastAffectedVersion = Validation.TrimToNull(lastAffectedVersion);
        RangeExpression = Validation.TrimToNull(rangeExpression);
        Provenance = provenance ?? AdvisoryProvenance.Empty;
    }

    /// <summary>
    /// Semantic kind of the range (e.g., semver, nevra, evr).
    /// </summary>
    public string RangeKind { get; }

    /// <summary>
    /// Inclusive version where impact begins.
    /// </summary>
    public string? IntroducedVersion { get; }

    /// <summary>
    /// Exclusive version where impact ends due to a fix.
    /// </summary>
    public string? FixedVersion { get; }

    /// <summary>
    /// Inclusive upper bound where the vendor reports exposure (when no fix available).
    /// </summary>
    public string? LastAffectedVersion { get; }

    /// <summary>
    /// Normalized textual representation of the range (fallback).
    /// </summary>
    public string? RangeExpression { get; }

    public AdvisoryProvenance Provenance { get; }

    /// <summary>
    /// Pipe-joined key over all range fields except provenance — the same fields
    /// AffectedVersionRangeEqualityComparer compares.
    /// </summary>
    public string CreateDeterministicKey()
        => string.Join('|', RangeKind, IntroducedVersion ?? string.Empty, FixedVersion ?? string.Empty, LastAffectedVersion ?? string.Empty, RangeExpression ?? string.Empty);
}
|
||||
|
||||
/// <summary>
/// Deterministic comparer for version ranges. Orders by introduced, fixed, last affected, expression, kind.
/// </summary>
public sealed class AffectedVersionRangeComparer : IComparer<AffectedVersionRange>
{
    public static AffectedVersionRangeComparer Instance { get; } = new();

    private static readonly StringComparer Comparer = StringComparer.Ordinal;

    /// <summary>
    /// Compares two ranges field by field in precedence order; a null instance sorts first.
    /// </summary>
    public int Compare(AffectedVersionRange? x, AffectedVersionRange? y)
    {
        if (ReferenceEquals(x, y))
        {
            return 0;
        }

        if (x is null)
        {
            return -1;
        }

        if (y is null)
        {
            return 1;
        }

        // Walk the tie-break chain; the first non-zero comparison decides.
        var result = Comparer.Compare(x.IntroducedVersion, y.IntroducedVersion);
        if (result == 0)
        {
            result = Comparer.Compare(x.FixedVersion, y.FixedVersion);
        }

        if (result == 0)
        {
            result = Comparer.Compare(x.LastAffectedVersion, y.LastAffectedVersion);
        }

        if (result == 0)
        {
            result = Comparer.Compare(x.RangeExpression, y.RangeExpression);
        }

        if (result == 0)
        {
            result = Comparer.Compare(x.RangeKind, y.RangeKind);
        }

        return result;
    }
}
|
||||
|
||||
/// <summary>
/// Equality comparer that ignores provenance differences.
/// </summary>
public sealed class AffectedVersionRangeEqualityComparer : IEqualityComparer<AffectedVersionRange>
{
    public static AffectedVersionRangeEqualityComparer Instance { get; } = new();

    /// <summary>
    /// True when every version/expression/kind field matches ordinally; provenance is deliberately excluded.
    /// </summary>
    public bool Equals(AffectedVersionRange? x, AffectedVersionRange? y)
        => ReferenceEquals(x, y)
            || (x is not null
                && y is not null
                && string.Equals(x.RangeKind, y.RangeKind, StringComparison.Ordinal)
                && string.Equals(x.IntroducedVersion, y.IntroducedVersion, StringComparison.Ordinal)
                && string.Equals(x.FixedVersion, y.FixedVersion, StringComparison.Ordinal)
                && string.Equals(x.LastAffectedVersion, y.LastAffectedVersion, StringComparison.Ordinal)
                && string.Equals(x.RangeExpression, y.RangeExpression, StringComparison.Ordinal));

    /// <summary>Hash over the same fields used by equality (no provenance).</summary>
    public int GetHashCode(AffectedVersionRange obj)
        => HashCode.Combine(
            obj.RangeKind,
            obj.IntroducedVersion,
            obj.FixedVersion,
            obj.LastAffectedVersion,
            obj.RangeExpression);
}
|
||||
@@ -0,0 +1,166 @@
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.Collections.Immutable;
|
||||
using System.Linq;
|
||||
using System.Text.RegularExpressions;
|
||||
|
||||
namespace StellaOps.Feedser.Models;
|
||||
|
||||
/// <summary>
/// Detects and canonicalizes advisory alias identifiers (CVE, GHSA, vendor advisories, CPE, PURL, ...).
/// </summary>
public static class AliasSchemeRegistry
{
    // A scheme pairs a name with a match predicate and a canonicalizing normalizer.
    private sealed record AliasScheme(
        string Name,
        Func<string?, bool> Predicate,
        Func<string?, string> Normalizer);

    // NOTE(review): static field initializers run in declaration order, so this array is built
    // BEFORE the Regex fields declared below it. That is safe only because the regexes are read
    // lazily inside the lambdas at match time, and Matches() tolerates a null regex. Keep that
    // invariant if reordering fields.
    // Order matters: TryGetScheme/TryNormalize use the first matching entry.
    private static readonly AliasScheme[] SchemeDefinitions =
    {
        BuildScheme(AliasSchemes.Cve, alias => alias is not null && Matches(CvERegex, alias), alias => alias is null ? string.Empty : NormalizePrefix(alias, "CVE")),
        BuildScheme(AliasSchemes.Ghsa, alias => alias is not null && Matches(GhsaRegex, alias), alias => alias is null ? string.Empty : NormalizePrefix(alias, "GHSA")),
        BuildScheme(AliasSchemes.OsV, alias => alias is not null && Matches(OsVRegex, alias), alias => alias is null ? string.Empty : NormalizePrefix(alias, "OSV")),
        BuildScheme(AliasSchemes.Jvn, alias => alias is not null && Matches(JvnRegex, alias), alias => alias is null ? string.Empty : NormalizePrefix(alias, "JVN")),
        BuildScheme(AliasSchemes.Jvndb, alias => alias is not null && Matches(JvndbRegex, alias), alias => alias is null ? string.Empty : NormalizePrefix(alias, "JVNDB")),
        BuildScheme(AliasSchemes.Bdu, alias => alias is not null && Matches(BduRegex, alias), alias => alias is null ? string.Empty : NormalizePrefix(alias, "BDU")),
        BuildScheme(AliasSchemes.Vu, alias => alias is not null && alias.StartsWith("VU#", StringComparison.OrdinalIgnoreCase), alias => NormalizePrefix(alias, "VU", preserveSeparator: '#')),
        BuildScheme(AliasSchemes.Msrc, alias => alias is not null && alias.StartsWith("MSRC-", StringComparison.OrdinalIgnoreCase), alias => NormalizePrefix(alias, "MSRC")),
        BuildScheme(AliasSchemes.CiscoSa, alias => alias is not null && alias.StartsWith("CISCO-SA-", StringComparison.OrdinalIgnoreCase), alias => NormalizePrefix(alias, "CISCO-SA")),
        BuildScheme(AliasSchemes.OracleCpu, alias => alias is not null && alias.StartsWith("ORACLE-CPU", StringComparison.OrdinalIgnoreCase), alias => NormalizePrefix(alias, "ORACLE-CPU")),
        BuildScheme(AliasSchemes.Apsb, alias => alias is not null && alias.StartsWith("APSB-", StringComparison.OrdinalIgnoreCase), alias => NormalizePrefix(alias, "APSB")),
        BuildScheme(AliasSchemes.Apa, alias => alias is not null && alias.StartsWith("APA-", StringComparison.OrdinalIgnoreCase), alias => NormalizePrefix(alias, "APA")),
        BuildScheme(AliasSchemes.AppleHt, alias => alias is not null && alias.StartsWith("APPLE-HT", StringComparison.OrdinalIgnoreCase), alias => NormalizePrefix(alias, "APPLE-HT")),
        BuildScheme(AliasSchemes.ChromiumPost, alias => alias is not null && (alias.StartsWith("CHROMIUM-POST", StringComparison.OrdinalIgnoreCase) || alias.StartsWith("CHROMIUM:", StringComparison.OrdinalIgnoreCase)), NormalizeChromium),
        BuildScheme(AliasSchemes.Vmsa, alias => alias is not null && alias.StartsWith("VMSA-", StringComparison.OrdinalIgnoreCase), alias => NormalizePrefix(alias, "VMSA")),
        BuildScheme(AliasSchemes.Rhsa, alias => alias is not null && alias.StartsWith("RHSA-", StringComparison.OrdinalIgnoreCase), alias => NormalizePrefix(alias, "RHSA")),
        BuildScheme(AliasSchemes.Usn, alias => alias is not null && alias.StartsWith("USN-", StringComparison.OrdinalIgnoreCase), alias => NormalizePrefix(alias, "USN")),
        BuildScheme(AliasSchemes.Dsa, alias => alias is not null && alias.StartsWith("DSA-", StringComparison.OrdinalIgnoreCase), alias => NormalizePrefix(alias, "DSA")),
        BuildScheme(AliasSchemes.SuseSu, alias => alias is not null && alias.StartsWith("SUSE-SU-", StringComparison.OrdinalIgnoreCase), alias => NormalizePrefix(alias, "SUSE-SU")),
        BuildScheme(AliasSchemes.Icsa, alias => alias is not null && alias.StartsWith("ICSA-", StringComparison.OrdinalIgnoreCase), alias => NormalizePrefix(alias, "ICSA")),
        BuildScheme(AliasSchemes.Cwe, alias => alias is not null && Matches(CweRegex, alias), alias => alias is null ? string.Empty : NormalizePrefix(alias, "CWE")),
        // cpe:/pkg: identifiers are canonicalized to a lower-case prefix, not upper-case.
        BuildScheme(AliasSchemes.Cpe, alias => alias is not null && alias.StartsWith("cpe:", StringComparison.OrdinalIgnoreCase), alias => NormalizePrefix(alias, "cpe", uppercase:false)),
        BuildScheme(AliasSchemes.Purl, alias => alias is not null && alias.StartsWith("pkg:", StringComparison.OrdinalIgnoreCase), alias => NormalizePrefix(alias, "pkg", uppercase:false)),
    };

    // Helper used by the table above to construct scheme entries.
    private static AliasScheme BuildScheme(string name, Func<string?, bool> predicate, Func<string?, string> normalizer)
        => new(
            name,
            predicate,
            alias => normalizer(alias));

    // Case-insensitive set of all registered scheme names (declared after SchemeDefinitions, so
    // the array above is already populated when this initializer runs).
    private static readonly ImmutableHashSet<string> SchemeNames = SchemeDefinitions
        .Select(static scheme => scheme.Name)
        .ToImmutableHashSet(StringComparer.OrdinalIgnoreCase);

    private static readonly Regex CvERegex = new("^CVE-\\d{4}-\\d{4,}$", RegexOptions.Compiled | RegexOptions.CultureInvariant | RegexOptions.IgnoreCase);
    private static readonly Regex GhsaRegex = new("^GHSA-[0-9a-z]{4}-[0-9a-z]{4}-[0-9a-z]{4}$", RegexOptions.Compiled | RegexOptions.CultureInvariant | RegexOptions.IgnoreCase);
    private static readonly Regex OsVRegex = new("^OSV-\\d{4}-\\d+$", RegexOptions.Compiled | RegexOptions.CultureInvariant | RegexOptions.IgnoreCase);
    private static readonly Regex JvnRegex = new("^JVN-\\d{4}-\\d{6}$", RegexOptions.Compiled | RegexOptions.CultureInvariant | RegexOptions.IgnoreCase);
    private static readonly Regex JvndbRegex = new("^JVNDB-\\d{4}-\\d{6}$", RegexOptions.Compiled | RegexOptions.CultureInvariant | RegexOptions.IgnoreCase);
    private static readonly Regex BduRegex = new("^BDU-\\d{4}-\\d+$", RegexOptions.Compiled | RegexOptions.CultureInvariant | RegexOptions.IgnoreCase);
    private static readonly Regex CweRegex = new("^CWE-\\d+$", RegexOptions.Compiled | RegexOptions.CultureInvariant | RegexOptions.IgnoreCase);

    /// <summary>All registered scheme names (case-insensitive membership).</summary>
    public static IReadOnlyCollection<string> KnownSchemes => SchemeNames;

    /// <summary>True when the given name is a registered scheme (case-insensitive).</summary>
    public static bool IsKnownScheme(string? scheme)
        => !string.IsNullOrWhiteSpace(scheme) && SchemeNames.Contains(scheme);

    /// <summary>
    /// Tries to detect the scheme of a raw alias. Returns false (scheme = empty) when no
    /// registered scheme matches.
    /// </summary>
    public static bool TryGetScheme(string? alias, out string scheme)
    {
        if (string.IsNullOrWhiteSpace(alias))
        {
            scheme = string.Empty;
            return false;
        }

        var candidate = alias.Trim();
        foreach (var entry in SchemeDefinitions)
        {
            if (entry.Predicate(candidate))
            {
                scheme = entry.Name;
                return true;
            }
        }

        scheme = string.Empty;
        return false;
    }

    /// <summary>
    /// Tries to detect the scheme and canonicalize the alias. On no match, returns false with
    /// normalized set to the trimmed input and scheme empty.
    /// </summary>
    public static bool TryNormalize(string? alias, out string normalized, out string scheme)
    {
        normalized = string.Empty;
        scheme = string.Empty;

        if (string.IsNullOrWhiteSpace(alias))
        {
            return false;
        }

        var candidate = alias.Trim();
        foreach (var entry in SchemeDefinitions)
        {
            if (entry.Predicate(candidate))
            {
                scheme = entry.Name;
                normalized = entry.Normalizer(candidate);
                return true;
            }
        }

        normalized = candidate;
        return false;
    }

    /// <summary>
    /// Re-cases the scheme prefix of an alias (upper by default, lower when <paramref name="uppercase"/>
    /// is false) while leaving the remainder untouched.
    /// </summary>
    /// <param name="preserveSeparator">When set, the character expected immediately after the prefix (e.g. '#' for VU).</param>
    private static string NormalizePrefix(string? alias, string prefix, bool uppercase = true, char? preserveSeparator = null)
    {
        if (string.IsNullOrWhiteSpace(alias))
        {
            return string.Empty;
        }

        var comparison = StringComparison.OrdinalIgnoreCase;
        if (!alias.StartsWith(prefix, comparison))
        {
            return uppercase ? alias : alias.ToLowerInvariant();
        }

        var remainder = alias[prefix.Length..];
        if (preserveSeparator is { } separator && remainder.Length > 0 && remainder[0] != separator)
        {
            // Edge case: alias is expected to use a specific separator but does not – return unchanged.
            return uppercase ? prefix.ToUpperInvariant() + remainder : prefix + remainder;
        }

        var normalizedPrefix = uppercase ? prefix.ToUpperInvariant() : prefix.ToLowerInvariant();
        return normalizedPrefix + remainder;
    }

    // Chromium aliases come in two spellings ("CHROMIUM-POST…" and "CHROMIUM:…"); both get an
    // upper-cased prefix, anything else is returned untouched.
    private static string NormalizeChromium(string? alias)
    {
        if (string.IsNullOrWhiteSpace(alias))
        {
            return string.Empty;
        }

        if (alias.StartsWith("CHROMIUM-POST", StringComparison.OrdinalIgnoreCase))
        {
            return NormalizePrefix(alias, "CHROMIUM-POST");
        }

        if (alias.StartsWith("CHROMIUM:", StringComparison.OrdinalIgnoreCase))
        {
            var remainder = alias["CHROMIUM".Length..];
            return "CHROMIUM" + remainder;
        }

        return alias;
    }

    // Null-tolerant regex match; the null-regex guard covers the static-initialization-order
    // window described above SchemeDefinitions.
    private static bool Matches(Regex? regex, string? candidate)
    {
        if (regex is null || string.IsNullOrWhiteSpace(candidate))
        {
            return false;
        }

        return regex.IsMatch(candidate);
    }
}
|
||||
@@ -0,0 +1,31 @@
|
||||
namespace StellaOps.Feedser.Models;
|
||||
|
||||
/// <summary>
/// Well-known alias scheme identifiers referenced throughout the pipeline.
/// </summary>
public static class AliasSchemes
{
    /// <summary>Common Vulnerabilities and Exposures.</summary>
    public const string Cve = "CVE";

    /// <summary>GitHub Security Advisory.</summary>
    public const string Ghsa = "GHSA";

    /// <summary>Open Source Vulnerability identifier.</summary>
    public const string OsV = "OSV";

    public const string Jvn = "JVN";
    public const string Jvndb = "JVNDB";
    public const string Bdu = "BDU";

    /// <summary>CERT/CC vulnerability note (VU#…).</summary>
    public const string Vu = "VU";

    public const string Msrc = "MSRC";
    public const string CiscoSa = "CISCO-SA";
    public const string OracleCpu = "ORACLE-CPU";
    public const string Apsb = "APSB";
    public const string Apa = "APA";
    public const string AppleHt = "APPLE-HT";
    public const string ChromiumPost = "CHROMIUM-POST";
    public const string Vmsa = "VMSA";
    public const string Rhsa = "RHSA";
    public const string Usn = "USN";
    public const string Dsa = "DSA";
    public const string SuseSu = "SUSE-SU";
    public const string Icsa = "ICSA";

    /// <summary>Common Weakness Enumeration.</summary>
    public const string Cwe = "CWE";

    /// <summary>Common Platform Enumeration.</summary>
    public const string Cpe = "CPE";

    /// <summary>Package URL.</summary>
    public const string Purl = "PURL";
}
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user