Refactor code structure for improved readability and maintainability; optimize performance in key functions.

This commit is contained in: master
2025-12-22 19:06:31 +02:00
parent dfaa2079aa
commit 4602ccc3a3
1444 changed files with 109919 additions and 8058 deletions


@@ -0,0 +1,25 @@
# Concelier Alpine Connector Charter
## Mission
Implement and maintain the Alpine secdb connector that ingests Alpine Linux package fix data into Concelier under the Aggregation-Only Contract (AOC). Preserve APK version semantics and provenance while keeping ingestion deterministic and offline-ready.
## Scope
- Connector fetch/parse/map logic in `StellaOps.Concelier.Connector.Distro.Alpine`.
- Alpine secdb JSON parsing and normalization of package fix entries (an abridged payload sample follows this list).
- Source cursor/fetch caching and deterministic mapping.
- Unit/integration tests and fixtures for secdb parsing and mapping.
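For orientation, an abridged secdb payload has this shape (values are illustrative; the field names match what `AlpineSecDbParser` reads):

```json
{
  "distroversion": "v3.20",
  "reponame": "main",
  "urlprefix": "https://secdb.alpinelinux.org",
  "packages": [
    {
      "pkg": {
        "name": "openssl",
        "secfixes": {
          "3.3.1-r0": ["CVE-2024-XXXX"]
        }
      }
    }
  ]
}
```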
## Required Reading
- `docs/modules/concelier/architecture.md`
- `docs/ingestion/aggregation-only-contract.md`
- `docs/modules/concelier/operations/connectors/alpine.md`
- `docs/modules/concelier/operations/mirror.md`
- `docs/product-advisories/archived/22-Dec-2025 - Getting Distro Backport Logic Right.md`
## Working Agreement
1. **Status sync**: set the task to `DOING` before starting and to `DONE` on completion, in both the sprint file and the local `TASKS.md`.
2. **AOC adherence**: do not derive severity or merge fields; persist upstream data with provenance.
3. **Determinism**: sort packages, version keys, and CVE lists; normalize timestamps to UTC ISO-8601 (a minimal sketch follows this list).
4. **Offline readiness**: only fetch from allowlisted secdb hosts; document bundle usage for air-gapped runs.
5. **Testing**: add fixtures for parsing and mapping; keep integration tests deterministic and opt-in.
6. **Documentation**: update connector ops docs when configuration or mapping changes.
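As a worked example of rule 3, a minimal sketch of the ordering and timestamp rules (helper names are illustrative, not the connector's API; the real logic lives in `AlpineSecDbParser` and `AlpineMapper`):

```csharp
using System;
using System.Collections.Generic;
using System.Globalization;
using System.Linq;

static class DeterminismSketch
{
    // Sort version keys and deduplicate/sort CVE lists with a fixed comparer
    // so repeated runs over the same secdb payload serialize identically.
    public static SortedDictionary<string, string[]> NormalizeSecfixes(
        IReadOnlyDictionary<string, string[]> secfixes)
    {
        var ordered = new SortedDictionary<string, string[]>(StringComparer.OrdinalIgnoreCase);
        foreach (var (version, cves) in secfixes)
        {
            ordered[version.Trim()] = (cves ?? Array.Empty<string>())
                .Where(static c => !string.IsNullOrWhiteSpace(c))
                .Select(static c => c.Trim())
                .Distinct(StringComparer.OrdinalIgnoreCase)
                .OrderBy(static c => c, StringComparer.OrdinalIgnoreCase)
                .ToArray();
        }
        return ordered;
    }

    // Normalize every persisted timestamp to UTC ISO-8601.
    public static string ToUtcIso8601(DateTimeOffset value)
        => value.ToUniversalTime().ToString("O", CultureInfo.InvariantCulture);
}
```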


@@ -0,0 +1,538 @@
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Options;
using StellaOps.Concelier.Documents;
using StellaOps.Concelier.Connector.Common;
using StellaOps.Concelier.Connector.Common.Fetch;
using StellaOps.Concelier.Connector.Distro.Alpine.Configuration;
using StellaOps.Concelier.Connector.Distro.Alpine.Dto;
using StellaOps.Concelier.Connector.Distro.Alpine.Internal;
using StellaOps.Concelier.Storage;
using StellaOps.Concelier.Storage.Advisories;
using StellaOps.Plugin;
namespace StellaOps.Concelier.Connector.Distro.Alpine;
public sealed class AlpineConnector : IFeedConnector
{
private const string SchemaVersion = "alpine.secdb.v1";
private readonly SourceFetchService _fetchService;
private readonly RawDocumentStorage _rawDocumentStorage;
private readonly IDocumentStore _documentStore;
private readonly IDtoStore _dtoStore;
private readonly IAdvisoryStore _advisoryStore;
private readonly ISourceStateRepository _stateRepository;
private readonly AlpineOptions _options;
private readonly TimeProvider _timeProvider;
private readonly ILogger<AlpineConnector> _logger;
private static readonly Action<ILogger, string, int, Exception?> LogMapped =
LoggerMessage.Define<string, int>(
LogLevel.Information,
new EventId(1, "AlpineMapped"),
"Alpine secdb {Stream} mapped {AdvisoryCount} advisories");
public AlpineConnector(
SourceFetchService fetchService,
RawDocumentStorage rawDocumentStorage,
IDocumentStore documentStore,
IDtoStore dtoStore,
IAdvisoryStore advisoryStore,
ISourceStateRepository stateRepository,
IOptions<AlpineOptions> options,
TimeProvider? timeProvider,
ILogger<AlpineConnector> logger)
{
_fetchService = fetchService ?? throw new ArgumentNullException(nameof(fetchService));
_rawDocumentStorage = rawDocumentStorage ?? throw new ArgumentNullException(nameof(rawDocumentStorage));
_documentStore = documentStore ?? throw new ArgumentNullException(nameof(documentStore));
_dtoStore = dtoStore ?? throw new ArgumentNullException(nameof(dtoStore));
_advisoryStore = advisoryStore ?? throw new ArgumentNullException(nameof(advisoryStore));
_stateRepository = stateRepository ?? throw new ArgumentNullException(nameof(stateRepository));
_options = (options ?? throw new ArgumentNullException(nameof(options))).Value ?? throw new ArgumentNullException(nameof(options));
_options.Validate();
_timeProvider = timeProvider ?? TimeProvider.System;
_logger = logger ?? throw new ArgumentNullException(nameof(logger));
}
public string SourceName => AlpineConnectorPlugin.SourceName;
public async Task FetchAsync(IServiceProvider services, CancellationToken cancellationToken)
{
ArgumentNullException.ThrowIfNull(services);
var cursor = await GetCursorAsync(cancellationToken).ConfigureAwait(false);
var now = _timeProvider.GetUtcNow();
var pendingDocuments = new HashSet<Guid>(cursor.PendingDocuments);
var pendingMappings = new HashSet<Guid>(cursor.PendingMappings);
var fetchCache = new Dictionary<string, AlpineFetchCacheEntry>(cursor.FetchCache, StringComparer.OrdinalIgnoreCase);
var touchedResources = new HashSet<string>(StringComparer.OrdinalIgnoreCase);
var targets = BuildTargets().ToList();
var maxDocuments = Math.Clamp(_options.MaxDocumentsPerFetch, 1, 200);
// Prune stale cache keys only when every target was visited this run;
// otherwise untouched keys may simply not have been fetched yet.
var pruneCache = targets.Count <= maxDocuments;
foreach (var target in targets.Take(maxDocuments))
{
cancellationToken.ThrowIfCancellationRequested();
var cacheKey = target.Uri.ToString();
touchedResources.Add(cacheKey);
cursor.TryGetCache(cacheKey, out var cachedEntry);
var existing = await _documentStore.FindBySourceAndUriAsync(SourceName, cacheKey, cancellationToken).ConfigureAwait(false);
var metadata = BuildMetadata(target.Release, target.Repository, target.Stream, target.Uri);
var request = new SourceFetchRequest(AlpineOptions.HttpClientName, SourceName, target.Uri)
{
Metadata = metadata,
AcceptHeaders = new[] { "application/json" },
TimeoutOverride = _options.FetchTimeout,
ETag = existing?.Etag ?? cachedEntry?.ETag,
LastModified = existing?.LastModified ?? cachedEntry?.LastModified,
};
SourceFetchResult result;
try
{
result = await _fetchService.FetchAsync(request, cancellationToken).ConfigureAwait(false);
}
catch (Exception ex)
{
_logger.LogError(ex, "Alpine secdb fetch failed for {Uri}", target.Uri);
await _stateRepository.MarkFailureAsync(SourceName, now, TimeSpan.FromMinutes(5), ex.Message, cancellationToken).ConfigureAwait(false);
throw;
}
if (result.IsNotModified)
{
if (existing is not null)
{
fetchCache[cacheKey] = new AlpineFetchCacheEntry(existing.Etag, existing.LastModified);
if (string.Equals(existing.Status, DocumentStatuses.Mapped, StringComparison.Ordinal))
{
pendingDocuments.Remove(existing.Id);
pendingMappings.Remove(existing.Id);
}
}
continue;
}
if (!result.IsSuccess || result.Document is null)
{
continue;
}
fetchCache[cacheKey] = AlpineFetchCacheEntry.FromDocument(result.Document);
pendingDocuments.Add(result.Document.Id);
pendingMappings.Remove(result.Document.Id);
if (_options.RequestDelay > TimeSpan.Zero)
{
try
{
await Task.Delay(_options.RequestDelay, cancellationToken).ConfigureAwait(false);
}
catch (TaskCanceledException)
{
break;
}
}
}
if (pruneCache && fetchCache.Count > 0 && touchedResources.Count > 0)
{
var staleKeys = fetchCache.Keys.Where(key => !touchedResources.Contains(key)).ToArray();
foreach (var key in staleKeys)
{
fetchCache.Remove(key);
}
}
var updatedCursor = cursor
.WithPendingDocuments(pendingDocuments)
.WithPendingMappings(pendingMappings)
.WithFetchCache(fetchCache);
await UpdateCursorAsync(updatedCursor, cancellationToken).ConfigureAwait(false);
}
public async Task ParseAsync(IServiceProvider services, CancellationToken cancellationToken)
{
ArgumentNullException.ThrowIfNull(services);
var cursor = await GetCursorAsync(cancellationToken).ConfigureAwait(false);
if (cursor.PendingDocuments.Count == 0)
{
return;
}
var remaining = cursor.PendingDocuments.ToList();
var pendingMappings = cursor.PendingMappings.ToList();
foreach (var documentId in cursor.PendingDocuments)
{
cancellationToken.ThrowIfCancellationRequested();
var document = await _documentStore.FindAsync(documentId, cancellationToken).ConfigureAwait(false);
if (document is null)
{
remaining.Remove(documentId);
continue;
}
if (!document.PayloadId.HasValue)
{
_logger.LogWarning("Alpine secdb document {DocumentId} missing raw payload", document.Id);
await _documentStore.UpdateStatusAsync(document.Id, DocumentStatuses.Failed, cancellationToken).ConfigureAwait(false);
remaining.Remove(documentId);
continue;
}
byte[] bytes;
try
{
bytes = await _rawDocumentStorage.DownloadAsync(document.PayloadId.Value, cancellationToken).ConfigureAwait(false);
}
catch (Exception ex)
{
_logger.LogError(ex, "Failed to download Alpine secdb document {DocumentId}", document.Id);
throw;
}
AlpineSecDbDto dto;
try
{
var json = Encoding.UTF8.GetString(bytes);
dto = AlpineSecDbParser.Parse(json);
dto = ApplyMetadataFallbacks(dto, document);
}
catch (Exception ex)
{
_logger.LogWarning(ex, "Failed to parse Alpine secdb payload for document {DocumentId}", document.Id);
await _documentStore.UpdateStatusAsync(document.Id, DocumentStatuses.Failed, cancellationToken).ConfigureAwait(false);
remaining.Remove(documentId);
continue;
}
var payload = ToDocument(dto);
var dtoRecord = new DtoRecord(Guid.NewGuid(), document.Id, SourceName, SchemaVersion, payload, _timeProvider.GetUtcNow());
await _dtoStore.UpsertAsync(dtoRecord, cancellationToken).ConfigureAwait(false);
await _documentStore.UpdateStatusAsync(document.Id, DocumentStatuses.PendingMap, cancellationToken).ConfigureAwait(false);
remaining.Remove(document.Id);
if (!pendingMappings.Contains(document.Id))
{
pendingMappings.Add(document.Id);
}
}
var updatedCursor = cursor
.WithPendingDocuments(remaining)
.WithPendingMappings(pendingMappings);
await UpdateCursorAsync(updatedCursor, cancellationToken).ConfigureAwait(false);
}
public async Task MapAsync(IServiceProvider services, CancellationToken cancellationToken)
{
ArgumentNullException.ThrowIfNull(services);
var cursor = await GetCursorAsync(cancellationToken).ConfigureAwait(false);
if (cursor.PendingMappings.Count == 0)
{
return;
}
var pendingMappings = cursor.PendingMappings.ToList();
foreach (var documentId in cursor.PendingMappings)
{
cancellationToken.ThrowIfCancellationRequested();
var dtoRecord = await _dtoStore.FindByDocumentIdAsync(documentId, cancellationToken).ConfigureAwait(false);
var document = await _documentStore.FindAsync(documentId, cancellationToken).ConfigureAwait(false);
if (dtoRecord is null || document is null)
{
pendingMappings.Remove(documentId);
continue;
}
AlpineSecDbDto dto;
try
{
dto = FromDocument(dtoRecord.Payload);
dto = ApplyMetadataFallbacks(dto, document);
}
catch (Exception ex)
{
_logger.LogError(ex, "Failed to deserialize Alpine secdb DTO for document {DocumentId}", documentId);
await _documentStore.UpdateStatusAsync(documentId, DocumentStatuses.Failed, cancellationToken).ConfigureAwait(false);
pendingMappings.Remove(documentId);
continue;
}
var advisories = AlpineMapper.Map(dto, document, _timeProvider.GetUtcNow());
foreach (var advisory in advisories)
{
await _advisoryStore.UpsertAsync(advisory, cancellationToken).ConfigureAwait(false);
}
await _documentStore.UpdateStatusAsync(documentId, DocumentStatuses.Mapped, cancellationToken).ConfigureAwait(false);
pendingMappings.Remove(documentId);
if (advisories.Count > 0)
{
var stream = BuildStream(dto);
LogMapped(_logger, stream, advisories.Count, null);
}
}
var updatedCursor = cursor.WithPendingMappings(pendingMappings);
await UpdateCursorAsync(updatedCursor, cancellationToken).ConfigureAwait(false);
}
private async Task<AlpineCursor> GetCursorAsync(CancellationToken cancellationToken)
{
var state = await _stateRepository.TryGetAsync(SourceName, cancellationToken).ConfigureAwait(false);
return state is null ? AlpineCursor.Empty : AlpineCursor.FromDocument(state.Cursor);
}
private async Task UpdateCursorAsync(AlpineCursor cursor, CancellationToken cancellationToken)
{
var document = cursor.ToDocumentObject();
await _stateRepository.UpdateCursorAsync(SourceName, document, _timeProvider.GetUtcNow(), cancellationToken).ConfigureAwait(false);
}
private IEnumerable<AlpineTarget> BuildTargets()
{
var releases = NormalizeList(_options.Releases);
var repositories = NormalizeList(_options.Repositories);
foreach (var release in releases)
{
foreach (var repository in repositories)
{
var stream = $"{release}/{repository}";
var relative = $"{release}/{repository}.json";
var uri = new Uri(_options.BaseUri, relative);
yield return new AlpineTarget(release, repository, stream, uri);
}
}
}
private static string[] NormalizeList(string[] values)
{
if (values is null || values.Length == 0)
{
return Array.Empty<string>();
}
return values
.Where(static value => !string.IsNullOrWhiteSpace(value))
.Select(static value => value.Trim())
.Distinct(StringComparer.OrdinalIgnoreCase)
.OrderBy(static value => value, StringComparer.OrdinalIgnoreCase)
.ToArray();
}
private static Dictionary<string, string> BuildMetadata(string release, string repository, string stream, Uri uri)
{
var metadata = new Dictionary<string, string>(StringComparer.Ordinal)
{
["alpine.release"] = release,
["alpine.repo"] = repository,
["source.stream"] = stream,
["document.id"] = $"alpine:{stream}",
["alpine.uri"] = uri.ToString(),
};
return metadata;
}
private static AlpineSecDbDto ApplyMetadataFallbacks(AlpineSecDbDto dto, DocumentRecord document)
{
if (document.Metadata is null || document.Metadata.Count == 0)
{
return dto;
}
var distro = dto.DistroVersion;
var repo = dto.RepoName;
var prefix = dto.UrlPrefix;
if (string.IsNullOrWhiteSpace(distro) && document.Metadata.TryGetValue("alpine.release", out var release))
{
distro = release;
}
if (string.IsNullOrWhiteSpace(repo) && document.Metadata.TryGetValue("alpine.repo", out var repoValue))
{
repo = repoValue;
}
if (string.IsNullOrWhiteSpace(prefix) && document.Metadata.TryGetValue("alpine.uri", out var uriValue))
{
if (Uri.TryCreate(uriValue, UriKind.Absolute, out var parsed))
{
prefix = parsed.GetLeftPart(UriPartial.Authority) + "/";
}
}
return dto with
{
DistroVersion = distro ?? string.Empty,
RepoName = repo ?? string.Empty,
UrlPrefix = prefix ?? string.Empty
};
}
private static string BuildStream(AlpineSecDbDto dto)
{
var release = dto.DistroVersion?.Trim();
var repo = dto.RepoName?.Trim();
if (!string.IsNullOrWhiteSpace(release) && !string.IsNullOrWhiteSpace(repo))
{
return $"{release}/{repo}";
}
if (!string.IsNullOrWhiteSpace(release))
{
return release;
}
if (!string.IsNullOrWhiteSpace(repo))
{
return repo;
}
return "unknown";
}
private static DocumentObject ToDocument(AlpineSecDbDto dto)
{
var packages = new DocumentArray();
foreach (var package in dto.Packages)
{
var secfixes = new DocumentObject();
foreach (var pair in package.Secfixes.OrderBy(pair => pair.Key, StringComparer.OrdinalIgnoreCase))
{
var cves = pair.Value ?? Array.Empty<string>();
var ordered = cves
.Where(static value => !string.IsNullOrWhiteSpace(value))
.Select(static value => value.Trim())
.Distinct(StringComparer.OrdinalIgnoreCase)
.OrderBy(static value => value, StringComparer.OrdinalIgnoreCase)
.ToArray();
secfixes[pair.Key] = new DocumentArray(ordered);
}
packages.Add(new DocumentObject
{
["name"] = package.Name,
["secfixes"] = secfixes
});
}
var doc = new DocumentObject
{
["distroVersion"] = dto.DistroVersion,
["repoName"] = dto.RepoName,
["urlPrefix"] = dto.UrlPrefix,
["packages"] = packages
};
return doc;
}
private static AlpineSecDbDto FromDocument(DocumentObject document)
{
var distroVersion = document.GetValue("distroVersion", string.Empty).AsString;
var repoName = document.GetValue("repoName", string.Empty).AsString;
var urlPrefix = document.GetValue("urlPrefix", string.Empty).AsString;
var packages = new List<AlpinePackageDto>();
if (document.TryGetValue("packages", out var packageValue) && packageValue is DocumentArray packageArray)
{
foreach (var element in packageArray.OfType<DocumentObject>())
{
var name = element.GetValue("name", string.Empty).AsString;
if (string.IsNullOrWhiteSpace(name))
{
continue;
}
var secfixes = new Dictionary<string, string[]>(StringComparer.OrdinalIgnoreCase);
if (element.TryGetValue("secfixes", out var secfixesValue) && secfixesValue is DocumentObject secfixesDoc)
{
foreach (var entry in secfixesDoc.Elements)
{
if (string.IsNullOrWhiteSpace(entry.Name))
{
continue;
}
if (entry.Value is not DocumentArray cveArray)
{
continue;
}
var cves = cveArray
.OfType<DocumentValue>()
.Select(static value => value.ToString())
.Where(static value => !string.IsNullOrWhiteSpace(value))
.Select(static value => value!.Trim())
.Distinct(StringComparer.OrdinalIgnoreCase)
.OrderBy(static value => value, StringComparer.OrdinalIgnoreCase)
.ToArray();
if (cves.Length > 0)
{
secfixes[entry.Name] = cves;
}
}
}
packages.Add(new AlpinePackageDto(name.Trim(), secfixes));
}
}
var orderedPackages = packages
.OrderBy(pkg => pkg.Name, StringComparer.OrdinalIgnoreCase)
.Select(static pkg => pkg with { Secfixes = OrderSecfixes(pkg.Secfixes) })
.ToList();
return new AlpineSecDbDto(distroVersion, repoName, urlPrefix, orderedPackages);
}
private static IReadOnlyDictionary<string, string[]> OrderSecfixes(IReadOnlyDictionary<string, string[]> secfixes)
{
if (secfixes is null || secfixes.Count == 0)
{
return new Dictionary<string, string[]>(StringComparer.OrdinalIgnoreCase);
}
var ordered = new Dictionary<string, string[]>(StringComparer.OrdinalIgnoreCase);
foreach (var pair in secfixes.OrderBy(pair => pair.Key, StringComparer.OrdinalIgnoreCase))
{
var values = pair.Value ?? Array.Empty<string>();
ordered[pair.Key] = values
.Where(static value => !string.IsNullOrWhiteSpace(value))
.Select(static value => value.Trim())
.Distinct(StringComparer.OrdinalIgnoreCase)
.OrderBy(static value => value, StringComparer.OrdinalIgnoreCase)
.ToArray();
}
return ordered;
}
private sealed record AlpineTarget(string Release, string Repository, string Stream, Uri Uri);
}


@@ -0,0 +1,20 @@
using System;
using Microsoft.Extensions.DependencyInjection;
using StellaOps.Plugin;
namespace StellaOps.Concelier.Connector.Distro.Alpine;
public sealed class AlpineConnectorPlugin : IConnectorPlugin
{
public const string SourceName = "distro-alpine";
public string Name => SourceName;
public bool IsAvailable(IServiceProvider services) => services is not null;
public IFeedConnector Create(IServiceProvider services)
{
ArgumentNullException.ThrowIfNull(services);
return ActivatorUtilities.CreateInstance<AlpineConnector>(services);
}
}


@@ -0,0 +1,53 @@
using System;
using Microsoft.Extensions.Configuration;
using Microsoft.Extensions.DependencyInjection;
using StellaOps.DependencyInjection;
using StellaOps.Concelier.Core.Jobs;
using StellaOps.Concelier.Connector.Distro.Alpine.Configuration;
namespace StellaOps.Concelier.Connector.Distro.Alpine;
public sealed class AlpineDependencyInjectionRoutine : IDependencyInjectionRoutine
{
private const string ConfigurationSection = "concelier:sources:alpine";
private const string FetchSchedule = "*/30 * * * *";
private const string ParseSchedule = "7,37 * * * *";
private const string MapSchedule = "12,42 * * * *";
private static readonly TimeSpan FetchTimeout = TimeSpan.FromMinutes(5);
private static readonly TimeSpan ParseTimeout = TimeSpan.FromMinutes(6);
private static readonly TimeSpan MapTimeout = TimeSpan.FromMinutes(8);
private static readonly TimeSpan LeaseDuration = TimeSpan.FromMinutes(4);
public IServiceCollection Register(IServiceCollection services, IConfiguration configuration)
{
ArgumentNullException.ThrowIfNull(services);
ArgumentNullException.ThrowIfNull(configuration);
services.AddAlpineConnector(options =>
{
configuration.GetSection(ConfigurationSection).Bind(options);
options.Validate();
});
var scheduler = new JobSchedulerBuilder(services);
scheduler
.AddJob<AlpineFetchJob>(
AlpineJobKinds.Fetch,
cronExpression: FetchSchedule,
timeout: FetchTimeout,
leaseDuration: LeaseDuration)
.AddJob<AlpineParseJob>(
AlpineJobKinds.Parse,
cronExpression: ParseSchedule,
timeout: ParseTimeout,
leaseDuration: LeaseDuration)
.AddJob<AlpineMapJob>(
AlpineJobKinds.Map,
cronExpression: MapSchedule,
timeout: MapTimeout,
leaseDuration: LeaseDuration);
return services;
}
}


@@ -0,0 +1,35 @@
using System;
using Microsoft.Extensions.DependencyInjection;
using Microsoft.Extensions.Options;
using StellaOps.Concelier.Connector.Common.Http;
using StellaOps.Concelier.Connector.Distro.Alpine.Configuration;
namespace StellaOps.Concelier.Connector.Distro.Alpine;
public static class AlpineServiceCollectionExtensions
{
public static IServiceCollection AddAlpineConnector(this IServiceCollection services, Action<AlpineOptions> configure)
{
ArgumentNullException.ThrowIfNull(services);
ArgumentNullException.ThrowIfNull(configure);
services.AddOptions<AlpineOptions>()
.Configure(configure)
.PostConfigure(static options => options.Validate());
services.AddSourceHttpClient(AlpineOptions.HttpClientName, (sp, httpOptions) =>
{
var options = sp.GetRequiredService<IOptions<AlpineOptions>>().Value;
var authority = options.BaseUri.GetLeftPart(UriPartial.Authority);
httpOptions.BaseAddress = string.IsNullOrWhiteSpace(authority) ? options.BaseUri : new Uri(authority);
httpOptions.Timeout = options.FetchTimeout;
httpOptions.UserAgent = options.UserAgent;
httpOptions.AllowedHosts.Clear();
httpOptions.AllowedHosts.Add(options.BaseUri.Host);
httpOptions.DefaultRequestHeaders["Accept"] = "application/json";
});
services.AddTransient<AlpineConnector>();
return services;
}
}


@@ -0,0 +1,5 @@
using System.Runtime.CompilerServices;
using StellaOps.Plugin.Versioning;
[assembly: InternalsVisibleTo("StellaOps.Concelier.Connector.Distro.Alpine.Tests")]
[assembly: StellaPluginVersion("1.0.0", MinimumHostVersion = "1.0.0", MaximumHostVersion = "1.99.99")]


@@ -0,0 +1,77 @@
using System;
using System.Linq;
namespace StellaOps.Concelier.Connector.Distro.Alpine.Configuration;
public sealed class AlpineOptions
{
public const string HttpClientName = "concelier.alpine";
/// <summary>
/// Base URI for Alpine secdb JSON content.
/// </summary>
public Uri BaseUri { get; set; } = new("https://secdb.alpinelinux.org/");
/// <summary>
/// Releases to fetch (for example: v3.18, v3.19, v3.20, edge).
/// </summary>
public string[] Releases { get; set; } = new[] { "v3.18", "v3.19", "v3.20", "edge" };
/// <summary>
/// Repository names to fetch (for example: main, community).
/// </summary>
public string[] Repositories { get; set; } = new[] { "main", "community" };
/// <summary>
/// Cap on release+repo documents fetched in a single run.
/// </summary>
public int MaxDocumentsPerFetch { get; set; } = 20;
/// <summary>
/// Fetch timeout for each secdb request.
/// </summary>
public TimeSpan FetchTimeout { get; set; } = TimeSpan.FromSeconds(45);
/// <summary>
/// Optional pacing delay between secdb requests.
/// </summary>
public TimeSpan RequestDelay { get; set; } = TimeSpan.Zero;
/// <summary>
/// Custom user-agent for secdb requests.
/// </summary>
public string UserAgent { get; set; } = "StellaOps.Concelier.Alpine/0.1 (+https://stella-ops.org)";
public void Validate()
{
if (BaseUri is null || !BaseUri.IsAbsoluteUri)
{
throw new InvalidOperationException("Alpine BaseUri must be an absolute URI.");
}
if (MaxDocumentsPerFetch <= 0 || MaxDocumentsPerFetch > 200)
{
throw new InvalidOperationException("MaxDocumentsPerFetch must be between 1 and 200.");
}
if (FetchTimeout <= TimeSpan.Zero || FetchTimeout > TimeSpan.FromMinutes(5))
{
throw new InvalidOperationException("FetchTimeout must be positive and less than five minutes.");
}
if (RequestDelay < TimeSpan.Zero || RequestDelay > TimeSpan.FromSeconds(10))
{
throw new InvalidOperationException("RequestDelay must be between 0 and 10 seconds.");
}
if (Releases is null || Releases.Length == 0 || Releases.All(static value => string.IsNullOrWhiteSpace(value)))
{
throw new InvalidOperationException("At least one Alpine release must be configured.");
}
if (Repositories is null || Repositories.Length == 0 || Repositories.All(static value => string.IsNullOrWhiteSpace(value)))
{
throw new InvalidOperationException("At least one Alpine repository must be configured.");
}
}
}


@@ -0,0 +1,13 @@
using System.Collections.Generic;
namespace StellaOps.Concelier.Connector.Distro.Alpine.Dto;
internal sealed record AlpineSecDbDto(
string DistroVersion,
string RepoName,
string UrlPrefix,
IReadOnlyList<AlpinePackageDto> Packages);
internal sealed record AlpinePackageDto(
string Name,
IReadOnlyDictionary<string, string[]> Secfixes);


@@ -0,0 +1,119 @@
using System;
using System.Collections.Generic;
using System.Linq;
using StellaOps.Concelier.Documents;
namespace StellaOps.Concelier.Connector.Distro.Alpine.Internal;
internal sealed record AlpineCursor(
IReadOnlyCollection<Guid> PendingDocuments,
IReadOnlyCollection<Guid> PendingMappings,
IReadOnlyDictionary<string, AlpineFetchCacheEntry> FetchCache)
{
private static readonly IReadOnlyCollection<Guid> EmptyGuidList = Array.Empty<Guid>();
private static readonly IReadOnlyDictionary<string, AlpineFetchCacheEntry> EmptyCache =
new Dictionary<string, AlpineFetchCacheEntry>(StringComparer.OrdinalIgnoreCase);
public static AlpineCursor Empty { get; } = new(EmptyGuidList, EmptyGuidList, EmptyCache);
public static AlpineCursor FromDocument(DocumentObject? document)
{
if (document is null || document.ElementCount == 0)
{
return Empty;
}
var pendingDocuments = ReadGuidSet(document, "pendingDocuments");
var pendingMappings = ReadGuidSet(document, "pendingMappings");
var cache = ReadCache(document);
return new AlpineCursor(pendingDocuments, pendingMappings, cache);
}
public DocumentObject ToDocumentObject()
{
var doc = new DocumentObject
{
["pendingDocuments"] = new DocumentArray(PendingDocuments.Select(id => id.ToString())),
["pendingMappings"] = new DocumentArray(PendingMappings.Select(id => id.ToString()))
};
if (FetchCache.Count > 0)
{
var cacheDoc = new DocumentObject();
foreach (var (key, entry) in FetchCache)
{
cacheDoc[key] = entry.ToDocumentObject();
}
doc["fetchCache"] = cacheDoc;
}
return doc;
}
public AlpineCursor WithPendingDocuments(IEnumerable<Guid> ids)
=> this with { PendingDocuments = ids?.Distinct().ToArray() ?? EmptyGuidList };
public AlpineCursor WithPendingMappings(IEnumerable<Guid> ids)
=> this with { PendingMappings = ids?.Distinct().ToArray() ?? EmptyGuidList };
public AlpineCursor WithFetchCache(IDictionary<string, AlpineFetchCacheEntry>? cache)
{
if (cache is null || cache.Count == 0)
{
return this with { FetchCache = EmptyCache };
}
return this with { FetchCache = new Dictionary<string, AlpineFetchCacheEntry>(cache, StringComparer.OrdinalIgnoreCase) };
}
public bool TryGetCache(string key, out AlpineFetchCacheEntry entry)
{
if (FetchCache.Count == 0)
{
entry = AlpineFetchCacheEntry.Empty;
return false;
}
return FetchCache.TryGetValue(key, out entry!);
}
private static IReadOnlyCollection<Guid> ReadGuidSet(DocumentObject document, string field)
{
if (!document.TryGetValue(field, out var value) || value is not DocumentArray array)
{
return EmptyGuidList;
}
var list = new List<Guid>(array.Count);
foreach (var element in array)
{
if (Guid.TryParse(element.ToString(), out var guid))
{
list.Add(guid);
}
}
return list;
}
private static IReadOnlyDictionary<string, AlpineFetchCacheEntry> ReadCache(DocumentObject document)
{
if (!document.TryGetValue("fetchCache", out var value) || value is not DocumentObject cacheDoc || cacheDoc.ElementCount == 0)
{
return EmptyCache;
}
var cache = new Dictionary<string, AlpineFetchCacheEntry>(StringComparer.OrdinalIgnoreCase);
foreach (var element in cacheDoc.Elements)
{
if (element.Value is DocumentObject entryDoc)
{
cache[element.Name] = AlpineFetchCacheEntry.FromDocument(entryDoc);
}
}
return cache;
}
}


@@ -0,0 +1,77 @@
using System;
using StellaOps.Concelier.Documents;
using StorageContracts = StellaOps.Concelier.Storage.Contracts;
namespace StellaOps.Concelier.Connector.Distro.Alpine.Internal;
internal sealed record AlpineFetchCacheEntry(string? ETag, DateTimeOffset? LastModified)
{
public static AlpineFetchCacheEntry Empty { get; } = new(null, null);
public static AlpineFetchCacheEntry FromDocument(StorageContracts.StorageDocument document)
=> new(document.Etag, document.LastModified);
public static AlpineFetchCacheEntry FromDocument(DocumentObject document)
{
if (document is null || document.ElementCount == 0)
{
return Empty;
}
string? etag = null;
DateTimeOffset? lastModified = null;
if (document.TryGetValue("etag", out var etagValue) && etagValue.DocumentType == DocumentType.String)
{
etag = etagValue.AsString;
}
if (document.TryGetValue("lastModified", out var modifiedValue))
{
lastModified = modifiedValue.DocumentType switch
{
DocumentType.DateTime => DateTime.SpecifyKind(modifiedValue.ToUniversalTime(), DateTimeKind.Utc),
DocumentType.String when DateTimeOffset.TryParse(modifiedValue.AsString, out var parsed) => parsed.ToUniversalTime(),
_ => null
};
}
return new AlpineFetchCacheEntry(etag, lastModified);
}
public DocumentObject ToDocumentObject()
{
var doc = new DocumentObject();
if (!string.IsNullOrWhiteSpace(ETag))
{
doc["etag"] = ETag;
}
if (LastModified.HasValue)
{
doc["lastModified"] = LastModified.Value.UtcDateTime;
}
return doc;
}
public bool Matches(StorageContracts.StorageDocument document)
{
if (document is null)
{
return false;
}
if (!string.Equals(ETag, document.Etag, StringComparison.Ordinal))
{
return false;
}
if (LastModified.HasValue && document.LastModified.HasValue)
{
return LastModified.Value.UtcDateTime == document.LastModified.Value.UtcDateTime;
}
return !LastModified.HasValue && !document.LastModified.HasValue;
}
}


@@ -0,0 +1,348 @@
using System;
using System.Collections.Generic;
using System.Linq;
using StellaOps.Concelier.Connector.Distro.Alpine.Dto;
using StellaOps.Concelier.Models;
using StellaOps.Concelier.Storage;
namespace StellaOps.Concelier.Connector.Distro.Alpine.Internal;
internal static class AlpineMapper
{
public static IReadOnlyList<Advisory> Map(AlpineSecDbDto dto, DocumentRecord document, DateTimeOffset recordedAt)
{
ArgumentNullException.ThrowIfNull(dto);
ArgumentNullException.ThrowIfNull(document);
if (dto.Packages is null || dto.Packages.Count == 0)
{
return Array.Empty<Advisory>();
}
var platform = BuildPlatform(dto);
var advisoryBuckets = new Dictionary<string, AdvisoryAccumulator>(StringComparer.OrdinalIgnoreCase);
foreach (var package in dto.Packages)
{
if (string.IsNullOrWhiteSpace(package.Name) || package.Secfixes is null || package.Secfixes.Count == 0)
{
continue;
}
var packageName = package.Name.Trim();
foreach (var (fixedVersion, ids) in package.Secfixes.OrderBy(kvp => kvp.Key, StringComparer.OrdinalIgnoreCase))
{
if (string.IsNullOrWhiteSpace(fixedVersion) || ids is null || ids.Length == 0)
{
continue;
}
var versionValue = fixedVersion.Trim();
foreach (var rawId in ids)
{
if (string.IsNullOrWhiteSpace(rawId))
{
continue;
}
var normalizedId = NormalizeAlias(rawId);
var advisoryKey = BuildAdvisoryKey(normalizedId);
if (string.IsNullOrWhiteSpace(advisoryKey))
{
continue;
}
if (!advisoryBuckets.TryGetValue(advisoryKey, out var bucket))
{
bucket = new AdvisoryAccumulator(advisoryKey, BuildAliases(advisoryKey, normalizedId));
advisoryBuckets[advisoryKey] = bucket;
}
else
{
bucket.Aliases.Add(normalizedId);
bucket.Aliases.Add(advisoryKey);
}
var packageKey = BuildPackageKey(platform, packageName);
if (!bucket.Packages.TryGetValue(packageKey, out var pkgAccumulator))
{
pkgAccumulator = new PackageAccumulator(packageName, platform);
bucket.Packages[packageKey] = pkgAccumulator;
}
var rangeProvenance = new AdvisoryProvenance(
AlpineConnectorPlugin.SourceName,
"range",
BuildRangeProvenanceKey(normalizedId, platform, packageName, versionValue),
recordedAt);
var packageProvenance = new AdvisoryProvenance(
AlpineConnectorPlugin.SourceName,
"affected",
BuildPackageProvenanceKey(normalizedId, platform, packageName),
recordedAt);
var vendorExtensions = BuildVendorExtensions(dto, versionValue);
var primitives = vendorExtensions.Count == 0
? null
: new RangePrimitives(
SemVer: null,
Nevra: null,
Evr: null,
VendorExtensions: vendorExtensions);
var rangeExpression = $"fixed:{versionValue}";
var range = new AffectedVersionRange(
rangeKind: "apk",
introducedVersion: null,
fixedVersion: versionValue,
lastAffectedVersion: null,
rangeExpression: rangeExpression,
provenance: rangeProvenance,
primitives: primitives);
pkgAccumulator.Ranges.Add(range);
pkgAccumulator.Provenance.Add(packageProvenance);
pkgAccumulator.Statuses.Add(new AffectedPackageStatus("resolved", packageProvenance));
var normalizedRule = range.ToNormalizedVersionRule(BuildNormalizedNote(platform));
if (normalizedRule is not null)
{
pkgAccumulator.NormalizedRules.Add(normalizedRule);
}
}
}
}
if (advisoryBuckets.Count == 0)
{
return Array.Empty<Advisory>();
}
var fetchProvenance = new AdvisoryProvenance(
AlpineConnectorPlugin.SourceName,
"document",
document.Uri,
document.FetchedAt.ToUniversalTime());
var published = document.LastModified?.ToUniversalTime() ?? document.FetchedAt.ToUniversalTime();
var advisories = new List<Advisory>(advisoryBuckets.Count);
foreach (var bucket in advisoryBuckets.Values.OrderBy(b => b.AdvisoryKey, StringComparer.OrdinalIgnoreCase))
{
var aliases = bucket.Aliases
.Where(static alias => !string.IsNullOrWhiteSpace(alias))
.Select(static alias => alias.Trim())
.Distinct(StringComparer.OrdinalIgnoreCase)
.OrderBy(static alias => alias, StringComparer.OrdinalIgnoreCase)
.ToArray();
var references = BuildReferences(document, recordedAt);
var packages = bucket.Packages.Values
.Select(static pkg => pkg.Build())
.Where(static pkg => pkg.VersionRanges.Length > 0)
.OrderBy(static pkg => pkg.Platform, StringComparer.OrdinalIgnoreCase)
.ThenBy(static pkg => pkg.Identifier, StringComparer.OrdinalIgnoreCase)
.ToArray();
var mappingProvenance = new AdvisoryProvenance(
AlpineConnectorPlugin.SourceName,
"mapping",
bucket.AdvisoryKey,
recordedAt);
advisories.Add(new Advisory(
advisoryKey: bucket.AdvisoryKey,
title: DetermineTitle(aliases, bucket.AdvisoryKey),
summary: null,
language: "en",
published: published,
modified: recordedAt > published ? recordedAt : published,
severity: null,
exploitKnown: false,
aliases: aliases,
references: references,
affectedPackages: packages,
cvssMetrics: Array.Empty<CvssMetric>(),
provenance: new[] { fetchProvenance, mappingProvenance }));
}
return advisories;
}
private static string? BuildPlatform(AlpineSecDbDto dto)
{
var release = (dto.DistroVersion ?? string.Empty).Trim();
var repo = (dto.RepoName ?? string.Empty).Trim();
if (string.IsNullOrWhiteSpace(release) && string.IsNullOrWhiteSpace(repo))
{
return null;
}
if (string.IsNullOrWhiteSpace(release))
{
return repo;
}
if (string.IsNullOrWhiteSpace(repo))
{
return release;
}
return $"{release}/{repo}";
}
private static string DetermineTitle(string[] aliases, string advisoryKey)
{
if (aliases.Length > 0)
{
return aliases[0];
}
return advisoryKey;
}
private static AdvisoryReference[] BuildReferences(DocumentRecord document, DateTimeOffset recordedAt)
{
var provenance = new AdvisoryProvenance(
AlpineConnectorPlugin.SourceName,
"reference",
document.Uri,
recordedAt);
return new[]
{
new AdvisoryReference(document.Uri, kind: "advisory", sourceTag: "secdb", summary: null, provenance: provenance)
};
}
private static Dictionary<string, string> BuildVendorExtensions(AlpineSecDbDto dto, string fixedVersion)
{
var extensions = new Dictionary<string, string>(StringComparer.Ordinal);
AddExtension(extensions, "alpine.distroversion", dto.DistroVersion);
AddExtension(extensions, "alpine.repo", dto.RepoName);
AddExtension(extensions, "alpine.fixed", fixedVersion);
AddExtension(extensions, "alpine.urlprefix", dto.UrlPrefix);
return extensions;
}
private static void AddExtension(IDictionary<string, string> extensions, string key, string? value)
{
if (!string.IsNullOrWhiteSpace(value))
{
extensions[key] = value.Trim();
}
}
private static string NormalizeAlias(string value)
{
var trimmed = value.Trim();
if (trimmed.StartsWith("CVE-", StringComparison.OrdinalIgnoreCase))
{
return trimmed.ToUpperInvariant();
}
return trimmed;
}
private static string BuildAdvisoryKey(string normalizedId)
{
if (string.IsNullOrWhiteSpace(normalizedId))
{
return string.Empty;
}
return $"alpine/{normalizedId.ToLowerInvariant()}";
}
private static HashSet<string> BuildAliases(string advisoryKey, string normalizedId)
{
var aliases = new HashSet<string>(StringComparer.OrdinalIgnoreCase);
if (!string.IsNullOrWhiteSpace(advisoryKey))
{
aliases.Add(advisoryKey);
}
if (!string.IsNullOrWhiteSpace(normalizedId))
{
aliases.Add(normalizedId);
}
return aliases;
}
private static string? BuildNormalizedNote(string? platform)
=> string.IsNullOrWhiteSpace(platform) ? null : $"alpine:{platform.Trim()}";
private static string BuildPackageKey(string? platform, string package)
=> string.IsNullOrWhiteSpace(platform) ? package : $"{platform}:{package}";
private static string BuildRangeProvenanceKey(string advisoryId, string? platform, string package, string fixedVersion)
{
if (string.IsNullOrWhiteSpace(platform))
{
return $"{advisoryId}:{package}:{fixedVersion}";
}
return $"{advisoryId}:{platform}:{package}:{fixedVersion}";
}
private static string BuildPackageProvenanceKey(string advisoryId, string? platform, string package)
{
if (string.IsNullOrWhiteSpace(platform))
{
return $"{advisoryId}:{package}";
}
return $"{advisoryId}:{platform}:{package}";
}
private sealed class AdvisoryAccumulator
{
public AdvisoryAccumulator(string advisoryKey, HashSet<string> aliases)
{
AdvisoryKey = advisoryKey;
Aliases = aliases;
Packages = new Dictionary<string, PackageAccumulator>(StringComparer.OrdinalIgnoreCase);
}
public string AdvisoryKey { get; }
public HashSet<string> Aliases { get; }
public Dictionary<string, PackageAccumulator> Packages { get; }
}
private sealed class PackageAccumulator
{
public PackageAccumulator(string identifier, string? platform)
{
Identifier = identifier;
Platform = platform;
}
public string Identifier { get; }
public string? Platform { get; }
public List<AffectedVersionRange> Ranges { get; } = new();
public List<AffectedPackageStatus> Statuses { get; } = new();
public List<AdvisoryProvenance> Provenance { get; } = new();
public List<NormalizedVersionRule> NormalizedRules { get; } = new();
public AffectedPackage Build()
=> new(
type: AffectedPackageTypes.Apk,
identifier: Identifier,
platform: Platform,
versionRanges: Ranges,
statuses: Statuses,
provenance: Provenance,
normalizedVersions: NormalizedRules);
}
}


@@ -0,0 +1,148 @@
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text.Json;
using StellaOps.Concelier.Connector.Distro.Alpine.Dto;
namespace StellaOps.Concelier.Connector.Distro.Alpine.Internal;
internal static class AlpineSecDbParser
{
public static AlpineSecDbDto Parse(string json)
{
if (string.IsNullOrWhiteSpace(json))
{
throw new ArgumentException("SecDB payload cannot be empty.", nameof(json));
}
using var document = JsonDocument.Parse(json);
var root = document.RootElement;
if (root.ValueKind != JsonValueKind.Object)
{
throw new FormatException("SecDB payload must be a JSON object.");
}
var distroVersion = ReadString(root, "distroversion") ?? string.Empty;
var repoName = ReadString(root, "reponame") ?? string.Empty;
var urlPrefix = ReadString(root, "urlprefix") ?? string.Empty;
var packages = new List<AlpinePackageDto>();
if (root.TryGetProperty("packages", out var packagesElement) && packagesElement.ValueKind == JsonValueKind.Array)
{
foreach (var element in packagesElement.EnumerateArray())
{
if (element.ValueKind != JsonValueKind.Object)
{
continue;
}
if (!element.TryGetProperty("pkg", out var pkgElement) || pkgElement.ValueKind != JsonValueKind.Object)
{
continue;
}
var name = ReadString(pkgElement, "name");
if (string.IsNullOrWhiteSpace(name))
{
continue;
}
var secfixes = ReadSecfixes(pkgElement);
packages.Add(new AlpinePackageDto(name.Trim(), secfixes));
}
}
var orderedPackages = packages
.OrderBy(pkg => pkg.Name, StringComparer.OrdinalIgnoreCase)
.Select(static pkg => pkg with { Secfixes = OrderSecfixes(pkg.Secfixes) })
.ToList();
return new AlpineSecDbDto(distroVersion, repoName, urlPrefix, orderedPackages);
}
private static IReadOnlyDictionary<string, string[]> ReadSecfixes(JsonElement pkgElement)
{
if (!pkgElement.TryGetProperty("secfixes", out var fixesElement) || fixesElement.ValueKind != JsonValueKind.Object)
{
return new Dictionary<string, string[]>(StringComparer.OrdinalIgnoreCase);
}
var result = new Dictionary<string, string[]>(StringComparer.OrdinalIgnoreCase);
foreach (var property in fixesElement.EnumerateObject())
{
var version = property.Name?.Trim();
if (string.IsNullOrWhiteSpace(version))
{
continue;
}
var cves = ReadStringArray(property.Value);
if (cves.Length == 0)
{
continue;
}
result[version] = cves;
}
return result;
}
private static string[] ReadStringArray(JsonElement element)
{
if (element.ValueKind != JsonValueKind.Array)
{
return Array.Empty<string>();
}
var items = new HashSet<string>(StringComparer.OrdinalIgnoreCase);
foreach (var entry in element.EnumerateArray())
{
if (entry.ValueKind != JsonValueKind.String)
{
continue;
}
var value = entry.GetString();
if (string.IsNullOrWhiteSpace(value))
{
continue;
}
items.Add(value.Trim());
}
return items.OrderBy(static value => value, StringComparer.OrdinalIgnoreCase).ToArray();
}
private static string? ReadString(JsonElement element, string name)
{
if (!element.TryGetProperty(name, out var value) || value.ValueKind != JsonValueKind.String)
{
return null;
}
return value.GetString();
}
private static IReadOnlyDictionary<string, string[]> OrderSecfixes(IReadOnlyDictionary<string, string[]> secfixes)
{
if (secfixes is null || secfixes.Count == 0)
{
return new Dictionary<string, string[]>(StringComparer.OrdinalIgnoreCase);
}
var ordered = new Dictionary<string, string[]>(StringComparer.OrdinalIgnoreCase);
foreach (var pair in secfixes.OrderBy(pair => pair.Key, StringComparer.OrdinalIgnoreCase))
{
ordered[pair.Key] = pair.Value
.Where(static value => !string.IsNullOrWhiteSpace(value))
.Select(static value => value.Trim())
.Distinct(StringComparer.OrdinalIgnoreCase)
.OrderBy(static value => value, StringComparer.OrdinalIgnoreCase)
.ToArray();
}
return ordered;
}
}


@@ -0,0 +1,46 @@
using System;
using System.Threading;
using System.Threading.Tasks;
using StellaOps.Concelier.Core.Jobs;
namespace StellaOps.Concelier.Connector.Distro.Alpine;
internal static class AlpineJobKinds
{
public const string Fetch = "source:alpine:fetch";
public const string Parse = "source:alpine:parse";
public const string Map = "source:alpine:map";
}
internal sealed class AlpineFetchJob : IJob
{
private readonly AlpineConnector _connector;
public AlpineFetchJob(AlpineConnector connector)
=> _connector = connector ?? throw new ArgumentNullException(nameof(connector));
public Task ExecuteAsync(JobExecutionContext context, CancellationToken cancellationToken)
=> _connector.FetchAsync(context.Services, cancellationToken);
}
internal sealed class AlpineParseJob : IJob
{
private readonly AlpineConnector _connector;
public AlpineParseJob(AlpineConnector connector)
=> _connector = connector ?? throw new ArgumentNullException(nameof(connector));
public Task ExecuteAsync(JobExecutionContext context, CancellationToken cancellationToken)
=> _connector.ParseAsync(context.Services, cancellationToken);
}
internal sealed class AlpineMapJob : IJob
{
private readonly AlpineConnector _connector;
public AlpineMapJob(AlpineConnector connector)
=> _connector = connector ?? throw new ArgumentNullException(nameof(connector));
public Task ExecuteAsync(JobExecutionContext context, CancellationToken cancellationToken)
=> _connector.MapAsync(context.Services, cancellationToken);
}


@@ -0,0 +1,17 @@
<?xml version='1.0' encoding='utf-8'?>
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<TargetFramework>net10.0</TargetFramework>
<ImplicitUsings>enable</ImplicitUsings>
<Nullable>enable</Nullable>
</PropertyGroup>
<ItemGroup>
<ProjectReference Include="../../../__Libraries/StellaOps.Plugin/StellaOps.Plugin.csproj" />
<ProjectReference Include="../StellaOps.Concelier.Connector.Common/StellaOps.Concelier.Connector.Common.csproj" />
<ProjectReference Include="../StellaOps.Concelier.Models/StellaOps.Concelier.Models.csproj" />
<ProjectReference Include="../StellaOps.Concelier.Normalization/StellaOps.Concelier.Normalization.csproj" />
</ItemGroup>
</Project>


@@ -0,0 +1,13 @@
# Concelier Alpine Connector Tasks
Local status mirror for `docs/implplan/SPRINT_2000_0003_0001_alpine_connector.md`.
| Task ID | Status | Notes |
| --- | --- | --- |
| T1 | DONE | APK version comparer + tests. |
| T2 | DONE | SecDB parser. |
| T3 | DOING | Alpine connector fetch/parse/map. |
| T4 | TODO | DI + config + health check wiring. |
| T5 | TODO | Tests, fixtures, and snapshots. |
Last synced: 2025-12-22 (UTC).


@@ -0,0 +1,35 @@
# AGENTS.md - EPSS Connector
## Purpose
Ingests EPSS (Exploit Prediction Scoring System) scores from FIRST.org to provide exploitation probability signals for CVE prioritization.
## Data Source
- **URL**: https://epss.empiricalsecurity.com/
- **Format**: `epss_scores-YYYY-MM-DD.csv.gz` (gzip-compressed CSV; a URL-construction sketch follows this list)
- **Update cadence**: Daily snapshot (typically published ~08:00 UTC)
- **Offline bundle**: Directory or file path with optional `manifest.json`
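A minimal sketch of deriving a snapshot URL from the naming scheme above (the helper is illustrative; the connector's real candidate-date logic lives in `EpssConnector`):

```csharp
using System;
using System.Globalization;

static class EpssSnapshotUri
{
    // Builds the daily snapshot URI from the documented
    // "epss_scores-YYYY-MM-DD.csv.gz" naming scheme.
    public static Uri For(Uri baseUri, DateOnly date)
    {
        var day = date.ToString("yyyy-MM-dd", CultureInfo.InvariantCulture);
        return new Uri(baseUri, $"epss_scores-{day}.csv.gz");
    }
}

// For(new Uri("https://epss.empiricalsecurity.com/"), new DateOnly(2025, 12, 22))
// -> https://epss.empiricalsecurity.com/epss_scores-2025-12-22.csv.gz
```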
## Data Flow
1. Fetch daily snapshot via HTTP or air-gapped bundle path.
2. Parse with `StellaOps.Scanner.Storage.Epss.EpssCsvStreamParser`, which yields a deterministic row count and content hash.
3. Map rows to `EpssObservation` records with band classification (Low/Medium/High/Critical); a classification sketch follows this list.
4. Store raw document + DTO metadata; mapping currently records counts and marks documents mapped.
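The band cut-offs are not specified here; a minimal sketch of step 3 with assumed thresholds (the real boundaries live in the mapping code):

```csharp
public enum EpssBand { Low, Medium, High, Critical }

public static class EpssBandClassifier
{
    // Thresholds below are assumptions for illustration only.
    public static EpssBand Classify(double score) => score switch
    {
        >= 0.9 => EpssBand.Critical,
        >= 0.5 => EpssBand.High,
        >= 0.1 => EpssBand.Medium,
        _ => EpssBand.Low,
    };
}
```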
## Configuration
```yaml
concelier:
  sources:
    epss:
      baseUri: "https://epss.empiricalsecurity.com/"
      fetchCurrent: true
      catchUpDays: 7
      httpTimeout: "00:02:00"
      maxRetries: 3
      airgapMode: false
      bundlePath: "/var/stellaops/bundles/epss"
```
## Orchestrator Registration
- ConnectorId: `epss`
- Default Schedule: Daily 10:00 UTC
- Egress Allowlist: `epss.empiricalsecurity.com`


@@ -0,0 +1,59 @@
using System.Diagnostics.CodeAnalysis;
namespace StellaOps.Concelier.Connector.Epss.Configuration;
public sealed class EpssOptions
{
public const string SectionName = "Concelier:Epss";
public const string HttpClientName = "source.epss";
public Uri BaseUri { get; set; } = new("https://epss.empiricalsecurity.com/", UriKind.Absolute);
public bool FetchCurrent { get; set; } = true;
public int CatchUpDays { get; set; } = 7;
public TimeSpan HttpTimeout { get; set; } = TimeSpan.FromMinutes(2);
public int MaxRetries { get; set; } = 3;
public bool AirgapMode { get; set; }
public string? BundlePath { get; set; }
public string UserAgent { get; set; } = "StellaOps.Concelier.Epss/1.0";
[MemberNotNull(nameof(BaseUri), nameof(UserAgent))]
public void Validate()
{
if (BaseUri is null || !BaseUri.IsAbsoluteUri)
{
throw new InvalidOperationException("BaseUri must be an absolute URI.");
}
if (CatchUpDays < 0)
{
throw new InvalidOperationException("CatchUpDays cannot be negative.");
}
if (HttpTimeout <= TimeSpan.Zero)
{
throw new InvalidOperationException("HttpTimeout must be greater than zero.");
}
if (MaxRetries < 0)
{
throw new InvalidOperationException("MaxRetries cannot be negative.");
}
if (string.IsNullOrWhiteSpace(UserAgent))
{
throw new InvalidOperationException("UserAgent must be provided.");
}
if (AirgapMode && string.IsNullOrWhiteSpace(BundlePath))
{
throw new InvalidOperationException("BundlePath must be provided when AirgapMode is enabled.");
}
}
}


@@ -0,0 +1,24 @@
using Microsoft.Extensions.DependencyInjection;
using StellaOps.Concelier.Connector.Epss.Internal;
using StellaOps.Plugin;
namespace StellaOps.Concelier.Connector.Epss;
/// <summary>
/// Plugin entry point for EPSS feed connector.
/// </summary>
public sealed class EpssConnectorPlugin : IConnectorPlugin
{
public const string SourceName = "epss";
public string Name => SourceName;
public bool IsAvailable(IServiceProvider services)
=> services is not null && services.GetService<EpssConnector>() is not null;
public IFeedConnector Create(IServiceProvider services)
{
ArgumentNullException.ThrowIfNull(services);
return services.GetRequiredService<EpssConnector>();
}
}


@@ -0,0 +1,54 @@
using System;
using Microsoft.Extensions.Configuration;
using Microsoft.Extensions.DependencyInjection;
using StellaOps.Concelier.Core.Jobs;
using StellaOps.Concelier.Connector.Epss.Configuration;
using StellaOps.DependencyInjection;
namespace StellaOps.Concelier.Connector.Epss;
public sealed class EpssDependencyInjectionRoutine : IDependencyInjectionRoutine
{
private const string ConfigurationSection = "concelier:sources:epss";
public IServiceCollection Register(IServiceCollection services, IConfiguration configuration)
{
ArgumentNullException.ThrowIfNull(services);
ArgumentNullException.ThrowIfNull(configuration);
services.AddEpssConnector(options =>
{
configuration.GetSection(ConfigurationSection).Bind(options);
options.Validate();
});
services.AddTransient<EpssFetchJob>();
services.AddTransient<EpssParseJob>();
services.AddTransient<EpssMapJob>();
services.PostConfigure<JobSchedulerOptions>(options =>
{
EnsureJob(options, EpssJobKinds.Fetch, typeof(EpssFetchJob));
EnsureJob(options, EpssJobKinds.Parse, typeof(EpssParseJob));
EnsureJob(options, EpssJobKinds.Map, typeof(EpssMapJob));
});
return services;
}
private static void EnsureJob(JobSchedulerOptions options, string kind, Type jobType)
{
if (options.Definitions.ContainsKey(kind))
{
return;
}
options.Definitions[kind] = new JobDefinition(
kind,
jobType,
options.DefaultTimeout,
options.DefaultLeaseDuration,
CronExpression: null,
Enabled: true);
}
}


@@ -0,0 +1,40 @@
using System;
using Microsoft.Extensions.DependencyInjection;
using Microsoft.Extensions.Options;
using StellaOps.Concelier.Connector.Common.Http;
using StellaOps.Concelier.Connector.Epss.Configuration;
using StellaOps.Concelier.Connector.Epss.Internal;
namespace StellaOps.Concelier.Connector.Epss;
public static class EpssServiceCollectionExtensions
{
public static IServiceCollection AddEpssConnector(this IServiceCollection services, Action<EpssOptions> configure)
{
ArgumentNullException.ThrowIfNull(services);
ArgumentNullException.ThrowIfNull(configure);
services.AddOptions<EpssOptions>()
.Configure(configure)
.PostConfigure(static opts => opts.Validate());
services.AddSourceHttpClient(EpssOptions.HttpClientName, (sp, clientOptions) =>
{
var options = sp.GetRequiredService<IOptions<EpssOptions>>().Value;
clientOptions.BaseAddress = options.BaseUri;
clientOptions.Timeout = options.HttpTimeout;
clientOptions.UserAgent = options.UserAgent;
clientOptions.MaxAttempts = Math.Max(1, options.MaxRetries + 1);
clientOptions.AllowedHosts.Clear();
clientOptions.AllowedHosts.Add(options.BaseUri.Host);
clientOptions.DefaultRequestHeaders["Accept"] = "application/gzip,application/octet-stream,application/x-gzip";
});
services.AddSingleton<EpssDiagnostics>();
services.AddTransient<EpssConnector>();
services.AddTransient<EpssFetchJob>();
services.AddTransient<EpssParseJob>();
services.AddTransient<EpssMapJob>();
return services;
}
}


@@ -0,0 +1,778 @@
using System.Globalization;
using System.Net;
using System.Net.Http.Headers;
using System.Text.Json;
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Options;
using StellaOps.Concelier.Connector.Common;
using StellaOps.Concelier.Connector.Common.Fetch;
using StellaOps.Concelier.Connector.Epss.Configuration;
using StellaOps.Concelier.Documents;
using StellaOps.Concelier.Storage;
using StellaOps.Cryptography;
using StellaOps.Plugin;
using StellaOps.Scanner.Storage.Epss;
namespace StellaOps.Concelier.Connector.Epss.Internal;
public sealed class EpssConnector : IFeedConnector
{
private const string DtoSchemaVersion = "epss.snapshot.v1";
private const string ManifestFileName = "manifest.json";
private static readonly string[] AcceptTypes = { "application/gzip", "application/octet-stream", "application/x-gzip" };
private readonly IHttpClientFactory _httpClientFactory;
private readonly RawDocumentStorage _rawDocumentStorage;
private readonly IDocumentStore _documentStore;
private readonly IDtoStore _dtoStore;
private readonly ISourceStateRepository _stateRepository;
private readonly EpssOptions _options;
private readonly EpssDiagnostics _diagnostics;
private readonly ICryptoHash _hash;
private readonly TimeProvider _timeProvider;
private readonly ILogger<EpssConnector> _logger;
private readonly EpssCsvStreamParser _parser = new();
public EpssConnector(
IHttpClientFactory httpClientFactory,
RawDocumentStorage rawDocumentStorage,
IDocumentStore documentStore,
IDtoStore dtoStore,
ISourceStateRepository stateRepository,
IOptions<EpssOptions> options,
EpssDiagnostics diagnostics,
ICryptoHash hash,
TimeProvider? timeProvider,
ILogger<EpssConnector> logger)
{
_httpClientFactory = httpClientFactory ?? throw new ArgumentNullException(nameof(httpClientFactory));
_rawDocumentStorage = rawDocumentStorage ?? throw new ArgumentNullException(nameof(rawDocumentStorage));
_documentStore = documentStore ?? throw new ArgumentNullException(nameof(documentStore));
_dtoStore = dtoStore ?? throw new ArgumentNullException(nameof(dtoStore));
_stateRepository = stateRepository ?? throw new ArgumentNullException(nameof(stateRepository));
_options = (options ?? throw new ArgumentNullException(nameof(options))).Value ?? throw new ArgumentNullException(nameof(options));
_options.Validate();
_diagnostics = diagnostics ?? throw new ArgumentNullException(nameof(diagnostics));
_hash = hash ?? throw new ArgumentNullException(nameof(hash));
_timeProvider = timeProvider ?? TimeProvider.System;
_logger = logger ?? throw new ArgumentNullException(nameof(logger));
}
public string SourceName => EpssConnectorPlugin.SourceName;
public async Task FetchAsync(IServiceProvider services, CancellationToken cancellationToken)
{
ArgumentNullException.ThrowIfNull(services);
var cursor = await GetCursorAsync(cancellationToken).ConfigureAwait(false);
var pendingDocuments = cursor.PendingDocuments.ToHashSet();
var pendingMappings = cursor.PendingMappings.ToHashSet();
var now = _timeProvider.GetUtcNow();
var nowDate = DateOnly.FromDateTime(now.UtcDateTime);
var candidates = GetCandidateDates(cursor, nowDate).ToArray();
if (candidates.Length == 0)
{
return;
}
_diagnostics.FetchAttempt();
EpssFetchResult? fetchResult = null;
try
{
foreach (var date in candidates)
{
cancellationToken.ThrowIfCancellationRequested();
fetchResult = _options.AirgapMode
? await TryFetchFromBundleAsync(date, cancellationToken).ConfigureAwait(false)
: await TryFetchFromHttpAsync(date, cursor, cancellationToken).ConfigureAwait(false);
if (fetchResult is not null)
{
break;
}
}
if (fetchResult is null)
{
_logger.LogWarning("EPSS fetch: no snapshot found for {CandidateCount} candidate dates.", candidates.Length);
return;
}
if (fetchResult.IsNotModified)
{
_diagnostics.FetchUnchanged();
var unchangedCursor = cursor.WithSnapshotMetadata(
cursor.ModelVersion,
cursor.LastProcessedDate,
fetchResult.ETag ?? cursor.ETag,
cursor.ContentHash,
cursor.LastRowCount,
now);
await UpdateCursorAsync(unchangedCursor, cancellationToken).ConfigureAwait(false);
return;
}
if (!fetchResult.IsSuccess || fetchResult.Content is null)
{
_diagnostics.FetchFailure();
await _stateRepository.MarkFailureAsync(SourceName, now, TimeSpan.FromMinutes(5), "EPSS fetch returned no content.", cancellationToken).ConfigureAwait(false);
return;
}
var record = await StoreSnapshotAsync(fetchResult, now, cancellationToken).ConfigureAwait(false);
pendingDocuments.Add(record.Id);
pendingMappings.Remove(record.Id);
var updatedCursor = cursor
.WithPendingDocuments(pendingDocuments)
.WithPendingMappings(pendingMappings)
.WithSnapshotMetadata(
cursor.ModelVersion,
cursor.LastProcessedDate,
fetchResult.ETag,
cursor.ContentHash,
cursor.LastRowCount,
now);
_diagnostics.FetchSuccess();
_logger.LogInformation(
"Fetched EPSS snapshot {SnapshotDate} ({Uri}) document {DocumentId} pendingDocuments={PendingDocuments} pendingMappings={PendingMappings}",
fetchResult.SnapshotDate,
fetchResult.SourceUri,
record.Id,
pendingDocuments.Count,
pendingMappings.Count);
await UpdateCursorAsync(updatedCursor, cancellationToken).ConfigureAwait(false);
}
catch (Exception ex)
{
_diagnostics.FetchFailure();
_logger.LogError(ex, "EPSS fetch failed for {BaseUri}", _options.BaseUri);
await _stateRepository.MarkFailureAsync(SourceName, now, TimeSpan.FromMinutes(5), ex.Message, cancellationToken).ConfigureAwait(false);
throw;
}
}
public async Task ParseAsync(IServiceProvider services, CancellationToken cancellationToken)
{
ArgumentNullException.ThrowIfNull(services);
var cursor = await GetCursorAsync(cancellationToken).ConfigureAwait(false);
if (cursor.PendingDocuments.Count == 0)
{
return;
}
var remainingDocuments = cursor.PendingDocuments.ToList();
var pendingMappings = cursor.PendingMappings.ToHashSet();
var cursorState = cursor;
foreach (var documentId in cursor.PendingDocuments)
{
cancellationToken.ThrowIfCancellationRequested();
var document = await _documentStore.FindAsync(documentId, cancellationToken).ConfigureAwait(false);
if (document is null)
{
remainingDocuments.Remove(documentId);
continue;
}
if (!document.PayloadId.HasValue)
{
_diagnostics.ParseFailure("missing_payload");
await _documentStore.UpdateStatusAsync(document.Id, DocumentStatuses.Failed, cancellationToken).ConfigureAwait(false);
remainingDocuments.Remove(documentId);
pendingMappings.Remove(documentId);
continue;
}
byte[] payload;
try
{
payload = await _rawDocumentStorage.DownloadAsync(document.PayloadId.Value, cancellationToken).ConfigureAwait(false);
}
catch (Exception ex)
{
_diagnostics.ParseFailure("download");
_logger.LogError(ex, "EPSS parse failed downloading document {DocumentId}", document.Id);
throw;
}
DateOnly? sessionPublishedDate;
string? sessionModelVersion;
string? sessionContentHash;
int sessionRowCount;
try
{
await using var stream = new MemoryStream(payload, writable: false);
await using var session = _parser.ParseGzip(stream);
await foreach (var _ in session.WithCancellation(cancellationToken).ConfigureAwait(false))
{
}
// Capture parser metadata before the await-using scope disposes the session.
sessionPublishedDate = session.PublishedDate;
sessionModelVersion = session.ModelVersionTag;
sessionContentHash = session.DecompressedSha256;
sessionRowCount = session.RowCount;
}
catch (Exception ex)
{
_diagnostics.ParseFailure("parse");
_logger.LogWarning(ex, "EPSS parse failed for document {DocumentId}", document.Id);
await _documentStore.UpdateStatusAsync(document.Id, DocumentStatuses.Failed, cancellationToken).ConfigureAwait(false);
remainingDocuments.Remove(documentId);
pendingMappings.Remove(documentId);
continue;
}
var publishedDate = sessionPublishedDate ?? TryParseDateFromMetadata(document.Metadata) ?? DateOnly.FromDateTime(document.CreatedAt.UtcDateTime);
var modelVersion = string.IsNullOrWhiteSpace(sessionModelVersion) ? "unknown" : sessionModelVersion!;
var contentHash = sessionContentHash ?? string.Empty;
var payloadDoc = new DocumentObject
{
["modelVersion"] = modelVersion,
["publishedDate"] = publishedDate.ToString("yyyy-MM-dd", CultureInfo.InvariantCulture),
["rowCount"] = session.RowCount,
["contentHash"] = contentHash
};
var dtoRecord = new DtoRecord(
Guid.NewGuid(),
document.Id,
SourceName,
DtoSchemaVersion,
payloadDoc,
_timeProvider.GetUtcNow());
await _dtoStore.UpsertAsync(dtoRecord, cancellationToken).ConfigureAwait(false);
await _documentStore.UpdateStatusAsync(document.Id, DocumentStatuses.PendingMap, cancellationToken).ConfigureAwait(false);
var metadata = document.Metadata is null
? new Dictionary<string, string>(StringComparer.Ordinal)
: new Dictionary<string, string>(document.Metadata, StringComparer.Ordinal);
metadata["epss.modelVersion"] = modelVersion;
metadata["epss.publishedDate"] = publishedDate.ToString("yyyy-MM-dd", CultureInfo.InvariantCulture);
metadata["epss.rowCount"] = session.RowCount.ToString(CultureInfo.InvariantCulture);
metadata["epss.contentHash"] = contentHash;
var updatedDocument = document with { Metadata = metadata };
await _documentStore.UpsertAsync(updatedDocument, cancellationToken).ConfigureAwait(false);
remainingDocuments.Remove(documentId);
pendingMappings.Add(documentId);
cursorState = cursorState.WithSnapshotMetadata(
modelVersion,
publishedDate,
document.Etag,
contentHash,
sessionRowCount,
_timeProvider.GetUtcNow());
_diagnostics.ParseRows(sessionRowCount, modelVersion);
}
var updatedCursor = cursorState
.WithPendingDocuments(remainingDocuments)
.WithPendingMappings(pendingMappings);
await UpdateCursorAsync(updatedCursor, cancellationToken).ConfigureAwait(false);
}
public async Task MapAsync(IServiceProvider services, CancellationToken cancellationToken)
{
ArgumentNullException.ThrowIfNull(services);
var cursor = await GetCursorAsync(cancellationToken).ConfigureAwait(false);
if (cursor.PendingMappings.Count == 0)
{
return;
}
var pendingMappings = cursor.PendingMappings.ToList();
var cursorState = cursor;
foreach (var documentId in cursor.PendingMappings)
{
cancellationToken.ThrowIfCancellationRequested();
var dtoRecord = await _dtoStore.FindByDocumentIdAsync(documentId, cancellationToken).ConfigureAwait(false);
var document = await _documentStore.FindAsync(documentId, cancellationToken).ConfigureAwait(false);
if (dtoRecord is null || document is null)
{
pendingMappings.Remove(documentId);
continue;
}
var modelVersion = TryGetString(dtoRecord.Payload, "modelVersion") ?? "unknown";
var publishedDate = TryGetDate(dtoRecord.Payload, "publishedDate")
?? TryParseDateFromMetadata(document.Metadata)
?? DateOnly.FromDateTime(document.CreatedAt.UtcDateTime);
if (!document.PayloadId.HasValue)
{
await _documentStore.UpdateStatusAsync(document.Id, DocumentStatuses.Failed, cancellationToken).ConfigureAwait(false);
pendingMappings.Remove(documentId);
continue;
}
byte[] payload;
try
{
payload = await _rawDocumentStorage.DownloadAsync(document.PayloadId.Value, cancellationToken).ConfigureAwait(false);
}
catch (Exception ex)
{
_logger.LogError(ex, "EPSS map failed downloading document {DocumentId}", document.Id);
await _documentStore.UpdateStatusAsync(document.Id, DocumentStatuses.Failed, cancellationToken).ConfigureAwait(false);
pendingMappings.Remove(documentId);
continue;
}
int mappedRows = 0;
try
{
await using var stream = new MemoryStream(payload, writable: false);
await using var session = _parser.ParseGzip(stream);
await foreach (var row in session.WithCancellation(cancellationToken).ConfigureAwait(false))
{
_ = EpssMapper.ToObservation(row, modelVersion, publishedDate);
mappedRows++;
}
cursorState = cursorState.WithSnapshotMetadata(
modelVersion,
publishedDate,
document.Etag,
TryGetString(dtoRecord.Payload, "contentHash"),
mappedRows,
_timeProvider.GetUtcNow());
}
catch (Exception ex)
{
_logger.LogWarning(ex, "EPSS map failed for document {DocumentId}", document.Id);
await _documentStore.UpdateStatusAsync(document.Id, DocumentStatuses.Failed, cancellationToken).ConfigureAwait(false);
pendingMappings.Remove(documentId);
continue;
}
await _documentStore.UpdateStatusAsync(document.Id, DocumentStatuses.Mapped, cancellationToken).ConfigureAwait(false);
pendingMappings.Remove(documentId);
_diagnostics.MapRows(mappedRows, modelVersion);
}
var updatedCursor = cursorState.WithPendingMappings(pendingMappings);
await UpdateCursorAsync(updatedCursor, cancellationToken).ConfigureAwait(false);
}
private async Task<EpssFetchResult?> TryFetchFromHttpAsync(
DateOnly snapshotDate,
EpssCursor cursor,
CancellationToken cancellationToken)
{
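// Conditional GET: replay the last known ETag / Last-Modified so an unchanged snapshot comes back as 304 Not Modified instead of a full download.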
var fileName = GetSnapshotFileName(snapshotDate);
var uri = new Uri(_options.BaseUri, fileName);
var existing = await _documentStore.FindBySourceAndUriAsync(SourceName, uri.ToString(), cancellationToken).ConfigureAwait(false);
var etag = existing?.Etag ?? cursor.ETag;
var lastModified = existing?.LastModified;
var client = _httpClientFactory.CreateClient(EpssOptions.HttpClientName);
client.Timeout = _options.HttpTimeout;
HttpResponseMessage response;
try
{
response = await SendWithRetryAsync(() => CreateRequest(uri, etag, lastModified), client, cancellationToken).ConfigureAwait(false);
}
catch (HttpRequestException ex) when (ex.StatusCode == HttpStatusCode.NotFound)
{
return null;
}
if (response.StatusCode == HttpStatusCode.NotFound)
{
response.Dispose();
return null;
}
if (response.StatusCode == HttpStatusCode.NotModified)
{
var notModified = new EpssFetchResult(
SnapshotDate: snapshotDate,
SourceUri: uri.ToString(),
IsSuccess: false,
IsNotModified: true,
Content: null,
ContentType: response.Content.Headers.ContentType?.ToString(),
ETag: response.Headers.ETag?.Tag ?? etag,
LastModified: response.Content.Headers.LastModified);
response.Dispose();
return notModified;
}
response.EnsureSuccessStatusCode();
var bytes = await response.Content.ReadAsByteArrayAsync(cancellationToken).ConfigureAwait(false);
var result = new EpssFetchResult(
SnapshotDate: snapshotDate,
SourceUri: uri.ToString(),
IsSuccess: true,
IsNotModified: false,
Content: bytes,
ContentType: response.Content.Headers.ContentType?.ToString(),
ETag: response.Headers.ETag?.Tag ?? etag,
LastModified: response.Content.Headers.LastModified);
response.Dispose();
return result;
}
private async Task<EpssFetchResult?> TryFetchFromBundleAsync(DateOnly snapshotDate, CancellationToken cancellationToken)
{
var fileName = GetSnapshotFileName(snapshotDate);
var bundlePath = ResolveBundlePath(_options.BundlePath, fileName);
if (bundlePath is null || !File.Exists(bundlePath))
{
_logger.LogWarning("EPSS bundle file not found: {Path}", bundlePath ?? fileName);
return null;
}
var bytes = await File.ReadAllBytesAsync(bundlePath, cancellationToken).ConfigureAwait(false);
return new EpssFetchResult(
SnapshotDate: snapshotDate,
SourceUri: $"bundle://{Path.GetFileName(bundlePath)}",
IsSuccess: true,
IsNotModified: false,
Content: bytes,
ContentType: "application/gzip",
ETag: null,
LastModified: new DateTimeOffset(File.GetLastWriteTimeUtc(bundlePath)));
}
private async Task<DocumentRecord> StoreSnapshotAsync(
EpssFetchResult fetchResult,
DateTimeOffset fetchedAt,
CancellationToken cancellationToken)
{
var sha256 = _hash.ComputeHashHex(fetchResult.Content, HashAlgorithms.Sha256);
var metadata = new Dictionary<string, string>(StringComparer.Ordinal)
{
["epss.date"] = fetchResult.SnapshotDate.ToString("yyyy-MM-dd", CultureInfo.InvariantCulture),
["epss.file"] = GetSnapshotFileName(fetchResult.SnapshotDate)
};
if (_options.AirgapMode)
{
TryApplyBundleManifest(fetchResult.SnapshotDate, fetchResult.Content, metadata);
}
var existing = await _documentStore.FindBySourceAndUriAsync(SourceName, fetchResult.SourceUri, cancellationToken).ConfigureAwait(false);
var recordId = existing?.Id ?? Guid.NewGuid();
await _rawDocumentStorage.UploadAsync(
SourceName,
fetchResult.SourceUri,
fetchResult.Content,
fetchResult.ContentType,
ExpiresAt: null,
cancellationToken,
recordId).ConfigureAwait(false);
var record = new DocumentRecord(
recordId,
SourceName,
fetchResult.SourceUri,
fetchedAt,
sha256,
DocumentStatuses.PendingParse,
fetchResult.ContentType,
Headers: null,
Metadata: metadata,
Etag: fetchResult.ETag,
LastModified: fetchResult.LastModified,
PayloadId: recordId,
ExpiresAt: null,
Payload: fetchResult.Content,
FetchedAt: fetchedAt);
return await _documentStore.UpsertAsync(record, cancellationToken).ConfigureAwait(false);
}
private void TryApplyBundleManifest(DateOnly snapshotDate, byte[] content, IDictionary<string, string> metadata)
{
var bundlePath = _options.BundlePath;
if (string.IsNullOrWhiteSpace(bundlePath))
{
return;
}
var manifestPath = ResolveBundleManifestPath(bundlePath);
if (manifestPath is null || !File.Exists(manifestPath))
{
return;
}
try
{
var entry = TryReadBundleManifestEntry(manifestPath, GetSnapshotFileName(snapshotDate));
if (entry is null)
{
return;
}
if (!string.IsNullOrWhiteSpace(entry.ModelVersion))
{
metadata["epss.manifest.modelVersion"] = entry.ModelVersion!;
}
if (entry.RowCount.HasValue)
{
metadata["epss.manifest.rowCount"] = entry.RowCount.Value.ToString(CultureInfo.InvariantCulture);
}
if (!string.IsNullOrWhiteSpace(entry.Sha256))
{
var actual = _hash.ComputeHashHex(content, HashAlgorithms.Sha256);
var expected = entry.Sha256!.StartsWith("sha256:", StringComparison.OrdinalIgnoreCase)
? entry.Sha256![7..]
: entry.Sha256!;
metadata["epss.manifest.sha256"] = entry.Sha256!;
if (!string.Equals(actual, expected, StringComparison.OrdinalIgnoreCase))
{
_logger.LogWarning("EPSS bundle hash mismatch: expected {Expected}, actual {Actual}", entry.Sha256, actual);
}
}
}
catch (Exception ex)
{
_logger.LogWarning(ex, "EPSS bundle manifest parsing failed for {Path}", manifestPath);
}
}
private static string? ResolveBundlePath(string? bundlePath, string fileName)
{
if (string.IsNullOrWhiteSpace(bundlePath))
{
return null;
}
if (Directory.Exists(bundlePath))
{
return Path.Combine(bundlePath, fileName);
}
return bundlePath;
}
private static string? ResolveBundleManifestPath(string bundlePath)
{
if (Directory.Exists(bundlePath))
{
return Path.Combine(bundlePath, ManifestFileName);
}
var directory = Path.GetDirectoryName(bundlePath);
if (string.IsNullOrWhiteSpace(directory))
{
return null;
}
return Path.Combine(directory, ManifestFileName);
}
private static BundleManifestEntry? TryReadBundleManifestEntry(string manifestPath, string fileName)
{
using var stream = File.OpenRead(manifestPath);
using var doc = JsonDocument.Parse(stream);
if (!doc.RootElement.TryGetProperty("files", out var files) || files.ValueKind != JsonValueKind.Array)
{
return null;
}
foreach (var entry in files.EnumerateArray())
{
if (!entry.TryGetProperty("name", out var nameValue))
{
continue;
}
var name = nameValue.GetString();
if (string.IsNullOrWhiteSpace(name) || !string.Equals(name, fileName, StringComparison.OrdinalIgnoreCase))
{
continue;
}
var modelVersion = entry.TryGetProperty("modelVersion", out var modelValue) ? modelValue.GetString() : null;
var sha256 = entry.TryGetProperty("sha256", out var shaValue) ? shaValue.GetString() : null;
var rowCount = entry.TryGetProperty("rowCount", out var rowValue) && rowValue.TryGetInt32(out var parsed)
? parsed
: (int?)null;
return new BundleManifestEntry(name, modelVersion, sha256, rowCount);
}
return null;
}
private IEnumerable<DateOnly> GetCandidateDates(EpssCursor cursor, DateOnly nowDate)
{
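// Candidates are yielded newest-first: the start date, then one day back at a time up to CatchUpDays; FetchAsync stops at the first date that yields a snapshot.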
var startDate = _options.FetchCurrent
? nowDate
: cursor.LastProcessedDate?.AddDays(1) ?? nowDate.AddDays(-Math.Max(0, _options.CatchUpDays));
if (startDate > nowDate)
{
startDate = nowDate;
}
var maxBackfill = Math.Max(0, _options.CatchUpDays);
for (var i = 0; i <= maxBackfill; i++)
{
yield return startDate.AddDays(-i);
}
}
private static string GetSnapshotFileName(DateOnly date)
=> $"epss_scores-{date:yyyy-MM-dd}.csv.gz";
private static HttpRequestMessage CreateRequest(Uri uri, string? etag, DateTimeOffset? lastModified)
{
var request = new HttpRequestMessage(HttpMethod.Get, uri);
request.Headers.Accept.Clear();
foreach (var acceptType in AcceptTypes)
{
request.Headers.Accept.Add(new MediaTypeWithQualityHeaderValue(acceptType));
}
if (!string.IsNullOrWhiteSpace(etag) && EntityTagHeaderValue.TryParse(etag, out var etagHeader))
{
request.Headers.IfNoneMatch.Add(etagHeader);
}
if (lastModified.HasValue)
{
request.Headers.IfModifiedSince = lastModified.Value;
}
return request;
}
private async Task<HttpResponseMessage> SendWithRetryAsync(
Func<HttpRequestMessage> requestFactory,
HttpClient client,
CancellationToken cancellationToken)
{
var maxAttempts = Math.Max(1, _options.MaxRetries + 1);
for (var attempt = 1; attempt <= maxAttempts; attempt++)
{
using var request = requestFactory();
try
{
var response = await client.SendAsync(request, HttpCompletionOption.ResponseHeadersRead, cancellationToken).ConfigureAwait(false);
if (ShouldRetry(response) && attempt < maxAttempts)
{
response.Dispose();
await Task.Delay(GetRetryDelay(attempt), cancellationToken).ConfigureAwait(false);
continue;
}
return response;
}
catch (Exception ex) when (attempt < maxAttempts && ex is HttpRequestException or TaskCanceledException)
{
await Task.Delay(GetRetryDelay(attempt), cancellationToken).ConfigureAwait(false);
}
}
throw new HttpRequestException("EPSS fetch exceeded retry attempts.");
}
private static bool ShouldRetry(HttpResponseMessage response)
{
if (response.StatusCode == HttpStatusCode.TooManyRequests)
{
return true;
}
var status = (int)response.StatusCode;
return status >= 500 && status < 600;
}
private static TimeSpan GetRetryDelay(int attempt)
{
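// Exponential backoff: 1s, 2s, 4s, 8s, 16s, then capped at 30s per attempt.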
var seconds = Math.Min(30, Math.Pow(2, attempt - 1));
return TimeSpan.FromSeconds(seconds);
}
private static string? TryGetString(DocumentObject payload, string key)
=> payload.TryGetValue(key, out var value) ? value.AsString : null;
private static DateOnly? TryGetDate(DocumentObject payload, string key)
{
if (!payload.TryGetValue(key, out var value))
{
return null;
}
if (value.DocumentType == DocumentType.DateTime)
{
return DateOnly.FromDateTime(value.ToUniversalTime());
}
if (value.DocumentType == DocumentType.String &&
DateOnly.TryParseExact(value.AsString, "yyyy-MM-dd", CultureInfo.InvariantCulture, DateTimeStyles.None, out var parsed))
{
return parsed;
}
return null;
}
private static DateOnly? TryParseDateFromMetadata(IReadOnlyDictionary<string, string>? metadata)
{
if (metadata is null)
{
return null;
}
if (!metadata.TryGetValue("epss.date", out var value) || string.IsNullOrWhiteSpace(value))
{
return null;
}
return DateOnly.TryParseExact(value, "yyyy-MM-dd", CultureInfo.InvariantCulture, DateTimeStyles.None, out var parsed)
? parsed
: null;
}
private async Task<EpssCursor> GetCursorAsync(CancellationToken cancellationToken)
{
var state = await _stateRepository.TryGetAsync(SourceName, cancellationToken).ConfigureAwait(false);
return state is null ? EpssCursor.Empty : EpssCursor.FromDocument(state.Cursor);
}
private Task UpdateCursorAsync(EpssCursor cursor, CancellationToken cancellationToken)
{
var document = cursor.ToDocumentObject();
return _stateRepository.UpdateCursorAsync(SourceName, document, _timeProvider.GetUtcNow(), cancellationToken);
}
private sealed record EpssFetchResult(
DateOnly SnapshotDate,
string SourceUri,
bool IsSuccess,
bool IsNotModified,
byte[]? Content,
string? ContentType,
string? ETag,
DateTimeOffset? LastModified);
private sealed record BundleManifestEntry(
string Name,
string? ModelVersion,
string? Sha256,
int? RowCount);
}

View File

@@ -0,0 +1,164 @@
using System.Globalization;
using StellaOps.Concelier.Documents;
namespace StellaOps.Concelier.Connector.Epss.Internal;
internal sealed record EpssCursor(
string? ModelVersion,
DateOnly? LastProcessedDate,
string? ETag,
string? ContentHash,
int? LastRowCount,
DateTimeOffset UpdatedAt,
IReadOnlyCollection<Guid> PendingDocuments,
IReadOnlyCollection<Guid> PendingMappings)
{
private static readonly IReadOnlyCollection<Guid> EmptyGuidCollection = Array.Empty<Guid>();
public static EpssCursor Empty { get; } = new(
null,
null,
null,
null,
null,
DateTimeOffset.MinValue,
EmptyGuidCollection,
EmptyGuidCollection);
public DocumentObject ToDocumentObject()
{
var document = new DocumentObject
{
["pendingDocuments"] = new DocumentArray(PendingDocuments.Select(id => id.ToString())),
["pendingMappings"] = new DocumentArray(PendingMappings.Select(id => id.ToString()))
};
if (!string.IsNullOrWhiteSpace(ModelVersion))
{
document["modelVersion"] = ModelVersion;
}
if (LastProcessedDate.HasValue)
{
document["lastProcessedDate"] = LastProcessedDate.Value.ToString("yyyy-MM-dd", CultureInfo.InvariantCulture);
}
if (!string.IsNullOrWhiteSpace(ETag))
{
document["etag"] = ETag;
}
if (!string.IsNullOrWhiteSpace(ContentHash))
{
document["contentHash"] = ContentHash;
}
if (LastRowCount.HasValue)
{
document["lastRowCount"] = LastRowCount.Value;
}
if (UpdatedAt > DateTimeOffset.MinValue)
{
document["updatedAt"] = UpdatedAt.UtcDateTime;
}
return document;
}
public static EpssCursor FromDocument(DocumentObject? document)
{
if (document is null || document.ElementCount == 0)
{
return Empty;
}
var modelVersion = document.TryGetValue("modelVersion", out var modelValue) ? modelValue.AsString : null;
DateOnly? lastProcessed = null;
if (document.TryGetValue("lastProcessedDate", out var lastProcessedValue))
{
lastProcessed = lastProcessedValue.DocumentType switch
{
DocumentType.String when DateOnly.TryParseExact(lastProcessedValue.AsString, "yyyy-MM-dd", CultureInfo.InvariantCulture, DateTimeStyles.None, out var parsed) => parsed,
DocumentType.DateTime => DateOnly.FromDateTime(lastProcessedValue.ToUniversalTime()),
_ => null
};
}
var etag = document.TryGetValue("etag", out var etagValue) ? etagValue.AsString : null;
var contentHash = document.TryGetValue("contentHash", out var hashValue) ? hashValue.AsString : null;
int? lastRowCount = null;
if (document.TryGetValue("lastRowCount", out var countValue))
{
var count = countValue.AsInt32;
if (count > 0)
{
lastRowCount = count;
}
}
DateTimeOffset updatedAt = DateTimeOffset.MinValue;
if (document.TryGetValue("updatedAt", out var updatedValue))
{
var parsed = updatedValue.AsDateTimeOffset;
if (parsed > DateTimeOffset.MinValue)
{
updatedAt = parsed;
}
}
return new EpssCursor(
string.IsNullOrWhiteSpace(modelVersion) ? null : modelVersion.Trim(),
lastProcessed,
string.IsNullOrWhiteSpace(etag) ? null : etag.Trim(),
string.IsNullOrWhiteSpace(contentHash) ? null : contentHash.Trim(),
lastRowCount,
updatedAt,
ReadGuidArray(document, "pendingDocuments"),
ReadGuidArray(document, "pendingMappings"));
}
public EpssCursor WithPendingDocuments(IEnumerable<Guid> documents)
=> this with { PendingDocuments = documents?.Distinct().ToArray() ?? EmptyGuidCollection };
public EpssCursor WithPendingMappings(IEnumerable<Guid> mappings)
=> this with { PendingMappings = mappings?.Distinct().ToArray() ?? EmptyGuidCollection };
public EpssCursor WithSnapshotMetadata(
string? modelVersion,
DateOnly? publishedDate,
string? etag,
string? contentHash,
int? rowCount,
DateTimeOffset updatedAt)
=> this with
{
ModelVersion = string.IsNullOrWhiteSpace(modelVersion) ? null : modelVersion.Trim(),
LastProcessedDate = publishedDate,
ETag = string.IsNullOrWhiteSpace(etag) ? null : etag.Trim(),
ContentHash = string.IsNullOrWhiteSpace(contentHash) ? null : contentHash.Trim(),
LastRowCount = rowCount > 0 ? rowCount : null,
UpdatedAt = updatedAt
};
private static IReadOnlyCollection<Guid> ReadGuidArray(DocumentObject document, string key)
{
if (!document.TryGetValue(key, out var value) || value is not DocumentArray array)
{
return EmptyGuidCollection;
}
var results = new List<Guid>(array.Count);
foreach (var element in array)
{
if (Guid.TryParse(element.ToString(), out var guid))
{
results.Add(guid);
}
}
return results;
}
}
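For orientation, a minimal round-trip sketch (not part of the commit; the snapshot values are hypothetical) showing how the cursor above persists and restores through the source-state document:

var cursor = EpssCursor.Empty
    .WithSnapshotMetadata(
        modelVersion: "v2025.03.14",
        publishedDate: new DateOnly(2025, 3, 14),
        etag: "\"abc123\"",
        contentHash: "sha256:deadbeef",
        rowCount: 250_000,
        updatedAt: DateTimeOffset.UtcNow)
    .WithPendingDocuments(new[] { Guid.NewGuid() });

// FromDocument(ToDocumentObject()) restores every field, including the pending queues.
var restored = EpssCursor.FromDocument(cursor.ToDocumentObject());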

View File

@@ -0,0 +1,85 @@
using System.Collections.Generic;
using System.Diagnostics.Metrics;
namespace StellaOps.Concelier.Connector.Epss.Internal;
public sealed class EpssDiagnostics : IDisposable
{
public const string MeterName = "StellaOps.Concelier.Connector.Epss";
private const string MeterVersion = "1.0.0";
private readonly Meter _meter;
private readonly Counter<long> _fetchAttempts;
private readonly Counter<long> _fetchSuccess;
private readonly Counter<long> _fetchFailures;
private readonly Counter<long> _fetchUnchanged;
private readonly Counter<long> _parsedRows;
private readonly Counter<long> _parseFailures;
private readonly Counter<long> _mappedRows;
public EpssDiagnostics()
{
_meter = new Meter(MeterName, MeterVersion);
_fetchAttempts = _meter.CreateCounter<long>(
name: "epss.fetch.attempts",
unit: "operations",
description: "Number of EPSS fetch attempts performed.");
_fetchSuccess = _meter.CreateCounter<long>(
name: "epss.fetch.success",
unit: "operations",
description: "Number of EPSS fetch attempts that produced new content.");
_fetchFailures = _meter.CreateCounter<long>(
name: "epss.fetch.failures",
unit: "operations",
description: "Number of EPSS fetch attempts that failed.");
_fetchUnchanged = _meter.CreateCounter<long>(
name: "epss.fetch.unchanged",
unit: "operations",
description: "Number of EPSS fetch attempts returning unchanged content.");
_parsedRows = _meter.CreateCounter<long>(
name: "epss.parse.rows",
unit: "rows",
description: "Number of EPSS rows parsed from snapshots.");
_parseFailures = _meter.CreateCounter<long>(
name: "epss.parse.failures",
unit: "documents",
description: "Number of EPSS snapshot parse failures.");
_mappedRows = _meter.CreateCounter<long>(
name: "epss.map.rows",
unit: "rows",
description: "Number of EPSS rows mapped into observations.");
}
public void FetchAttempt() => _fetchAttempts.Add(1);
public void FetchSuccess() => _fetchSuccess.Add(1);
public void FetchFailure() => _fetchFailures.Add(1);
public void FetchUnchanged() => _fetchUnchanged.Add(1);
public void ParseRows(int rowCount, string? modelVersion)
{
if (rowCount <= 0)
{
return;
}
_parsedRows.Add(rowCount, new KeyValuePair<string, object?>("modelVersion", modelVersion ?? string.Empty));
}
public void ParseFailure(string reason)
=> _parseFailures.Add(1, new KeyValuePair<string, object?>("reason", reason));
public void MapRows(int rowCount, string? modelVersion)
{
if (rowCount <= 0)
{
return;
}
_mappedRows.Add(rowCount, new KeyValuePair<string, object?>("modelVersion", modelVersion ?? string.Empty));
}
public void Dispose() => _meter.Dispose();
}
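A minimal sketch (not part of the commit) of observing these counters in-process; `MeterListener` is the standard `System.Diagnostics.Metrics` API:

using System.Diagnostics.Metrics;

using var listener = new MeterListener();
listener.InstrumentPublished = (instrument, l) =>
{
    // Subscribe only to the EPSS connector meter declared above.
    if (instrument.Meter.Name == EpssDiagnostics.MeterName)
    {
        l.EnableMeasurementEvents(instrument);
    }
};
listener.SetMeasurementEventCallback<long>(
    (instrument, measurement, tags, state) =>
        Console.WriteLine($"{instrument.Name} += {measurement}"));
listener.Start();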

View File

@@ -0,0 +1,53 @@
using StellaOps.Scanner.Storage.Epss;
namespace StellaOps.Concelier.Connector.Epss.Internal;
public static class EpssMapper
{
public static EpssObservation ToObservation(
EpssScoreRow row,
string modelVersion,
DateOnly publishedDate)
{
if (string.IsNullOrWhiteSpace(modelVersion))
{
throw new ArgumentException("Model version is required.", nameof(modelVersion));
}
return new EpssObservation
{
CveId = row.CveId,
Score = (decimal)row.Score,
Percentile = (decimal)row.Percentile,
ModelVersion = modelVersion,
PublishedDate = publishedDate,
Band = DetermineBand((decimal)row.Score)
};
}
private static EpssBand DetermineBand(decimal score) => score switch
{
>= 0.70m => EpssBand.Critical,
>= 0.40m => EpssBand.High,
>= 0.10m => EpssBand.Medium,
_ => EpssBand.Low
};
}
public sealed record EpssObservation
{
public required string CveId { get; init; }
public required decimal Score { get; init; }
public required decimal Percentile { get; init; }
public required string ModelVersion { get; init; }
public required DateOnly PublishedDate { get; init; }
public required EpssBand Band { get; init; }
}
public enum EpssBand
{
Low = 0,
Medium = 1,
High = 2,
Critical = 3
}
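A minimal usage sketch (not part of the commit; the row values are hypothetical, and `EpssScoreRow` is assumed to expose settable `CveId`/`Score`/`Percentile`, as the casts above imply):

var row = new EpssScoreRow { CveId = "CVE-2024-0001", Score = 0.42, Percentile = 0.97 };
var observation = EpssMapper.ToObservation(row, modelVersion: "v2025.03.14", publishedDate: new DateOnly(2025, 3, 14));
// 0.42 falls in [0.40, 0.70), so observation.Band == EpssBand.High;
// the Medium/High/Critical thresholds are 0.10, 0.40, and 0.70.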

View File

@@ -0,0 +1,47 @@
using System;
using System.Threading;
using System.Threading.Tasks;
using StellaOps.Concelier.Core.Jobs;
using StellaOps.Concelier.Connector.Epss.Internal;
namespace StellaOps.Concelier.Connector.Epss;
internal static class EpssJobKinds
{
public const string Fetch = "source:epss:fetch";
public const string Parse = "source:epss:parse";
public const string Map = "source:epss:map";
}
internal sealed class EpssFetchJob : IJob
{
private readonly EpssConnector _connector;
public EpssFetchJob(EpssConnector connector)
=> _connector = connector ?? throw new ArgumentNullException(nameof(connector));
public Task ExecuteAsync(JobExecutionContext context, CancellationToken cancellationToken)
=> _connector.FetchAsync(context.Services, cancellationToken);
}
internal sealed class EpssParseJob : IJob
{
private readonly EpssConnector _connector;
public EpssParseJob(EpssConnector connector)
=> _connector = connector ?? throw new ArgumentNullException(nameof(connector));
public Task ExecuteAsync(JobExecutionContext context, CancellationToken cancellationToken)
=> _connector.ParseAsync(context.Services, cancellationToken);
}
internal sealed class EpssMapJob : IJob
{
private readonly EpssConnector _connector;
public EpssMapJob(EpssConnector connector)
=> _connector = connector ?? throw new ArgumentNullException(nameof(connector));
public Task ExecuteAsync(JobExecutionContext context, CancellationToken cancellationToken)
=> _connector.MapAsync(context.Services, cancellationToken);
}

View File

@@ -0,0 +1,3 @@
using StellaOps.Plugin.Versioning;
[assembly: StellaPluginVersion("1.0.0", MinimumHostVersion = "1.0.0", MaximumHostVersion = "1.99.99")]

View File

@@ -0,0 +1,24 @@
<?xml version='1.0' encoding='utf-8'?>
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<TargetFramework>net10.0</TargetFramework>
<ImplicitUsings>enable</ImplicitUsings>
<Nullable>enable</Nullable>
<LangVersion>preview</LangVersion>
</PropertyGroup>
<ItemGroup>
<ProjectReference Include="../../../__Libraries/StellaOps.Plugin/StellaOps.Plugin.csproj" />
<ProjectReference Include="../../../__Libraries/StellaOps.Cryptography/StellaOps.Cryptography.csproj" />
<ProjectReference Include="../StellaOps.Concelier.Connector.Common/StellaOps.Concelier.Connector.Common.csproj" />
<ProjectReference Include="../StellaOps.Concelier.Core/StellaOps.Concelier.Core.csproj" />
<ProjectReference Include="../StellaOps.Concelier.Models/StellaOps.Concelier.Models.csproj" />
<ProjectReference Include="../../../Scanner/__Libraries/StellaOps.Scanner.Storage/StellaOps.Scanner.Storage.csproj" />
</ItemGroup>
<ItemGroup>
<AssemblyAttribute Include="System.Runtime.CompilerServices.InternalsVisibleTo">
<_Parameter1>StellaOps.Concelier.Connector.Epss.Tests</_Parameter1>
</AssemblyAttribute>
</ItemGroup>
</Project>

View File

@@ -243,6 +243,23 @@ public static class WellKnownConnectors
EgressAllowlist = ["www.cisa.gov"]
};
/// <summary>
/// EPSS (Exploit Prediction Scoring System) connector metadata.
/// </summary>
public static ConnectorMetadata Epss => new()
{
ConnectorId = "epss",
Source = "epss",
DisplayName = "EPSS",
Description = "FIRST.org Exploit Prediction Scoring System",
Capabilities = ["observations"],
ArtifactKinds = ["raw-scores", "normalized"],
DefaultCron = "0 10 * * *",
DefaultRpm = 100,
MaxLagMinutes = 1440,
EgressAllowlist = ["epss.empiricalsecurity.com"]
};
/// <summary>
/// ICS-CISA connector metadata.
/// </summary>
@@ -262,5 +279,5 @@ public static class WellKnownConnectors
/// <summary>
/// Gets metadata for all well-known connectors.
/// </summary>
-public static IReadOnlyList<ConnectorMetadata> All => [Nvd, Ghsa, Osv, Kev, IcsCisa];
+public static IReadOnlyList<ConnectorMetadata> All => [Nvd, Ghsa, Osv, Kev, Epss, IcsCisa];
}

View File

@@ -15,7 +15,7 @@ Deterministic merge and reconciliation engine; builds identity graph via aliases
## Interfaces & contracts
- AdvisoryMergeService.MergeAsync(ids or byKind): returns summary {processed, merged, overrides, conflicts}.
- Precedence table configurable but with sane defaults: RedHat/Ubuntu/Debian/SUSE > Vendor PSIRT > GHSA/OSV > NVD; CERTs enrich; KEV sets flags.
-- Range selection uses comparers: NevraComparer, DebEvrComparer, SemVerRange; deterministic tie-breakers.
+- Range selection uses comparers: NevraComparer, DebianEvrComparer, ApkVersionComparer, SemVerRange; deterministic tie-breakers.
- Provenance propagation merges unique entries; references deduped by (url, type).
## Configuration

View File

@@ -0,0 +1,410 @@
namespace StellaOps.Concelier.Merge.Comparers;
using System;
using StellaOps.Concelier.Normalization.Distro;
/// <summary>
/// Compares Alpine APK package versions using apk-tools ordering rules.
/// </summary>
public sealed class ApkVersionComparer : IComparer<ApkVersion>, IComparer<string>
{
public static ApkVersionComparer Instance { get; } = new();
private ApkVersionComparer()
{
}
public int Compare(string? x, string? y)
{
if (ReferenceEquals(x, y))
{
return 0;
}
if (x is null)
{
return -1;
}
if (y is null)
{
return 1;
}
var xParsed = ApkVersion.TryParse(x, out var xVersion);
var yParsed = ApkVersion.TryParse(y, out var yVersion);
if (xParsed && yParsed)
{
return Compare(xVersion, yVersion);
}
if (xParsed)
{
return 1;
}
if (yParsed)
{
return -1;
}
return string.Compare(x, y, StringComparison.Ordinal);
}
public int Compare(ApkVersion? x, ApkVersion? y)
{
if (ReferenceEquals(x, y))
{
return 0;
}
if (x is null)
{
return -1;
}
if (y is null)
{
return 1;
}
var compare = CompareVersionString(x.Version, y.Version);
if (compare != 0)
{
return compare;
}
compare = x.PkgRel.CompareTo(y.PkgRel);
if (compare != 0)
{
return compare;
}
// When pkgrel values are equal, implicit (no -r) sorts before explicit -r0
// e.g., "1.2.3" < "1.2.3-r0"
if (!x.HasExplicitPkgRel && y.HasExplicitPkgRel)
{
return -1;
}
if (x.HasExplicitPkgRel && !y.HasExplicitPkgRel)
{
return 1;
}
return 0;
}
private static int CompareVersionString(string left, string right)
{
var leftIndex = 0;
var rightIndex = 0;
while (true)
{
var leftToken = NextToken(left, ref leftIndex);
var rightToken = NextToken(right, ref rightIndex);
if (leftToken.Type == TokenType.End && rightToken.Type == TokenType.End)
{
return 0;
}
if (leftToken.Type == TokenType.End)
{
return CompareEndToken(rightToken, isLeftEnd: true);
}
if (rightToken.Type == TokenType.End)
{
return CompareEndToken(leftToken, isLeftEnd: false);
}
if (leftToken.Type != rightToken.Type)
{
var compare = CompareDifferentTypes(leftToken, rightToken);
if (compare != 0)
{
return compare;
}
}
else
{
var compare = leftToken.Type switch
{
TokenType.Numeric => CompareNumeric(leftToken.NumericValue, rightToken.NumericValue),
TokenType.Alpha => CompareAlpha(leftToken.Text, rightToken.Text),
TokenType.Suffix => CompareSuffix(leftToken, rightToken),
_ => 0
};
if (compare != 0)
{
return compare;
}
}
}
}
private static int CompareEndToken(VersionToken token, bool isLeftEnd)
{
if (token.Type == TokenType.Suffix)
{
// Compare suffix order: suffix token vs no-suffix (order=0)
// If isLeftEnd=true: comparing END (left) vs suffix (right) → return CompareSuffixOrder(0, right.order)
// If isLeftEnd=false: comparing suffix (left) vs END (right) → return CompareSuffixOrder(left.order, 0)
var compare = isLeftEnd
? CompareSuffixOrder(0, token.SuffixOrder)
: CompareSuffixOrder(token.SuffixOrder, 0);
if (compare != 0)
{
return compare;
}
return isLeftEnd ? -1 : 1;
}
return isLeftEnd ? -1 : 1;
}
private static int CompareDifferentTypes(VersionToken left, VersionToken right)
{
if (left.Type == TokenType.Suffix || right.Type == TokenType.Suffix)
{
var leftOrder = left.Type == TokenType.Suffix ? left.SuffixOrder : 0;
var rightOrder = right.Type == TokenType.Suffix ? right.SuffixOrder : 0;
var compare = CompareSuffixOrder(leftOrder, rightOrder);
if (compare != 0)
{
return compare;
}
return TokenTypeRank(left.Type).CompareTo(TokenTypeRank(right.Type));
}
return TokenTypeRank(left.Type).CompareTo(TokenTypeRank(right.Type));
}
private static int TokenTypeRank(TokenType type)
=> type switch
{
TokenType.Numeric => 3,
TokenType.Alpha => 2,
TokenType.Suffix => 1,
_ => 0
};
private static int CompareNumeric(string left, string right)
{
var leftTrimmed = TrimLeadingZeros(left);
var rightTrimmed = TrimLeadingZeros(right);
if (leftTrimmed.Length != rightTrimmed.Length)
{
return leftTrimmed.Length.CompareTo(rightTrimmed.Length);
}
return string.Compare(leftTrimmed, rightTrimmed, StringComparison.Ordinal);
}
private static int CompareAlpha(string left, string right)
=> string.Compare(left, right, StringComparison.Ordinal);
private static int CompareSuffix(VersionToken left, VersionToken right)
{
var compare = CompareSuffixOrder(left.SuffixOrder, right.SuffixOrder);
if (compare != 0)
{
return compare;
}
if (!string.IsNullOrEmpty(left.SuffixName) || !string.IsNullOrEmpty(right.SuffixName))
{
compare = string.Compare(left.SuffixName, right.SuffixName, StringComparison.Ordinal);
if (compare != 0)
{
return compare;
}
}
if (!left.HasSuffixNumber && !right.HasSuffixNumber)
{
return 0;
}
if (!left.HasSuffixNumber)
{
return -1;
}
if (!right.HasSuffixNumber)
{
return 1;
}
return CompareNumeric(left.SuffixNumber, right.SuffixNumber);
}
private static int CompareSuffixOrder(int leftOrder, int rightOrder)
=> leftOrder.CompareTo(rightOrder);
private static VersionToken NextToken(string value, ref int index)
{
while (index < value.Length)
{
var current = value[index];
if (current == '_')
{
if (index + 1 < value.Length && char.IsLetter(value[index + 1]))
{
return ReadSuffixToken(value, ref index);
}
index++;
continue;
}
if (char.IsDigit(current))
{
return ReadNumericToken(value, ref index);
}
if (char.IsLetter(current))
{
return ReadAlphaToken(value, ref index);
}
index++;
}
return VersionToken.End;
}
private static VersionToken ReadNumericToken(string value, ref int index)
{
var start = index;
while (index < value.Length && char.IsDigit(value[index]))
{
index++;
}
var number = value.Substring(start, index - start);
return VersionToken.Numeric(number);
}
private static VersionToken ReadAlphaToken(string value, ref int index)
{
var start = index;
while (index < value.Length && char.IsLetter(value[index]))
{
index++;
}
var text = value.Substring(start, index - start);
return VersionToken.Alpha(text);
}
private static VersionToken ReadSuffixToken(string value, ref int index)
{
index++;
var nameStart = index;
while (index < value.Length && char.IsLetter(value[index]))
{
index++;
}
var name = value.Substring(nameStart, index - nameStart);
if (name.Length == 0)
{
return VersionToken.End;
}
var normalizedName = name.ToLowerInvariant();
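// apk-tools suffix ranking: alpha < beta < pre < rc < release (no suffix) < p; unknown suffixes rank with release (order 0) and fall back to name comparison.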
var order = normalizedName switch
{
"alpha" => -4,
"beta" => -3,
"pre" => -2,
"rc" => -1,
"p" => 1,
_ => 0
};
var numberStart = index;
while (index < value.Length && char.IsDigit(value[index]))
{
index++;
}
var number = value.Substring(numberStart, index - numberStart);
return VersionToken.Suffix(normalizedName, order, number);
}
private static string TrimLeadingZeros(string value)
{
if (string.IsNullOrEmpty(value))
{
return "0";
}
var index = 0;
while (index < value.Length && value[index] == '0')
{
index++;
}
var trimmed = value[index..];
return trimmed.Length == 0 ? "0" : trimmed;
}
private enum TokenType
{
End,
Numeric,
Alpha,
Suffix
}
private readonly struct VersionToken
{
private VersionToken(TokenType type, string text, string numeric, string suffixName, int suffixOrder, string suffixNumber, bool hasSuffixNumber)
{
Type = type;
Text = text;
NumericValue = numeric;
SuffixName = suffixName;
SuffixOrder = suffixOrder;
SuffixNumber = suffixNumber;
HasSuffixNumber = hasSuffixNumber;
}
public static VersionToken End { get; } = new(TokenType.End, string.Empty, string.Empty, string.Empty, 0, string.Empty, false);
public static VersionToken Numeric(string value)
=> new(TokenType.Numeric, string.Empty, value ?? string.Empty, string.Empty, 0, string.Empty, false);
public static VersionToken Alpha(string value)
=> new(TokenType.Alpha, value ?? string.Empty, string.Empty, string.Empty, 0, string.Empty, false);
public static VersionToken Suffix(string name, int order, string number)
{
var hasNumber = !string.IsNullOrEmpty(number);
return new VersionToken(TokenType.Suffix, string.Empty, string.Empty, name ?? string.Empty, order, hasNumber ? TrimLeadingZeros(number) : string.Empty, hasNumber);
}
public TokenType Type { get; }
public string Text { get; }
public string NumericValue { get; }
public string SuffixName { get; }
public int SuffixOrder { get; }
public string SuffixNumber { get; }
public bool HasSuffixNumber { get; }
}
}
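For orientation, a minimal sketch (not part of the commit) of the orderings the comparer above produces; the suffix ranks come from `ReadSuffixToken` and the pkgrel tie-break from `Compare(ApkVersion, ApkVersion)`:

using System.Diagnostics;

var apk = ApkVersionComparer.Instance;
Debug.Assert(apk.Compare("1.2.3_alpha1", "1.2.3") < 0); // pre-release suffix sorts before release
Debug.Assert(apk.Compare("1.2.3", "1.2.3_p1") < 0);     // patch suffix sorts after release
Debug.Assert(apk.Compare("1.2.3", "1.2.3-r0") < 0);     // implicit pkgrel before explicit -r0
Debug.Assert(apk.Compare("1.2.3-r2", "1.2.3-r10") < 0); // pkgrel compared numerically
Debug.Assert(apk.Compare("1.10", "1.9") > 0);           // numeric tokens compared by value, not lexicographically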

View File

@@ -78,13 +78,7 @@ public sealed class DebianEvrComparer : IComparer<DebianEvr>, IComparer<string>
return compare;
}
-compare = CompareSegment(x.Revision, y.Revision);
-if (compare != 0)
-{
-return compare;
-}
-return string.Compare(x.Original, y.Original, StringComparison.Ordinal);
+return CompareSegment(x.Revision, y.Revision);
}
private static int CompareSegment(string left, string right)

View File

@@ -0,0 +1,17 @@
namespace StellaOps.Concelier.Merge.Comparers;
/// <summary>
/// Provides version comparison with optional proof output.
/// </summary>
public interface IVersionComparator
{
/// <summary>
/// Compares two version strings.
/// </summary>
int Compare(string? left, string? right);
/// <summary>
/// Compares two version strings and returns proof lines.
/// </summary>
VersionComparisonResult CompareWithProof(string? left, string? right);
}

View File

@@ -90,13 +90,7 @@ public sealed class NevraComparer : IComparer<Nevra>, IComparer<string>
return compare;
}
-compare = RpmVersionComparer.Compare(x.Release, y.Release);
-if (compare != 0)
-{
-return compare;
-}
-return string.Compare(x.Original, y.Original, StringComparison.Ordinal);
+return RpmVersionComparer.Compare(x.Release, y.Release);
}
}

View File

@@ -0,0 +1,10 @@
namespace StellaOps.Concelier.Merge.Comparers;
using System.Collections.Immutable;
/// <summary>
/// Result of a version comparison with explainability proof lines.
/// </summary>
public sealed record VersionComparisonResult(
int Comparison,
ImmutableArray<string> ProofLines);

View File

@@ -13,5 +13,6 @@
<ProjectReference Include="../StellaOps.Concelier.Core/StellaOps.Concelier.Core.csproj" />
<ProjectReference Include="../StellaOps.Concelier.Models/StellaOps.Concelier.Models.csproj" />
<ProjectReference Include="../StellaOps.Concelier.Normalization/StellaOps.Concelier.Normalization.csproj" />
<ProjectReference Include="../../../__Libraries/StellaOps.VersionComparison/StellaOps.VersionComparison.csproj" />
</ItemGroup>
</Project>

View File

@@ -4,7 +4,7 @@ Canonical data model for normalized advisories and all downstream serialization.
## Scope
- Canonical types: Advisory, AdvisoryReference, CvssMetric, AffectedPackage, AffectedVersionRange, AdvisoryProvenance.
- Invariants: stable ordering, culture-invariant serialization, UTC timestamps, deterministic equality semantics.
-- Field semantics: preserve all aliases/references; ranges per ecosystem (NEVRA/EVR/SemVer); provenance on every mapped field.
+- Field semantics: preserve all aliases/references; ranges per ecosystem (NEVRA/EVR/APK/SemVer); provenance on every mapped field.
- Backward/forward compatibility: additive evolution; versioned DTOs where needed; no breaking field renames.
- Detailed field coverage documented in `CANONICAL_RECORDS.md`; update alongside model changes.
## Participants

View File

@@ -90,6 +90,7 @@ public static class AffectedPackageTypes
{
public const string Rpm = "rpm";
public const string Deb = "deb";
public const string Apk = "apk";
public const string Cpe = "cpe";
public const string SemVer = "semver";
public const string Vendor = "vendor";

View File

@@ -40,6 +40,7 @@ public static class AffectedVersionRangeExtensions
NormalizedVersionSchemes.SemVer => BuildSemVerFallback(range, notes),
NormalizedVersionSchemes.Nevra => BuildNevraFallback(range, notes),
NormalizedVersionSchemes.Evr => BuildEvrFallback(range, notes),
NormalizedVersionSchemes.Apk => BuildApkFallback(range, notes),
_ => null,
};
}
@@ -218,4 +219,68 @@ public static class AffectedVersionRangeExtensions
return null;
}
private static NormalizedVersionRule? BuildApkFallback(AffectedVersionRange range, string? notes)
{
var resolvedNotes = Validation.TrimToNull(notes);
var introduced = Validation.TrimToNull(range.IntroducedVersion);
var fixedVersion = Validation.TrimToNull(range.FixedVersion);
var lastAffected = Validation.TrimToNull(range.LastAffectedVersion);
if (!string.IsNullOrEmpty(introduced) && !string.IsNullOrEmpty(fixedVersion))
{
return new NormalizedVersionRule(
NormalizedVersionSchemes.Apk,
NormalizedVersionRuleTypes.Range,
min: introduced,
minInclusive: true,
max: fixedVersion,
maxInclusive: false,
notes: resolvedNotes);
}
if (!string.IsNullOrEmpty(introduced) && !string.IsNullOrEmpty(lastAffected))
{
return new NormalizedVersionRule(
NormalizedVersionSchemes.Apk,
NormalizedVersionRuleTypes.Range,
min: introduced,
minInclusive: true,
max: lastAffected,
maxInclusive: true,
notes: resolvedNotes);
}
if (!string.IsNullOrEmpty(introduced))
{
return new NormalizedVersionRule(
NormalizedVersionSchemes.Apk,
NormalizedVersionRuleTypes.GreaterThanOrEqual,
min: introduced,
minInclusive: true,
notes: resolvedNotes);
}
if (!string.IsNullOrEmpty(fixedVersion))
{
return new NormalizedVersionRule(
NormalizedVersionSchemes.Apk,
NormalizedVersionRuleTypes.LessThan,
max: fixedVersion,
maxInclusive: false,
notes: resolvedNotes);
}
if (!string.IsNullOrEmpty(lastAffected))
{
return new NormalizedVersionRule(
NormalizedVersionSchemes.Apk,
NormalizedVersionRuleTypes.LessThanOrEqual,
max: lastAffected,
maxInclusive: true,
notes: resolvedNotes);
}
return null;
}
}
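For orientation, the fallback above maps the available range fields to normalized `apk` rules as follows (hypothetical versions):
// introduced + fixed        → Range: [1.2.3-r0, 1.2.4-r0)
// introduced + lastAffected → Range: [1.2.3-r0, 1.2.5-r1]
// introduced only           → GreaterThanOrEqual: >= 1.2.3-r0
// fixed only                → LessThan: < 1.2.4-r0
// lastAffected only         → LessThanOrEqual: <= 1.2.5-r1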

View File

@@ -56,7 +56,7 @@ Deterministic ordering: by `role` (nulls first) then `displayName`.
| Field | Type | Required | Notes |
|-------|------|----------|-------|
-| `type` | string | yes | Semantic type (`semver`, `rpm`, `deb`, `purl`, `cpe`, etc.). Lowercase. |
+| `type` | string | yes | Semantic type (`semver`, `rpm`, `deb`, `apk`, `purl`, `cpe`, etc.). Lowercase. |
| `identifier` | string | yes | Canonical identifier (package name, PURL, CPE, NEVRA, etc.). |
| `platform` | string? | optional | Explicit platform / distro (e.g. `ubuntu`, `rhel-8`). |
| `versionRanges` | AffectedVersionRange[] | yes | Deduplicated + sorted by introduced/fixed/last/expr/kind. |
@@ -69,7 +69,7 @@ Deterministic ordering: packages sorted by `type`, then `identifier`, then `plat
| Field | Type | Required | Notes |
|-------|------|----------|-------|
-| `rangeKind` | string | yes | Classification of range semantics (`semver`, `evr`, `nevra`, `version`, `purl`). Lowercase. |
+| `rangeKind` | string | yes | Classification of range semantics (`semver`, `evr`, `nevra`, `apk`, `version`, `purl`). Lowercase. |
| `introducedVersion` | string? | optional | Inclusive lower bound when impact begins. |
| `fixedVersion` | string? | optional | Exclusive bounding version containing the fix. |
| `lastAffectedVersion` | string? | optional | Inclusive upper bound when no fix exists. |

View File

@@ -172,6 +172,7 @@ public static class NormalizedVersionSchemes
public const string SemVer = "semver";
public const string Nevra = "nevra";
public const string Evr = "evr";
public const string Apk = "apk";
}
public static class NormalizedVersionRuleTypes

View File

@@ -0,0 +1,109 @@
using System.Globalization;
namespace StellaOps.Concelier.Normalization.Distro;
/// <summary>
/// Represents an Alpine APK version as a <c>version[-r{pkgrel}]</c> tuple.
/// </summary>
public sealed class ApkVersion
{
private ApkVersion(string version, int pkgRel, bool hasExplicitPkgRel, string original)
{
Version = version;
PkgRel = pkgRel;
HasExplicitPkgRel = hasExplicitPkgRel;
Original = original;
}
/// <summary>
/// Version component before the <c>-r</c> release suffix.
/// </summary>
public string Version { get; }
/// <summary>
/// Package release number (defaults to <c>0</c> when omitted).
/// </summary>
public int PkgRel { get; }
/// <summary>
/// Indicates whether the <c>-r</c> suffix was explicitly present.
/// </summary>
public bool HasExplicitPkgRel { get; }
/// <summary>
/// Original trimmed input value.
/// </summary>
public string Original { get; }
/// <summary>
/// Attempts to parse an APK version string.
/// </summary>
public static bool TryParse(string? value, out ApkVersion? result)
{
result = null;
if (string.IsNullOrWhiteSpace(value))
{
return false;
}
var trimmed = value.Trim();
var releaseIndex = trimmed.LastIndexOf("-r", StringComparison.Ordinal);
if (releaseIndex < 0)
{
if (trimmed.Length == 0)
{
return false;
}
result = new ApkVersion(trimmed, 0, hasExplicitPkgRel: false, trimmed);
return true;
}
if (releaseIndex == 0 || releaseIndex >= trimmed.Length - 2)
{
return false;
}
var versionPart = trimmed[..releaseIndex];
var pkgRelPart = trimmed[(releaseIndex + 2)..];
if (string.IsNullOrWhiteSpace(versionPart))
{
return false;
}
if (!int.TryParse(pkgRelPart, NumberStyles.Integer, CultureInfo.InvariantCulture, out var pkgRel))
{
return false;
}
result = new ApkVersion(versionPart, pkgRel, hasExplicitPkgRel: true, trimmed);
return true;
}
/// <summary>
/// Parses an APK version string or throws <see cref="FormatException"/>.
/// </summary>
public static ApkVersion Parse(string value)
{
if (!TryParse(value, out var version))
{
throw new FormatException($"Input '{value}' is not a valid APK version string.");
}
return version!;
}
/// <summary>
/// Returns a canonical APK version string.
/// </summary>
public string ToCanonicalString()
{
var suffix = HasExplicitPkgRel || PkgRel > 0 ? $"-r{PkgRel}" : string.Empty;
return $"{Version}{suffix}";
}
/// <inheritdoc />
public override string ToString() => Original;
}
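A minimal parsing sketch (not part of the commit), following the `TryParse` rules above:

ApkVersion.TryParse("1.2.3-r4", out var v);   // true: Version="1.2.3", PkgRel=4, HasExplicitPkgRel=true
ApkVersion.TryParse("1.2.3", out var bare);   // true: PkgRel defaults to 0, HasExplicitPkgRel=false
ApkVersion.TryParse("-r1", out _);            // false: empty version component
ApkVersion.TryParse("1.2.3-r", out _);        // false: missing pkgrel digits
ApkVersion.TryParse("1.2.3-rc1", out _);      // false: "-rc1" is not "-r{digits}"; apk spells rc suffixes "_rc1"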

View File

@@ -0,0 +1,148 @@
-- Vuln Schema Migration 006b: Complete merge_events Partition Migration
-- Sprint: SPRINT_3422_0001_0001 - Time-Based Partitioning
-- Task: 3.3 - Migrate data from existing table
-- Category: C (data migration, requires maintenance window)
--
-- IMPORTANT: Run this during maintenance window AFTER 006_partition_merge_events.sql
-- Prerequisites:
-- 1. Stop concelier/vuln services (pause advisory merge operations)
-- 2. Verify partitioned table exists: \d+ vuln.merge_events_partitioned
--
-- Execution time depends on data volume. For large tables (>1M rows), consider
-- batched migration (see bottom of file).
BEGIN;
-- ============================================================================
-- Step 1: Verify partitioned table exists
-- ============================================================================
DO $$
BEGIN
IF NOT EXISTS (
SELECT 1 FROM pg_class c
JOIN pg_namespace n ON c.relnamespace = n.oid
WHERE n.nspname = 'vuln' AND c.relname = 'merge_events_partitioned'
) THEN
RAISE EXCEPTION 'Partitioned table vuln.merge_events_partitioned does not exist. Run 006_partition_merge_events.sql first.';
END IF;
END
$$;
-- ============================================================================
-- Step 2: Record row counts for verification
-- ============================================================================
DO $$
DECLARE
v_source_count BIGINT;
BEGIN
SELECT COUNT(*) INTO v_source_count FROM vuln.merge_events;
RAISE NOTICE 'Source table row count: %', v_source_count;
END
$$;
-- ============================================================================
-- Step 3: Migrate data from old table to partitioned table
-- ============================================================================
INSERT INTO vuln.merge_events_partitioned (
id, advisory_id, source_id, event_type, old_value, new_value, created_at
)
SELECT
id, advisory_id, source_id, event_type, old_value, new_value, created_at
FROM vuln.merge_events
ON CONFLICT DO NOTHING;
-- ============================================================================
-- Step 4: Verify row counts match
-- ============================================================================
DO $$
DECLARE
v_source_count BIGINT;
v_target_count BIGINT;
BEGIN
SELECT COUNT(*) INTO v_source_count FROM vuln.merge_events;
SELECT COUNT(*) INTO v_target_count FROM vuln.merge_events_partitioned;
IF v_source_count <> v_target_count THEN
RAISE WARNING 'Row count mismatch: source=% target=%. Check for conflicts.', v_source_count, v_target_count;
ELSE
RAISE NOTICE 'Row counts match: % rows migrated successfully', v_target_count;
END IF;
END
$$;
-- ============================================================================
-- Step 5: Drop foreign key constraints referencing this table
-- ============================================================================
-- Drop FK constraints first (advisory_id references advisories)
ALTER TABLE vuln.merge_events DROP CONSTRAINT IF EXISTS merge_events_advisory_id_fkey;
ALTER TABLE vuln.merge_events DROP CONSTRAINT IF EXISTS merge_events_source_id_fkey;
-- ============================================================================
-- Step 6: Swap tables
-- ============================================================================
-- Rename old table to backup
ALTER TABLE IF EXISTS vuln.merge_events RENAME TO merge_events_old;
-- Rename partitioned table to production name
ALTER TABLE vuln.merge_events_partitioned RENAME TO merge_events;
-- Update sequence to continue from max ID
DO $$
DECLARE
v_max_id BIGINT;
BEGIN
SELECT COALESCE(MAX(id), 0) INTO v_max_id FROM vuln.merge_events;
IF EXISTS (SELECT 1 FROM pg_sequences WHERE schemaname = 'vuln' AND sequencename = 'merge_events_id_seq') THEN
PERFORM setval('vuln.merge_events_id_seq', v_max_id + 1, false);
END IF;
END
$$;
-- ============================================================================
-- Step 7: Add comment about partitioning strategy
-- ============================================================================
-- COMMENT ON accepts only a string literal, so build the dynamic text via EXECUTE.
DO $$
BEGIN
EXECUTE format(
'COMMENT ON TABLE vuln.merge_events IS %L',
'Advisory merge event log. Partitioned monthly by created_at. FK to advisories removed for partition support. Migrated: ' || NOW()::TEXT);
END
$$;
COMMIT;
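-- Optional spot-check after the swap (a sketch; adjust the date range to your data):
-- EXPLAIN (COSTS OFF)
-- SELECT count(*) FROM vuln.merge_events
-- WHERE created_at >= DATE '2025-11-01' AND created_at < DATE '2025-12-01';
-- The plan should scan only the matching monthly partition, confirming pruning works.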
-- ============================================================================
-- Cleanup (run manually after validation)
-- ============================================================================
-- After confirming the migration is successful (wait 24-48h), drop the old table:
-- DROP TABLE IF EXISTS vuln.merge_events_old;
-- ============================================================================
-- Batched Migration Alternative (for very large tables)
-- ============================================================================
-- If the table has >10M rows, consider this batched approach instead:
--
-- DO $$
-- DECLARE
-- v_batch_size INT := 100000;
-- v_offset INT := 0;
-- v_migrated INT := 0;
-- BEGIN
-- LOOP
-- INSERT INTO vuln.merge_events_partitioned
-- SELECT * FROM vuln.merge_events
-- ORDER BY id
-- OFFSET v_offset LIMIT v_batch_size
-- ON CONFLICT DO NOTHING;
--
-- GET DIAGNOSTICS v_migrated = ROW_COUNT;
-- EXIT WHEN v_migrated < v_batch_size;
-- v_offset := v_offset + v_batch_size;
-- RAISE NOTICE 'Migrated % rows total', v_offset;
-- COMMIT;
-- END LOOP;
-- END
-- $$;