Refactor code structure for improved readability and maintainability; optimize performance in key functions.

This commit is contained in:
master
2025-12-22 19:06:31 +02:00
parent dfaa2079aa
commit 4602ccc3a3
1444 changed files with 109919 additions and 8058 deletions

View File

@@ -0,0 +1,25 @@
# Concelier Alpine Connector Charter
## Mission
Implement and maintain the Alpine secdb connector that ingests Alpine Linux package fix data into Concelier under the Aggregation-Only Contract (AOC). Preserve APK version semantics and provenance while keeping ingestion deterministic and offline-ready.
## Scope
- Connector fetch/parse/map logic in `StellaOps.Concelier.Connector.Distro.Alpine`.
- Alpine secdb JSON parsing and normalization of package fix entries.
- Source cursor/fetch caching and deterministic mapping.
- Unit/integration tests and fixtures for secdb parsing and mapping.
## Required Reading
- `docs/modules/concelier/architecture.md`
- `docs/ingestion/aggregation-only-contract.md`
- `docs/modules/concelier/operations/connectors/alpine.md`
- `docs/modules/concelier/operations/mirror.md`
- `docs/product-advisories/archived/22-Dec-2025 - Getting Distro Backport Logic Right.md`
## Working Agreement
1. **Status sync**: update task state to `DOING`/`DONE` in the sprint file and local `TASKS.md` before/after work.
2. **AOC adherence**: do not derive severity or merge fields; persist upstream data with provenance.
3. **Determinism**: sort packages, version keys, and CVE lists; normalize timestamps to UTC ISO-8601.
4. **Offline readiness**: only fetch from allowlisted secdb hosts; document bundle usage for air-gapped runs.
5. **Testing**: add fixtures for parsing and mapping; keep integration tests deterministic and opt-in.
6. **Documentation**: update connector ops docs when configuration or mapping changes.

View File

@@ -0,0 +1,538 @@
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Options;
using StellaOps.Concelier.Documents;
using StellaOps.Concelier.Connector.Common;
using StellaOps.Concelier.Connector.Common.Fetch;
using StellaOps.Concelier.Connector.Distro.Alpine.Configuration;
using StellaOps.Concelier.Connector.Distro.Alpine.Dto;
using StellaOps.Concelier.Connector.Distro.Alpine.Internal;
using StellaOps.Concelier.Storage;
using StellaOps.Concelier.Storage.Advisories;
using StellaOps.Plugin;
namespace StellaOps.Concelier.Connector.Distro.Alpine;
/// <summary>
/// Feed connector that ingests Alpine Linux secdb JSON documents (one per
/// release/repository pair) through the fetch → parse → map pipeline.
/// Progress is tracked in an <see cref="AlpineCursor"/> persisted via the
/// source state repository so each stage can resume deterministically.
/// </summary>
public sealed class AlpineConnector : IFeedConnector
{
    // Schema identifier stamped on DTO records produced by ParseAsync.
    private const string SchemaVersion = "alpine.secdb.v1";

    private readonly SourceFetchService _fetchService;
    private readonly RawDocumentStorage _rawDocumentStorage;
    private readonly IDocumentStore _documentStore;
    private readonly IDtoStore _dtoStore;
    private readonly IAdvisoryStore _advisoryStore;
    private readonly ISourceStateRepository _stateRepository;
    private readonly AlpineOptions _options;
    private readonly TimeProvider _timeProvider;
    private readonly ILogger<AlpineConnector> _logger;

    // Pre-compiled log delegate: LoggerMessage.Define avoids per-call template
    // parsing and boxing of the structured-log arguments.
    private static readonly Action<ILogger, string, int, Exception?> LogMapped =
        LoggerMessage.Define<string, int>(
            LogLevel.Information,
            new EventId(1, "AlpineMapped"),
            "Alpine secdb {Stream} mapped {AdvisoryCount} advisories");

    /// <summary>
    /// Creates the connector. All collaborators are required except
    /// <paramref name="timeProvider"/>, which falls back to <see cref="TimeProvider.System"/>.
    /// Options are validated eagerly so misconfiguration fails at construction.
    /// </summary>
    /// <exception cref="ArgumentNullException">A required dependency is null.</exception>
    public AlpineConnector(
        SourceFetchService fetchService,
        RawDocumentStorage rawDocumentStorage,
        IDocumentStore documentStore,
        IDtoStore dtoStore,
        IAdvisoryStore advisoryStore,
        ISourceStateRepository stateRepository,
        IOptions<AlpineOptions> options,
        TimeProvider? timeProvider,
        ILogger<AlpineConnector> logger)
    {
        _fetchService = fetchService ?? throw new ArgumentNullException(nameof(fetchService));
        _rawDocumentStorage = rawDocumentStorage ?? throw new ArgumentNullException(nameof(rawDocumentStorage));
        _documentStore = documentStore ?? throw new ArgumentNullException(nameof(documentStore));
        _dtoStore = dtoStore ?? throw new ArgumentNullException(nameof(dtoStore));
        _advisoryStore = advisoryStore ?? throw new ArgumentNullException(nameof(advisoryStore));
        _stateRepository = stateRepository ?? throw new ArgumentNullException(nameof(stateRepository));
        // Guards both a null IOptions wrapper and a null Options.Value.
        _options = (options ?? throw new ArgumentNullException(nameof(options))).Value ?? throw new ArgumentNullException(nameof(options));
        _options.Validate();
        _timeProvider = timeProvider ?? TimeProvider.System;
        _logger = logger ?? throw new ArgumentNullException(nameof(logger));
    }

    /// <summary>Stable source name shared with the plugin registration.</summary>
    public string SourceName => AlpineConnectorPlugin.SourceName;

    /// <summary>
    /// Fetches each configured release/repository secdb document (up to the per-run cap),
    /// reusing ETag/Last-Modified validators to skip unchanged content, and records newly
    /// fetched documents on the cursor as pending parse work.
    /// </summary>
    public async Task FetchAsync(IServiceProvider services, CancellationToken cancellationToken)
    {
        ArgumentNullException.ThrowIfNull(services);
        var cursor = await GetCursorAsync(cancellationToken).ConfigureAwait(false);
        var now = _timeProvider.GetUtcNow();
        var pendingDocuments = new HashSet<Guid>(cursor.PendingDocuments);
        var pendingMappings = new HashSet<Guid>(cursor.PendingMappings);
        var fetchCache = new Dictionary<string, AlpineFetchCacheEntry>(cursor.FetchCache, StringComparer.OrdinalIgnoreCase);
        var touchedResources = new HashSet<string>(StringComparer.OrdinalIgnoreCase);
        var targets = BuildTargets().ToList();
        var maxDocuments = Math.Clamp(_options.MaxDocumentsPerFetch, 1, 200);
        // Only prune stale cache entries when every target could be visited this run;
        // otherwise untouched keys may simply be beyond the per-run cap, not stale.
        var pruneCache = targets.Count <= maxDocuments;
        foreach (var target in targets.Take(maxDocuments))
        {
            cancellationToken.ThrowIfCancellationRequested();
            var cacheKey = target.Uri.ToString();
            touchedResources.Add(cacheKey);
            cursor.TryGetCache(cacheKey, out var cachedEntry);
            var existing = await _documentStore.FindBySourceAndUriAsync(SourceName, cacheKey, cancellationToken).ConfigureAwait(false);
            var metadata = BuildMetadata(target.Release, target.Repository, target.Stream, target.Uri);
            var request = new SourceFetchRequest(AlpineOptions.HttpClientName, SourceName, target.Uri)
            {
                Metadata = metadata,
                AcceptHeaders = new[] { "application/json" },
                TimeoutOverride = _options.FetchTimeout,
                // Prefer validators from the stored document; fall back to the cursor cache.
                ETag = existing?.Etag ?? cachedEntry?.ETag,
                LastModified = existing?.LastModified ?? cachedEntry?.LastModified,
            };
            SourceFetchResult result;
            try
            {
                result = await _fetchService.FetchAsync(request, cancellationToken).ConfigureAwait(false);
            }
            catch (Exception ex)
            {
                // Record the failure (with a 5-minute backoff) before surfacing it to the scheduler.
                _logger.LogError(ex, "Alpine secdb fetch failed for {Uri}", target.Uri);
                await _stateRepository.MarkFailureAsync(SourceName, now, TimeSpan.FromMinutes(5), ex.Message, cancellationToken).ConfigureAwait(false);
                throw;
            }
            if (result.IsNotModified)
            {
                // Not modified: refresh the cache entry from the stored document and,
                // when that document is already mapped, clear it from the pending sets.
                if (existing is not null)
                {
                    fetchCache[cacheKey] = new AlpineFetchCacheEntry(existing.Etag, existing.LastModified);
                    if (string.Equals(existing.Status, DocumentStatuses.Mapped, StringComparison.Ordinal))
                    {
                        pendingDocuments.Remove(existing.Id);
                        pendingMappings.Remove(existing.Id);
                    }
                }
                continue;
            }
            if (!result.IsSuccess || result.Document is null)
            {
                continue;
            }
            fetchCache[cacheKey] = AlpineFetchCacheEntry.FromDocument(result.Document);
            pendingDocuments.Add(result.Document.Id);
            // A re-fetched document must be re-parsed before it can be mapped again.
            pendingMappings.Remove(result.Document.Id);
            if (_options.RequestDelay > TimeSpan.Zero)
            {
                try
                {
                    // Optional pacing between requests to avoid hammering the secdb host.
                    await Task.Delay(_options.RequestDelay, cancellationToken).ConfigureAwait(false);
                }
                catch (TaskCanceledException)
                {
                    // Cancelled during pacing: stop fetching but still persist cursor progress below.
                    break;
                }
            }
        }
        if (pruneCache && fetchCache.Count > 0 && touchedResources.Count > 0)
        {
            // Drop cache entries for resources that are no longer configured targets.
            var staleKeys = fetchCache.Keys.Where(key => !touchedResources.Contains(key)).ToArray();
            foreach (var key in staleKeys)
            {
                fetchCache.Remove(key);
            }
        }
        var updatedCursor = cursor
            .WithPendingDocuments(pendingDocuments)
            .WithPendingMappings(pendingMappings)
            .WithFetchCache(fetchCache);
        await UpdateCursorAsync(updatedCursor, cancellationToken).ConfigureAwait(false);
    }

    /// <summary>
    /// Downloads each pending raw document, parses it into an <see cref="AlpineSecDbDto"/>,
    /// stores the DTO, and queues the document for mapping. Download failures rethrow
    /// (retry later); parse failures mark the document failed and skip it.
    /// </summary>
    public async Task ParseAsync(IServiceProvider services, CancellationToken cancellationToken)
    {
        ArgumentNullException.ThrowIfNull(services);
        var cursor = await GetCursorAsync(cancellationToken).ConfigureAwait(false);
        if (cursor.PendingDocuments.Count == 0)
        {
            return;
        }
        // Work on copies; the cursor's own collections stay immutable during iteration.
        var remaining = cursor.PendingDocuments.ToList();
        var pendingMappings = cursor.PendingMappings.ToList();
        foreach (var documentId in cursor.PendingDocuments)
        {
            cancellationToken.ThrowIfCancellationRequested();
            var document = await _documentStore.FindAsync(documentId, cancellationToken).ConfigureAwait(false);
            if (document is null)
            {
                // Document no longer exists; nothing to parse.
                remaining.Remove(documentId);
                continue;
            }
            if (!document.PayloadId.HasValue)
            {
                _logger.LogWarning("Alpine secdb document {DocumentId} missing raw payload", document.Id);
                await _documentStore.UpdateStatusAsync(document.Id, DocumentStatuses.Failed, cancellationToken).ConfigureAwait(false);
                remaining.Remove(documentId);
                continue;
            }
            byte[] bytes;
            try
            {
                bytes = await _rawDocumentStorage.DownloadAsync(document.PayloadId.Value, cancellationToken).ConfigureAwait(false);
            }
            catch (Exception ex)
            {
                // Storage failures surface to the job so it can retry on a later run.
                _logger.LogError(ex, "Failed to download Alpine secdb document {DocumentId}", document.Id);
                throw;
            }
            AlpineSecDbDto dto;
            try
            {
                var json = Encoding.UTF8.GetString(bytes);
                dto = AlpineSecDbParser.Parse(json);
                // Backfill distro/repo/url fields from fetch metadata when the payload omits them.
                dto = ApplyMetadataFallbacks(dto, document);
            }
            catch (Exception ex)
            {
                _logger.LogWarning(ex, "Failed to parse Alpine secdb payload for document {DocumentId}", document.Id);
                await _documentStore.UpdateStatusAsync(document.Id, DocumentStatuses.Failed, cancellationToken).ConfigureAwait(false);
                remaining.Remove(documentId);
                continue;
            }
            var payload = ToDocument(dto);
            var dtoRecord = new DtoRecord(Guid.NewGuid(), document.Id, SourceName, SchemaVersion, payload, _timeProvider.GetUtcNow());
            await _dtoStore.UpsertAsync(dtoRecord, cancellationToken).ConfigureAwait(false);
            await _documentStore.UpdateStatusAsync(document.Id, DocumentStatuses.PendingMap, cancellationToken).ConfigureAwait(false);
            remaining.Remove(document.Id);
            if (!pendingMappings.Contains(document.Id))
            {
                pendingMappings.Add(document.Id);
            }
        }
        var updatedCursor = cursor
            .WithPendingDocuments(remaining)
            .WithPendingMappings(pendingMappings);
        await UpdateCursorAsync(updatedCursor, cancellationToken).ConfigureAwait(false);
    }

    /// <summary>
    /// Maps each pending DTO to advisories via <see cref="AlpineMapper"/>, upserts them,
    /// and marks the source document mapped. Missing DTOs/documents are silently dropped
    /// from the queue; deserialization failures mark the document failed.
    /// </summary>
    public async Task MapAsync(IServiceProvider services, CancellationToken cancellationToken)
    {
        ArgumentNullException.ThrowIfNull(services);
        var cursor = await GetCursorAsync(cancellationToken).ConfigureAwait(false);
        if (cursor.PendingMappings.Count == 0)
        {
            return;
        }
        var pendingMappings = cursor.PendingMappings.ToList();
        foreach (var documentId in cursor.PendingMappings)
        {
            cancellationToken.ThrowIfCancellationRequested();
            var dtoRecord = await _dtoStore.FindByDocumentIdAsync(documentId, cancellationToken).ConfigureAwait(false);
            var document = await _documentStore.FindAsync(documentId, cancellationToken).ConfigureAwait(false);
            if (dtoRecord is null || document is null)
            {
                pendingMappings.Remove(documentId);
                continue;
            }
            AlpineSecDbDto dto;
            try
            {
                dto = FromDocument(dtoRecord.Payload);
                dto = ApplyMetadataFallbacks(dto, document);
            }
            catch (Exception ex)
            {
                _logger.LogError(ex, "Failed to deserialize Alpine secdb DTO for document {DocumentId}", documentId);
                await _documentStore.UpdateStatusAsync(documentId, DocumentStatuses.Failed, cancellationToken).ConfigureAwait(false);
                pendingMappings.Remove(documentId);
                continue;
            }
            var advisories = AlpineMapper.Map(dto, document, _timeProvider.GetUtcNow());
            foreach (var advisory in advisories)
            {
                await _advisoryStore.UpsertAsync(advisory, cancellationToken).ConfigureAwait(false);
            }
            await _documentStore.UpdateStatusAsync(documentId, DocumentStatuses.Mapped, cancellationToken).ConfigureAwait(false);
            pendingMappings.Remove(documentId);
            if (advisories.Count > 0)
            {
                var stream = BuildStream(dto);
                LogMapped(_logger, stream, advisories.Count, null);
            }
        }
        var updatedCursor = cursor.WithPendingMappings(pendingMappings);
        await UpdateCursorAsync(updatedCursor, cancellationToken).ConfigureAwait(false);
    }

    /// <summary>Loads the stored cursor, or <see cref="AlpineCursor.Empty"/> when no state exists.</summary>
    private async Task<AlpineCursor> GetCursorAsync(CancellationToken cancellationToken)
    {
        var state = await _stateRepository.TryGetAsync(SourceName, cancellationToken).ConfigureAwait(false);
        return state is null ? AlpineCursor.Empty : AlpineCursor.FromDocument(state.Cursor);
    }

    /// <summary>Persists the cursor together with the current UTC timestamp.</summary>
    private async Task UpdateCursorAsync(AlpineCursor cursor, CancellationToken cancellationToken)
    {
        var document = cursor.ToDocumentObject();
        await _stateRepository.UpdateCursorAsync(SourceName, document, _timeProvider.GetUtcNow(), cancellationToken).ConfigureAwait(false);
    }

    /// <summary>
    /// Enumerates one fetch target per (release, repository) pair, e.g.
    /// "v3.20/main" resolving to BaseUri + "v3.20/main.json".
    /// </summary>
    private IEnumerable<AlpineTarget> BuildTargets()
    {
        var releases = NormalizeList(_options.Releases);
        var repositories = NormalizeList(_options.Repositories);
        foreach (var release in releases)
        {
            foreach (var repository in repositories)
            {
                var stream = $"{release}/{repository}";
                var relative = $"{release}/{repository}.json";
                var uri = new Uri(_options.BaseUri, relative);
                yield return new AlpineTarget(release, repository, stream, uri);
            }
        }
    }

    /// <summary>
    /// Trims, de-duplicates (case-insensitively), and sorts the configured values so
    /// target enumeration is deterministic across runs.
    /// </summary>
    private static string[] NormalizeList(string[] values)
    {
        if (values is null || values.Length == 0)
        {
            return Array.Empty<string>();
        }
        return values
            .Where(static value => !string.IsNullOrWhiteSpace(value))
            .Select(static value => value.Trim())
            .Distinct(StringComparer.OrdinalIgnoreCase)
            .OrderBy(static value => value, StringComparer.OrdinalIgnoreCase)
            .ToArray();
    }

    /// <summary>Builds the provenance metadata attached to each fetched document.</summary>
    private static Dictionary<string, string> BuildMetadata(string release, string repository, string stream, Uri uri)
    {
        var metadata = new Dictionary<string, string>(StringComparer.Ordinal)
        {
            ["alpine.release"] = release,
            ["alpine.repo"] = repository,
            ["source.stream"] = stream,
            ["document.id"] = $"alpine:{stream}",
            ["alpine.uri"] = uri.ToString(),
        };
        return metadata;
    }

    /// <summary>
    /// Fills in DistroVersion/RepoName/UrlPrefix from the document's fetch metadata when
    /// the parsed payload left them blank; any still-missing value becomes an empty string.
    /// </summary>
    private static AlpineSecDbDto ApplyMetadataFallbacks(AlpineSecDbDto dto, DocumentRecord document)
    {
        if (document.Metadata is null || document.Metadata.Count == 0)
        {
            return dto;
        }
        var distro = dto.DistroVersion;
        var repo = dto.RepoName;
        var prefix = dto.UrlPrefix;
        if (string.IsNullOrWhiteSpace(distro) && document.Metadata.TryGetValue("alpine.release", out var release))
        {
            distro = release;
        }
        if (string.IsNullOrWhiteSpace(repo) && document.Metadata.TryGetValue("alpine.repo", out var repoValue))
        {
            repo = repoValue;
        }
        if (string.IsNullOrWhiteSpace(prefix) && document.Metadata.TryGetValue("alpine.uri", out var uriValue))
        {
            // Derive a prefix from the fetch URI's authority, e.g. "https://host/".
            if (Uri.TryCreate(uriValue, UriKind.Absolute, out var parsed))
            {
                prefix = parsed.GetLeftPart(UriPartial.Authority) + "/";
            }
        }
        return dto with
        {
            DistroVersion = distro ?? string.Empty,
            RepoName = repo ?? string.Empty,
            UrlPrefix = prefix ?? string.Empty
        };
    }

    /// <summary>Builds the "release/repo" stream label used for logging, tolerating blank parts.</summary>
    private static string BuildStream(AlpineSecDbDto dto)
    {
        var release = dto.DistroVersion?.Trim();
        var repo = dto.RepoName?.Trim();
        if (!string.IsNullOrWhiteSpace(release) && !string.IsNullOrWhiteSpace(repo))
        {
            return $"{release}/{repo}";
        }
        if (!string.IsNullOrWhiteSpace(release))
        {
            return release;
        }
        if (!string.IsNullOrWhiteSpace(repo))
        {
            return repo;
        }
        return "unknown";
    }

    /// <summary>
    /// Serializes the DTO for storage, sorting secfix version keys and cleaning/sorting
    /// CVE lists case-insensitively so the stored form is deterministic.
    /// </summary>
    private static DocumentObject ToDocument(AlpineSecDbDto dto)
    {
        var packages = new DocumentArray();
        foreach (var package in dto.Packages)
        {
            var secfixes = new DocumentObject();
            foreach (var pair in package.Secfixes.OrderBy(pair => pair.Key, StringComparer.OrdinalIgnoreCase))
            {
                var cves = pair.Value ?? Array.Empty<string>();
                var ordered = cves
                    .Where(static value => !string.IsNullOrWhiteSpace(value))
                    .Select(static value => value.Trim())
                    .Distinct(StringComparer.OrdinalIgnoreCase)
                    .OrderBy(static value => value, StringComparer.OrdinalIgnoreCase)
                    .ToArray();
                secfixes[pair.Key] = new DocumentArray(ordered);
            }
            packages.Add(new DocumentObject
            {
                ["name"] = package.Name,
                ["secfixes"] = secfixes
            });
        }
        var doc = new DocumentObject
        {
            ["distroVersion"] = dto.DistroVersion,
            ["repoName"] = dto.RepoName,
            ["urlPrefix"] = dto.UrlPrefix,
            ["packages"] = packages
        };
        return doc;
    }

    /// <summary>
    /// Rehydrates a DTO from its stored form, dropping nameless packages and empty CVE
    /// lists, then re-sorting packages and secfixes so the result is deterministic.
    /// </summary>
    private static AlpineSecDbDto FromDocument(DocumentObject document)
    {
        var distroVersion = document.GetValue("distroVersion", string.Empty).AsString;
        var repoName = document.GetValue("repoName", string.Empty).AsString;
        var urlPrefix = document.GetValue("urlPrefix", string.Empty).AsString;
        var packages = new List<AlpinePackageDto>();
        if (document.TryGetValue("packages", out var packageValue) && packageValue is DocumentArray packageArray)
        {
            foreach (var element in packageArray.OfType<DocumentObject>())
            {
                var name = element.GetValue("name", string.Empty).AsString;
                if (string.IsNullOrWhiteSpace(name))
                {
                    continue;
                }
                var secfixes = new Dictionary<string, string[]>(StringComparer.OrdinalIgnoreCase);
                if (element.TryGetValue("secfixes", out var secfixesValue) && secfixesValue is DocumentObject secfixesDoc)
                {
                    foreach (var entry in secfixesDoc.Elements)
                    {
                        if (string.IsNullOrWhiteSpace(entry.Name))
                        {
                            continue;
                        }
                        if (entry.Value is not DocumentArray cveArray)
                        {
                            continue;
                        }
                        var cves = cveArray
                            .OfType<DocumentValue>()
                            .Select(static value => value.ToString())
                            .Where(static value => !string.IsNullOrWhiteSpace(value))
                            .Select(static value => value!.Trim())
                            .Distinct(StringComparer.OrdinalIgnoreCase)
                            .OrderBy(static value => value, StringComparer.OrdinalIgnoreCase)
                            .ToArray();
                        if (cves.Length > 0)
                        {
                            secfixes[entry.Name] = cves;
                        }
                    }
                }
                packages.Add(new AlpinePackageDto(name.Trim(), secfixes));
            }
        }
        var orderedPackages = packages
            .OrderBy(pkg => pkg.Name, StringComparer.OrdinalIgnoreCase)
            .Select(static pkg => pkg with { Secfixes = OrderSecfixes(pkg.Secfixes) })
            .ToList();
        return new AlpineSecDbDto(distroVersion, repoName, urlPrefix, orderedPackages);
    }

    /// <summary>Returns a copy of the secfix map with sorted keys and cleaned, sorted CVE arrays.</summary>
    private static IReadOnlyDictionary<string, string[]> OrderSecfixes(IReadOnlyDictionary<string, string[]> secfixes)
    {
        if (secfixes is null || secfixes.Count == 0)
        {
            return new Dictionary<string, string[]>(StringComparer.OrdinalIgnoreCase);
        }
        var ordered = new Dictionary<string, string[]>(StringComparer.OrdinalIgnoreCase);
        foreach (var pair in secfixes.OrderBy(pair => pair.Key, StringComparer.OrdinalIgnoreCase))
        {
            var values = pair.Value ?? Array.Empty<string>();
            ordered[pair.Key] = values
                .Where(static value => !string.IsNullOrWhiteSpace(value))
                .Select(static value => value.Trim())
                .Distinct(StringComparer.OrdinalIgnoreCase)
                .OrderBy(static value => value, StringComparer.OrdinalIgnoreCase)
                .ToArray();
        }
        return ordered;
    }

    // One fetchable secdb resource: release + repository plus the derived stream label and URI.
    private sealed record AlpineTarget(string Release, string Repository, string Stream, Uri Uri);
}

View File

@@ -0,0 +1,20 @@
using System;
using Microsoft.Extensions.DependencyInjection;
using StellaOps.Plugin;
namespace StellaOps.Concelier.Connector.Distro.Alpine;
/// <summary>
/// Plugin entry point that exposes the Alpine secdb connector to the host.
/// </summary>
public sealed class AlpineConnectorPlugin : IConnectorPlugin
{
    /// <summary>Stable source identifier shared with the connector and its stored state.</summary>
    public const string SourceName = "distro-alpine";

    /// <inheritdoc />
    public string Name => SourceName;

    /// <inheritdoc />
    public bool IsAvailable(IServiceProvider services) => services is not null;

    /// <inheritdoc />
    public IFeedConnector Create(IServiceProvider services)
    {
        if (services is null)
        {
            throw new ArgumentNullException(nameof(services));
        }

        // Resolve the connector's dependencies from the host container.
        return ActivatorUtilities.CreateInstance<AlpineConnector>(services);
    }
}

View File

@@ -0,0 +1,53 @@
using System;
using Microsoft.Extensions.Configuration;
using Microsoft.Extensions.DependencyInjection;
using StellaOps.DependencyInjection;
using StellaOps.Concelier.Core.Jobs;
using StellaOps.Concelier.Connector.Distro.Alpine.Configuration;
namespace StellaOps.Concelier.Connector.Distro.Alpine;
/// <summary>
/// Binds Alpine connector options from configuration and schedules the
/// fetch/parse/map jobs on staggered cron expressions.
/// </summary>
public sealed class AlpineDependencyInjectionRoutine : IDependencyInjectionRoutine
{
    // Configuration section from which AlpineOptions values are bound.
    private const string ConfigurationSection = "concelier:sources:alpine";
    // Staggered schedules: fetch every half hour, parse at :07/:37, map at :12/:42,
    // so each stage runs after the previous one has had time to finish.
    private const string FetchSchedule = "*/30 * * * *";
    private const string ParseSchedule = "7,37 * * * *";
    private const string MapSchedule = "12,42 * * * *";
    private static readonly TimeSpan FetchTimeout = TimeSpan.FromMinutes(5);
    private static readonly TimeSpan ParseTimeout = TimeSpan.FromMinutes(6);
    private static readonly TimeSpan MapTimeout = TimeSpan.FromMinutes(8);
    private static readonly TimeSpan LeaseDuration = TimeSpan.FromMinutes(4);

    /// <summary>
    /// Registers the connector (with options bound from configuration and validated
    /// eagerly) plus its three scheduled jobs. Returns <paramref name="services"/>.
    /// </summary>
    public IServiceCollection Register(IServiceCollection services, IConfiguration configuration)
    {
        ArgumentNullException.ThrowIfNull(services);
        ArgumentNullException.ThrowIfNull(configuration);
        services.AddAlpineConnector(options =>
        {
            configuration.GetSection(ConfigurationSection).Bind(options);
            options.Validate();
        });
        var scheduler = new JobSchedulerBuilder(services);
        scheduler
            .AddJob<AlpineFetchJob>(
                AlpineJobKinds.Fetch,
                cronExpression: FetchSchedule,
                timeout: FetchTimeout,
                leaseDuration: LeaseDuration)
            .AddJob<AlpineParseJob>(
                AlpineJobKinds.Parse,
                cronExpression: ParseSchedule,
                timeout: ParseTimeout,
                leaseDuration: LeaseDuration)
            .AddJob<AlpineMapJob>(
                AlpineJobKinds.Map,
                cronExpression: MapSchedule,
                timeout: MapTimeout,
                leaseDuration: LeaseDuration);
        return services;
    }
}

View File

@@ -0,0 +1,35 @@
using System;
using Microsoft.Extensions.DependencyInjection;
using Microsoft.Extensions.Options;
using StellaOps.Concelier.Connector.Common.Http;
using StellaOps.Concelier.Connector.Distro.Alpine.Configuration;
namespace StellaOps.Concelier.Connector.Distro.Alpine;
/// <summary>
/// Service registration helpers for the Alpine secdb connector.
/// </summary>
public static class AlpineServiceCollectionExtensions
{
    /// <summary>
    /// Registers <see cref="AlpineOptions"/> (validated after configuration), the named
    /// source HTTP client restricted to the configured secdb host, and the connector.
    /// </summary>
    public static IServiceCollection AddAlpineConnector(this IServiceCollection services, Action<AlpineOptions> configure)
    {
        if (services is null)
        {
            throw new ArgumentNullException(nameof(services));
        }

        if (configure is null)
        {
            throw new ArgumentNullException(nameof(configure));
        }

        var optionsBuilder = services.AddOptions<AlpineOptions>();
        optionsBuilder.Configure(configure);
        optionsBuilder.PostConfigure(static opts => opts.Validate());

        services.AddSourceHttpClient(AlpineOptions.HttpClientName, (provider, clientOptions) =>
        {
            var alpineOptions = provider.GetRequiredService<IOptions<AlpineOptions>>().Value;
            var authority = alpineOptions.BaseUri.GetLeftPart(UriPartial.Authority);
            if (string.IsNullOrWhiteSpace(authority))
            {
                clientOptions.BaseAddress = alpineOptions.BaseUri;
            }
            else
            {
                clientOptions.BaseAddress = new Uri(authority);
            }

            clientOptions.Timeout = alpineOptions.FetchTimeout;
            clientOptions.UserAgent = alpineOptions.UserAgent;
            // Allowlist only the configured secdb host (offline/air-gap posture).
            clientOptions.AllowedHosts.Clear();
            clientOptions.AllowedHosts.Add(alpineOptions.BaseUri.Host);
            clientOptions.DefaultRequestHeaders["Accept"] = "application/json";
        });

        services.AddTransient<AlpineConnector>();
        return services;
    }
}

View File

@@ -0,0 +1,5 @@
using System.Runtime.CompilerServices;
using StellaOps.Plugin.Versioning;
[assembly: InternalsVisibleTo("StellaOps.Concelier.Connector.Distro.Alpine.Tests")]
[assembly: StellaPluginVersion("1.0.0", MinimumHostVersion = "1.0.0", MaximumHostVersion = "1.99.99")]

View File

@@ -0,0 +1,77 @@
using System;
using System.Linq;
namespace StellaOps.Concelier.Connector.Distro.Alpine.Configuration;
/// <summary>
/// Configuration for the Alpine secdb connector: source location, fetch targets,
/// and HTTP pacing/limits. Call <see cref="Validate"/> after binding.
/// </summary>
public sealed class AlpineOptions
{
    /// <summary>Name of the named HTTP client used for secdb requests.</summary>
    public const string HttpClientName = "concelier.alpine";

    /// <summary>
    /// Base URI for Alpine secdb JSON content.
    /// </summary>
    public Uri BaseUri { get; set; } = new("https://secdb.alpinelinux.org/");

    /// <summary>
    /// Releases to fetch (for example: v3.18, v3.19, v3.20, edge).
    /// </summary>
    public string[] Releases { get; set; } = new[] { "v3.18", "v3.19", "v3.20", "edge" };

    /// <summary>
    /// Repository names to fetch (for example: main, community).
    /// </summary>
    public string[] Repositories { get; set; } = new[] { "main", "community" };

    /// <summary>
    /// Cap on release+repo documents fetched in a single run.
    /// </summary>
    public int MaxDocumentsPerFetch { get; set; } = 20;

    /// <summary>
    /// Fetch timeout for each secdb request.
    /// </summary>
    public TimeSpan FetchTimeout { get; set; } = TimeSpan.FromSeconds(45);

    /// <summary>
    /// Optional pacing delay between secdb requests.
    /// </summary>
    public TimeSpan RequestDelay { get; set; } = TimeSpan.Zero;

    /// <summary>
    /// Custom user-agent for secdb requests.
    /// </summary>
    public string UserAgent { get; set; } = "StellaOps.Concelier.Alpine/0.1 (+https://stella-ops.org)";

    /// <summary>
    /// Validates the configured values.
    /// </summary>
    /// <exception cref="InvalidOperationException">The first constraint violation found.</exception>
    public void Validate()
    {
        if (BaseUri is null || !BaseUri.IsAbsoluteUri)
        {
            throw new InvalidOperationException("Alpine BaseUri must be an absolute URI.");
        }
        if (MaxDocumentsPerFetch <= 0 || MaxDocumentsPerFetch > 200)
        {
            throw new InvalidOperationException("MaxDocumentsPerFetch must be between 1 and 200.");
        }
        if (FetchTimeout <= TimeSpan.Zero || FetchTimeout > TimeSpan.FromMinutes(5))
        {
            // The check allows exactly five minutes, so the message states the
            // inclusive bound (it previously claimed "less than five minutes").
            throw new InvalidOperationException("FetchTimeout must be positive and no longer than five minutes.");
        }
        if (RequestDelay < TimeSpan.Zero || RequestDelay > TimeSpan.FromSeconds(10))
        {
            throw new InvalidOperationException("RequestDelay must be between 0 and 10 seconds.");
        }
        if (Releases is null || Releases.Length == 0 || Releases.All(static value => string.IsNullOrWhiteSpace(value)))
        {
            throw new InvalidOperationException("At least one Alpine release must be configured.");
        }
        if (Repositories is null || Repositories.Length == 0 || Repositories.All(static value => string.IsNullOrWhiteSpace(value)))
        {
            throw new InvalidOperationException("At least one Alpine repository must be configured.");
        }
    }
}

View File

@@ -0,0 +1,13 @@
using System.Collections.Generic;
namespace StellaOps.Concelier.Connector.Distro.Alpine.Dto;
/// <summary>
/// Parsed Alpine secdb payload: the distro release (for example "v3.20"), the
/// repository name (for example "main"), the upstream URL prefix, and the
/// per-package fix entries.
/// </summary>
internal sealed record AlpineSecDbDto(
    string DistroVersion,
    string RepoName,
    string UrlPrefix,
    IReadOnlyList<AlpinePackageDto> Packages);
/// <summary>
/// One secdb package entry: the package name plus the "secfixes" map of
/// fixed version → advisory/CVE identifiers resolved at that version.
/// </summary>
internal sealed record AlpinePackageDto(
    string Name,
    IReadOnlyDictionary<string, string[]> Secfixes);

View File

@@ -0,0 +1,119 @@
using System;
using System.Collections.Generic;
using System.Linq;
using StellaOps.Concelier.Documents;
namespace StellaOps.Concelier.Connector.Distro.Alpine.Internal;
/// <summary>
/// Persisted connector cursor: document ids awaiting parse, ids awaiting map,
/// and per-URI HTTP validator cache entries (case-insensitive keys).
/// </summary>
internal sealed record AlpineCursor(
    IReadOnlyCollection<Guid> PendingDocuments,
    IReadOnlyCollection<Guid> PendingMappings,
    IReadOnlyDictionary<string, AlpineFetchCacheEntry> FetchCache)
{
    private static readonly IReadOnlyCollection<Guid> EmptyGuidList = Array.Empty<Guid>();
    private static readonly IReadOnlyDictionary<string, AlpineFetchCacheEntry> EmptyCache =
        new Dictionary<string, AlpineFetchCacheEntry>(StringComparer.OrdinalIgnoreCase);

    /// <summary>Cursor with no pending work and no fetch cache.</summary>
    public static AlpineCursor Empty { get; } = new(EmptyGuidList, EmptyGuidList, EmptyCache);

    /// <summary>
    /// Rehydrates a cursor from its stored form; null/empty input yields <see cref="Empty"/>.
    /// Unparseable guid entries are silently skipped.
    /// </summary>
    public static AlpineCursor FromDocument(DocumentObject? document)
    {
        if (document is null || document.ElementCount == 0)
        {
            return Empty;
        }
        var pendingDocuments = ReadGuidSet(document, "pendingDocuments");
        var pendingMappings = ReadGuidSet(document, "pendingMappings");
        var cache = ReadCache(document);
        return new AlpineCursor(pendingDocuments, pendingMappings, cache);
    }

    /// <summary>
    /// Serializes the cursor for storage; the "fetchCache" field is omitted when empty.
    /// </summary>
    public DocumentObject ToDocumentObject()
    {
        var doc = new DocumentObject
        {
            ["pendingDocuments"] = new DocumentArray(PendingDocuments.Select(id => id.ToString())),
            ["pendingMappings"] = new DocumentArray(PendingMappings.Select(id => id.ToString()))
        };
        if (FetchCache.Count > 0)
        {
            var cacheDoc = new DocumentObject();
            foreach (var (key, entry) in FetchCache)
            {
                cacheDoc[key] = entry.ToDocumentObject();
            }
            doc["fetchCache"] = cacheDoc;
        }
        return doc;
    }

    /// <summary>Returns a copy with the pending-parse set replaced (de-duplicated).</summary>
    public AlpineCursor WithPendingDocuments(IEnumerable<Guid> ids)
        => this with { PendingDocuments = ids?.Distinct().ToArray() ?? EmptyGuidList };

    /// <summary>Returns a copy with the pending-map set replaced (de-duplicated).</summary>
    public AlpineCursor WithPendingMappings(IEnumerable<Guid> ids)
        => this with { PendingMappings = ids?.Distinct().ToArray() ?? EmptyGuidList };

    /// <summary>Returns a copy with the fetch cache replaced (copied, case-insensitive keys).</summary>
    public AlpineCursor WithFetchCache(IDictionary<string, AlpineFetchCacheEntry>? cache)
    {
        if (cache is null || cache.Count == 0)
        {
            return this with { FetchCache = EmptyCache };
        }
        return this with { FetchCache = new Dictionary<string, AlpineFetchCacheEntry>(cache, StringComparer.OrdinalIgnoreCase) };
    }

    /// <summary>
    /// Looks up a cache entry by resource key. On a miss, <paramref name="entry"/> is
    /// always <see cref="AlpineFetchCacheEntry.Empty"/>, never null.
    /// </summary>
    public bool TryGetCache(string key, out AlpineFetchCacheEntry entry)
    {
        // Previously the non-empty-cache miss path used `out entry!`, leaving entry null
        // despite the non-nullable signature; now every miss yields Empty.
        if (FetchCache.Count > 0 && FetchCache.TryGetValue(key, out var found))
        {
            entry = found;
            return true;
        }
        entry = AlpineFetchCacheEntry.Empty;
        return false;
    }

    /// <summary>Reads an array field of guid strings, skipping entries that fail to parse.</summary>
    private static IReadOnlyCollection<Guid> ReadGuidSet(DocumentObject document, string field)
    {
        if (!document.TryGetValue(field, out var value) || value is not DocumentArray array)
        {
            return EmptyGuidList;
        }
        var list = new List<Guid>(array.Count);
        foreach (var element in array)
        {
            if (Guid.TryParse(element.ToString(), out var guid))
            {
                list.Add(guid);
            }
        }
        return list;
    }

    /// <summary>Reads the "fetchCache" field, ignoring entries that are not objects.</summary>
    private static IReadOnlyDictionary<string, AlpineFetchCacheEntry> ReadCache(DocumentObject document)
    {
        if (!document.TryGetValue("fetchCache", out var value) || value is not DocumentObject cacheDoc || cacheDoc.ElementCount == 0)
        {
            return EmptyCache;
        }
        var cache = new Dictionary<string, AlpineFetchCacheEntry>(StringComparer.OrdinalIgnoreCase);
        foreach (var element in cacheDoc.Elements)
        {
            if (element.Value is DocumentObject entryDoc)
            {
                cache[element.Name] = AlpineFetchCacheEntry.FromDocument(entryDoc);
            }
        }
        return cache;
    }
}

View File

@@ -0,0 +1,77 @@
using System;
using StellaOps.Concelier.Documents;
using StorageContracts = StellaOps.Concelier.Storage.Contracts;
namespace StellaOps.Concelier.Connector.Distro.Alpine.Internal;
/// <summary>
/// HTTP validator pair (ETag + Last-Modified) cached per secdb resource so
/// conditional fetches can skip unchanged documents.
/// </summary>
internal sealed record AlpineFetchCacheEntry(string? ETag, DateTimeOffset? LastModified)
{
    /// <summary>Entry with no validators.</summary>
    public static AlpineFetchCacheEntry Empty { get; } = new(null, null);

    /// <summary>Captures the validators from a stored document record.</summary>
    public static AlpineFetchCacheEntry FromDocument(StorageContracts.StorageDocument document)
        => new(document.Etag, document.LastModified);

    /// <summary>
    /// Rehydrates an entry from its stored form; accepts "lastModified" persisted either
    /// as a native datetime or as a parseable string, normalized to UTC. Null/empty input
    /// yields <see cref="Empty"/>.
    /// </summary>
    public static AlpineFetchCacheEntry FromDocument(DocumentObject document)
    {
        if (document is null || document.ElementCount == 0)
        {
            return Empty;
        }
        string? etag = null;
        DateTimeOffset? lastModified = null;
        if (document.TryGetValue("etag", out var etagValue) && etagValue.DocumentType == DocumentType.String)
        {
            etag = etagValue.AsString;
        }
        if (document.TryGetValue("lastModified", out var modifiedValue))
        {
            lastModified = modifiedValue.DocumentType switch
            {
                // Native datetime: force UTC kind so round-tripping stays deterministic.
                DocumentType.DateTime => DateTime.SpecifyKind(modifiedValue.ToUniversalTime(), DateTimeKind.Utc),
                DocumentType.String when DateTimeOffset.TryParse(modifiedValue.AsString, out var parsed) => parsed.ToUniversalTime(),
                _ => null
            };
        }
        return new AlpineFetchCacheEntry(etag, lastModified);
    }

    /// <summary>
    /// Serializes the entry for storage, omitting absent validators; Last-Modified is
    /// written as a UTC datetime.
    /// </summary>
    public DocumentObject ToDocumentObject()
    {
        var doc = new DocumentObject();
        if (!string.IsNullOrWhiteSpace(ETag))
        {
            doc["etag"] = ETag;
        }
        if (LastModified.HasValue)
        {
            doc["lastModified"] = LastModified.Value.UtcDateTime;
        }
        return doc;
    }

    /// <summary>
    /// True when the document carries the same validators as this entry: ETags equal
    /// ordinally, and Last-Modified values equal in UTC or absent on both sides.
    /// </summary>
    public bool Matches(StorageContracts.StorageDocument document)
    {
        if (document is null)
        {
            return false;
        }
        if (!string.Equals(ETag, document.Etag, StringComparison.Ordinal))
        {
            return false;
        }
        if (LastModified.HasValue && document.LastModified.HasValue)
        {
            return LastModified.Value.UtcDateTime == document.LastModified.Value.UtcDateTime;
        }
        // One side missing Last-Modified counts as a mismatch even with equal ETags.
        return !LastModified.HasValue && !document.LastModified.HasValue;
    }
}

View File

@@ -0,0 +1,348 @@
using System;
using System.Collections.Generic;
using System.Linq;
using StellaOps.Concelier.Connector.Distro.Alpine.Dto;
using StellaOps.Concelier.Models;
using StellaOps.Concelier.Storage;
namespace StellaOps.Concelier.Connector.Distro.Alpine.Internal;
internal static class AlpineMapper
{
/// <summary>
/// Maps a parsed Alpine secdb document into canonical advisories (one per CVE/alias),
/// grouping APK fixed-version ranges per package. Output is deterministic: advisory keys,
/// aliases, platforms, and package identifiers are all sorted ordinal-ignore-case.
/// </summary>
/// <param name="dto">Parsed secdb payload (distro version, repo name, packages with secfixes).</param>
/// <param name="document">Source document record supplying URI and fetch timestamps for provenance.</param>
/// <param name="recordedAt">Timestamp stamped onto mapping/range/package provenance entries.</param>
/// <returns>Deterministically ordered advisories; empty when the payload carries no usable fixes.</returns>
public static IReadOnlyList<Advisory> Map(AlpineSecDbDto dto, DocumentRecord document, DateTimeOffset recordedAt)
{
    ArgumentNullException.ThrowIfNull(dto);
    ArgumentNullException.ThrowIfNull(document);

    if (dto.Packages is null || dto.Packages.Count == 0)
    {
        return Array.Empty<Advisory>();
    }

    var platform = BuildPlatform(dto);
    var advisoryBuckets = new Dictionary<string, AdvisoryAccumulator>(StringComparer.OrdinalIgnoreCase);

    foreach (var package in dto.Packages)
    {
        if (string.IsNullOrWhiteSpace(package.Name) || package.Secfixes is null || package.Secfixes.Count == 0)
        {
            continue;
        }

        var packageName = package.Name.Trim();
        // Invariant across the version/id loops below; compute once per package.
        var packageKey = BuildPackageKey(platform, packageName);

        foreach (var (fixedVersion, ids) in package.Secfixes.OrderBy(kvp => kvp.Key, StringComparer.OrdinalIgnoreCase))
        {
            if (string.IsNullOrWhiteSpace(fixedVersion) || ids is null || ids.Length == 0)
            {
                continue;
            }

            var versionValue = fixedVersion.Trim();

            // Vendor extensions and range primitives depend only on the document metadata and
            // the fixed version, so build them once per version instead of once per CVE id.
            var vendorExtensions = BuildVendorExtensions(dto, versionValue);
            var primitives = vendorExtensions.Count == 0
                ? null
                : new RangePrimitives(
                    SemVer: null,
                    Nevra: null,
                    Evr: null,
                    VendorExtensions: vendorExtensions);
            var rangeExpression = $"fixed:{versionValue}";

            foreach (var rawId in ids)
            {
                if (string.IsNullOrWhiteSpace(rawId))
                {
                    continue;
                }

                var normalizedId = NormalizeAlias(rawId);
                var advisoryKey = BuildAdvisoryKey(normalizedId);
                if (string.IsNullOrWhiteSpace(advisoryKey))
                {
                    continue;
                }

                if (!advisoryBuckets.TryGetValue(advisoryKey, out var bucket))
                {
                    bucket = new AdvisoryAccumulator(advisoryKey, BuildAliases(advisoryKey, normalizedId));
                    advisoryBuckets[advisoryKey] = bucket;
                }
                else
                {
                    bucket.Aliases.Add(normalizedId);
                    bucket.Aliases.Add(advisoryKey);
                }

                if (!bucket.Packages.TryGetValue(packageKey, out var pkgAccumulator))
                {
                    pkgAccumulator = new PackageAccumulator(packageName, platform);
                    bucket.Packages[packageKey] = pkgAccumulator;
                }

                var rangeProvenance = new AdvisoryProvenance(
                    AlpineConnectorPlugin.SourceName,
                    "range",
                    BuildRangeProvenanceKey(normalizedId, platform, packageName, versionValue),
                    recordedAt);
                var packageProvenance = new AdvisoryProvenance(
                    AlpineConnectorPlugin.SourceName,
                    "affected",
                    BuildPackageProvenanceKey(normalizedId, platform, packageName),
                    recordedAt);

                var range = new AffectedVersionRange(
                    rangeKind: "apk",
                    introducedVersion: null,
                    fixedVersion: versionValue,
                    lastAffectedVersion: null,
                    rangeExpression: rangeExpression,
                    provenance: rangeProvenance,
                    primitives: primitives);

                pkgAccumulator.Ranges.Add(range);
                pkgAccumulator.Provenance.Add(packageProvenance);
                pkgAccumulator.Statuses.Add(new AffectedPackageStatus("resolved", packageProvenance));

                var normalizedRule = range.ToNormalizedVersionRule(BuildNormalizedNote(platform));
                if (normalizedRule is not null)
                {
                    pkgAccumulator.NormalizedRules.Add(normalizedRule);
                }
            }
        }
    }

    if (advisoryBuckets.Count == 0)
    {
        return Array.Empty<Advisory>();
    }

    var fetchProvenance = new AdvisoryProvenance(
        AlpineConnectorPlugin.SourceName,
        "document",
        document.Uri,
        document.FetchedAt.ToUniversalTime());
    // Fall back to the fetch time when the upstream document carries no Last-Modified.
    var published = document.LastModified?.ToUniversalTime() ?? document.FetchedAt.ToUniversalTime();
    // The reference list is identical for every advisory in this document; build it once
    // instead of once per bucket.
    var references = BuildReferences(document, recordedAt);

    var advisories = new List<Advisory>(advisoryBuckets.Count);
    foreach (var bucket in advisoryBuckets.Values.OrderBy(b => b.AdvisoryKey, StringComparer.OrdinalIgnoreCase))
    {
        var aliases = bucket.Aliases
            .Where(static alias => !string.IsNullOrWhiteSpace(alias))
            .Select(static alias => alias.Trim())
            .Distinct(StringComparer.OrdinalIgnoreCase)
            .OrderBy(static alias => alias, StringComparer.OrdinalIgnoreCase)
            .ToArray();

        var packages = bucket.Packages.Values
            .Select(static pkg => pkg.Build())
            .Where(static pkg => pkg.VersionRanges.Length > 0)
            .OrderBy(static pkg => pkg.Platform, StringComparer.OrdinalIgnoreCase)
            .ThenBy(static pkg => pkg.Identifier, StringComparer.OrdinalIgnoreCase)
            .ToArray();

        var mappingProvenance = new AdvisoryProvenance(
            AlpineConnectorPlugin.SourceName,
            "mapping",
            bucket.AdvisoryKey,
            recordedAt);

        // AOC: no severity/summary derivation — upstream secdb carries neither.
        advisories.Add(new Advisory(
            advisoryKey: bucket.AdvisoryKey,
            title: DetermineTitle(aliases, bucket.AdvisoryKey),
            summary: null,
            language: "en",
            published: published,
            modified: recordedAt > published ? recordedAt : published,
            severity: null,
            exploitKnown: false,
            aliases: aliases,
            references: references,
            affectedPackages: packages,
            cvssMetrics: Array.Empty<CvssMetric>(),
            provenance: new[] { fetchProvenance, mappingProvenance }));
    }

    return advisories;
}
/// <summary>
/// Composes a platform label from the secdb distro version and repository name
/// (e.g. "release/repo"); returns null when both components are blank.
/// </summary>
private static string? BuildPlatform(AlpineSecDbDto dto)
{
    var release = dto.DistroVersion?.Trim() ?? string.Empty;
    var repo = dto.RepoName?.Trim() ?? string.Empty;

    return (release.Length, repo.Length) switch
    {
        (0, 0) => null,
        (0, _) => repo,
        (_, 0) => release,
        _ => $"{release}/{repo}",
    };
}
/// <summary>Picks the advisory title: the first (pre-sorted) alias, falling back to the advisory key.</summary>
private static string DetermineTitle(string[] aliases, string advisoryKey)
    => aliases.Length > 0 ? aliases[0] : advisoryKey;
/// <summary>Builds the single advisory reference pointing back at the fetched secdb document.</summary>
private static AdvisoryReference[] BuildReferences(DocumentRecord document, DateTimeOffset recordedAt)
{
    var provenance = new AdvisoryProvenance(AlpineConnectorPlugin.SourceName, "reference", document.Uri, recordedAt);
    return
    [
        new AdvisoryReference(document.Uri, kind: "advisory", sourceTag: "secdb", summary: null, provenance: provenance),
    ];
}
/// <summary>
/// Collects alpine.* vendor extension entries (distro version, repo, fixed version, URL prefix),
/// skipping blank values.
/// </summary>
private static Dictionary<string, string> BuildVendorExtensions(AlpineSecDbDto dto, string fixedVersion)
{
    var extensions = new Dictionary<string, string>(StringComparer.Ordinal);
    var candidates = new (string Key, string? Value)[]
    {
        ("alpine.distroversion", dto.DistroVersion),
        ("alpine.repo", dto.RepoName),
        ("alpine.fixed", fixedVersion),
        ("alpine.urlprefix", dto.UrlPrefix),
    };

    foreach (var (key, value) in candidates)
    {
        AddExtension(extensions, key, value);
    }

    return extensions;
}
/// <summary>Stores the trimmed value under <paramref name="key"/>; null/blank values are skipped.</summary>
private static void AddExtension(IDictionary<string, string> extensions, string key, string? value)
{
    if (string.IsNullOrWhiteSpace(value))
    {
        return;
    }

    extensions[key] = value.Trim();
}
/// <summary>Trims the identifier and upper-cases CVE ids; other id schemes keep their original casing.</summary>
private static string NormalizeAlias(string value)
{
    var trimmed = value.Trim();
    return trimmed.StartsWith("CVE-", StringComparison.OrdinalIgnoreCase)
        ? trimmed.ToUpperInvariant()
        : trimmed;
}
/// <summary>Builds the canonical "alpine/&lt;id&gt;" advisory key (lower-cased); empty for blank ids.</summary>
private static string BuildAdvisoryKey(string normalizedId)
    => string.IsNullOrWhiteSpace(normalizedId)
        ? string.Empty
        : $"alpine/{normalizedId.ToLowerInvariant()}";
/// <summary>Seeds the alias set (case-insensitive) with the advisory key and normalized upstream id.</summary>
private static HashSet<string> BuildAliases(string advisoryKey, string normalizedId)
{
    var aliases = new HashSet<string>(StringComparer.OrdinalIgnoreCase);
    foreach (var candidate in new[] { advisoryKey, normalizedId })
    {
        if (!string.IsNullOrWhiteSpace(candidate))
        {
            aliases.Add(candidate);
        }
    }

    return aliases;
}
/// <summary>Builds the "alpine:&lt;platform&gt;" note for normalized version rules; null when no platform.</summary>
private static string? BuildNormalizedNote(string? platform)
{
    if (string.IsNullOrWhiteSpace(platform))
    {
        return null;
    }

    return $"alpine:{platform.Trim()}";
}
/// <summary>Builds the bucket key for a package, scoped by platform when one is present.</summary>
private static string BuildPackageKey(string? platform, string package)
{
    if (string.IsNullOrWhiteSpace(platform))
    {
        return package;
    }

    return $"{platform}:{package}";
}
/// <summary>Builds the deterministic provenance key for a version range, including platform when set.</summary>
private static string BuildRangeProvenanceKey(string advisoryId, string? platform, string package, string fixedVersion)
    => string.IsNullOrWhiteSpace(platform)
        ? $"{advisoryId}:{package}:{fixedVersion}"
        : $"{advisoryId}:{platform}:{package}:{fixedVersion}";
/// <summary>Builds the deterministic provenance key for an affected package, including platform when set.</summary>
private static string BuildPackageProvenanceKey(string advisoryId, string? platform, string package)
    => string.IsNullOrWhiteSpace(platform)
        ? $"{advisoryId}:{package}"
        : $"{advisoryId}:{platform}:{package}";
/// <summary>
/// Mutable per-advisory bucket used during mapping: collects aliases and
/// per-package accumulators keyed case-insensitively by platform:package.
/// </summary>
private sealed class AdvisoryAccumulator
{
    public AdvisoryAccumulator(string advisoryKey, HashSet<string> aliases)
    {
        AdvisoryKey = advisoryKey;
        Aliases = aliases;
    }

    public string AdvisoryKey { get; }

    public HashSet<string> Aliases { get; }

    public Dictionary<string, PackageAccumulator> Packages { get; } = new(StringComparer.OrdinalIgnoreCase);
}
/// <summary>
/// Mutable per-package bucket used during mapping: gathers version ranges, statuses,
/// provenance, and normalized rules, then materializes an <see cref="AffectedPackage"/>.
/// </summary>
private sealed class PackageAccumulator
{
    public PackageAccumulator(string identifier, string? platform)
    {
        Identifier = identifier;
        Platform = platform;
    }

    public string Identifier { get; }

    public string? Platform { get; }

    public List<AffectedVersionRange> Ranges { get; } = new();

    public List<AffectedPackageStatus> Statuses { get; } = new();

    public List<AdvisoryProvenance> Provenance { get; } = new();

    public List<NormalizedVersionRule> NormalizedRules { get; } = new();

    /// <summary>Materializes the accumulated state as an APK affected-package record.</summary>
    public AffectedPackage Build()
    {
        return new AffectedPackage(
            type: AffectedPackageTypes.Apk,
            identifier: Identifier,
            platform: Platform,
            versionRanges: Ranges,
            statuses: Statuses,
            provenance: Provenance,
            normalizedVersions: NormalizedRules);
    }
}
}

View File

@@ -0,0 +1,148 @@
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text.Json;
using StellaOps.Concelier.Connector.Distro.Alpine.Dto;
namespace StellaOps.Concelier.Connector.Distro.Alpine.Internal;
/// <summary>
/// Parses Alpine secdb JSON payloads into <see cref="AlpineSecDbDto"/> with deterministic
/// ordering: packages sorted by name, secfix version keys sorted, and CVE lists trimmed,
/// deduplicated, and sorted (all ordinal-ignore-case).
/// </summary>
internal static class AlpineSecDbParser
{
    /// <summary>
    /// Parses a raw secdb JSON document.
    /// </summary>
    /// <param name="json">Raw secdb payload; must be a non-blank JSON object.</param>
    /// <returns>Normalized DTO with deterministically ordered packages and secfixes.</returns>
    /// <exception cref="ArgumentException">Thrown when <paramref name="json"/> is null or blank.</exception>
    /// <exception cref="FormatException">Thrown when the root element is not a JSON object.</exception>
    public static AlpineSecDbDto Parse(string json)
    {
        if (string.IsNullOrWhiteSpace(json))
        {
            throw new ArgumentException("SecDB payload cannot be empty.", nameof(json));
        }

        using var document = JsonDocument.Parse(json);
        var root = document.RootElement;
        if (root.ValueKind != JsonValueKind.Object)
        {
            throw new FormatException("SecDB payload must be a JSON object.");
        }

        var distroVersion = ReadString(root, "distroversion") ?? string.Empty;
        var repoName = ReadString(root, "reponame") ?? string.Empty;
        var urlPrefix = ReadString(root, "urlprefix") ?? string.Empty;

        var packages = new List<AlpinePackageDto>();
        if (root.TryGetProperty("packages", out var packagesElement) && packagesElement.ValueKind == JsonValueKind.Array)
        {
            foreach (var element in packagesElement.EnumerateArray())
            {
                // Each entry is expected to look like { "pkg": { "name": ..., "secfixes": { ... } } };
                // malformed or nameless entries are skipped rather than failing the whole document.
                if (element.ValueKind != JsonValueKind.Object)
                {
                    continue;
                }

                if (!element.TryGetProperty("pkg", out var pkgElement) || pkgElement.ValueKind != JsonValueKind.Object)
                {
                    continue;
                }

                var name = ReadString(pkgElement, "name");
                if (string.IsNullOrWhiteSpace(name))
                {
                    continue;
                }

                var secfixes = ReadSecfixes(pkgElement);
                packages.Add(new AlpinePackageDto(name.Trim(), secfixes));
            }
        }

        // Determinism: packages sorted by name; each secfix dictionary re-keyed in sorted order.
        var orderedPackages = packages
            .OrderBy(pkg => pkg.Name, StringComparer.OrdinalIgnoreCase)
            .Select(static pkg => pkg with { Secfixes = OrderSecfixes(pkg.Secfixes) })
            .ToList();

        return new AlpineSecDbDto(distroVersion, repoName, urlPrefix, orderedPackages);
    }

    /// <summary>
    /// Reads the "secfixes" object of a package: fixed-version keys mapped to their CVE id arrays.
    /// Blank version keys and empty id lists are dropped.
    /// </summary>
    private static IReadOnlyDictionary<string, string[]> ReadSecfixes(JsonElement pkgElement)
    {
        if (!pkgElement.TryGetProperty("secfixes", out var fixesElement) || fixesElement.ValueKind != JsonValueKind.Object)
        {
            return new Dictionary<string, string[]>(StringComparer.OrdinalIgnoreCase);
        }

        var result = new Dictionary<string, string[]>(StringComparer.OrdinalIgnoreCase);
        foreach (var property in fixesElement.EnumerateObject())
        {
            var version = property.Name?.Trim();
            if (string.IsNullOrWhiteSpace(version))
            {
                continue;
            }

            var cves = ReadStringArray(property.Value);
            if (cves.Length == 0)
            {
                continue;
            }

            result[version] = cves;
        }

        return result;
    }

    /// <summary>
    /// Reads a JSON string array into a trimmed, deduplicated (case-insensitive), sorted array.
    /// Non-array input and non-string/blank entries yield an empty result or are skipped.
    /// </summary>
    private static string[] ReadStringArray(JsonElement element)
    {
        if (element.ValueKind != JsonValueKind.Array)
        {
            return Array.Empty<string>();
        }

        var items = new HashSet<string>(StringComparer.OrdinalIgnoreCase);
        foreach (var entry in element.EnumerateArray())
        {
            if (entry.ValueKind != JsonValueKind.String)
            {
                continue;
            }

            var value = entry.GetString();
            if (string.IsNullOrWhiteSpace(value))
            {
                continue;
            }

            items.Add(value.Trim());
        }

        return items.OrderBy(static value => value, StringComparer.OrdinalIgnoreCase).ToArray();
    }

    /// <summary>Reads a string property; null when absent or not a JSON string.</summary>
    private static string? ReadString(JsonElement element, string name)
    {
        if (!element.TryGetProperty(name, out var value) || value.ValueKind != JsonValueKind.String)
        {
            return null;
        }

        return value.GetString();
    }

    /// <summary>
    /// Returns a copy of <paramref name="secfixes"/> whose keys are inserted in sorted order.
    /// The values were already trimmed, deduplicated, and sorted by <see cref="ReadStringArray"/>
    /// (the only producer feeding this method via <see cref="Parse"/>), so re-normalizing them
    /// here would be redundant work.
    /// </summary>
    private static IReadOnlyDictionary<string, string[]> OrderSecfixes(IReadOnlyDictionary<string, string[]> secfixes)
    {
        var ordered = new Dictionary<string, string[]>(StringComparer.OrdinalIgnoreCase);
        if (secfixes is null || secfixes.Count == 0)
        {
            return ordered;
        }

        foreach (var pair in secfixes.OrderBy(pair => pair.Key, StringComparer.OrdinalIgnoreCase))
        {
            ordered[pair.Key] = pair.Value;
        }

        return ordered;
    }
}

View File

@@ -0,0 +1,46 @@
using System;
using System.Threading;
using System.Threading.Tasks;
using StellaOps.Concelier.Core.Jobs;
namespace StellaOps.Concelier.Connector.Distro.Alpine;
/// <summary>
/// Job kind identifiers registered for the Alpine connector's three pipeline stages
/// (fetch, parse, map). These string values are the scheduler-facing contract.
/// </summary>
internal static class AlpineJobKinds
{
    public const string Fetch = "source:alpine:fetch";
    public const string Parse = "source:alpine:parse";
    public const string Map = "source:alpine:map";
}
/// <summary>Scheduler job that delegates to the Alpine connector's fetch stage.</summary>
internal sealed class AlpineFetchJob : IJob
{
    private readonly AlpineConnector _connector;

    public AlpineFetchJob(AlpineConnector connector)
    {
        _connector = connector ?? throw new ArgumentNullException(nameof(connector));
    }

    public Task ExecuteAsync(JobExecutionContext context, CancellationToken cancellationToken)
    {
        return _connector.FetchAsync(context.Services, cancellationToken);
    }
}
/// <summary>Scheduler job that delegates to the Alpine connector's parse stage.</summary>
internal sealed class AlpineParseJob : IJob
{
    private readonly AlpineConnector _connector;

    public AlpineParseJob(AlpineConnector connector)
    {
        _connector = connector ?? throw new ArgumentNullException(nameof(connector));
    }

    public Task ExecuteAsync(JobExecutionContext context, CancellationToken cancellationToken)
    {
        return _connector.ParseAsync(context.Services, cancellationToken);
    }
}
/// <summary>Scheduler job that delegates to the Alpine connector's map stage.</summary>
internal sealed class AlpineMapJob : IJob
{
    private readonly AlpineConnector _connector;

    public AlpineMapJob(AlpineConnector connector)
    {
        _connector = connector ?? throw new ArgumentNullException(nameof(connector));
    }

    public Task ExecuteAsync(JobExecutionContext context, CancellationToken cancellationToken)
    {
        return _connector.MapAsync(context.Services, cancellationToken);
    }
}

View File

@@ -0,0 +1,17 @@
<?xml version='1.0' encoding='utf-8'?>
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<TargetFramework>net10.0</TargetFramework>
<ImplicitUsings>enable</ImplicitUsings>
<Nullable>enable</Nullable>
</PropertyGroup>
<ItemGroup>
<ProjectReference Include="../../../__Libraries/StellaOps.Plugin/StellaOps.Plugin.csproj" />
<ProjectReference Include="../StellaOps.Concelier.Connector.Common/StellaOps.Concelier.Connector.Common.csproj" />
<ProjectReference Include="../StellaOps.Concelier.Models/StellaOps.Concelier.Models.csproj" />
<ProjectReference Include="../StellaOps.Concelier.Normalization/StellaOps.Concelier.Normalization.csproj" />
</ItemGroup>
</Project>

View File

@@ -0,0 +1,13 @@
# Concelier Alpine Connector Tasks

Local status mirror for `docs/implplan/SPRINT_2000_0003_0001_alpine_connector.md`.

| Task ID | Status | Notes |
| --- | --- | --- |
| T1 | DONE | APK version comparer + tests. |
| T2 | DONE | SecDB parser. |
| T3 | DOING | Alpine connector fetch/parse/map. |
| T4 | TODO | DI + config + health check wiring. |
| T5 | TODO | Tests, fixtures, and snapshots. |

Last synced: 2025-12-22 (UTC).