Rename Concelier Source modules to Connector

2025-10-18 20:11:18 +03:00
parent 0137856fdb
commit 6524626230
789 changed files with 1489 additions and 1489 deletions

View File

@@ -0,0 +1,3 @@
using System.Runtime.CompilerServices;
[assembly: InternalsVisibleTo("StellaOps.Concelier.Connector.Distro.Suse.Tests")]

View File

@@ -0,0 +1,86 @@
using System;
namespace StellaOps.Concelier.Connector.Distro.Suse.Configuration;
public sealed class SuseOptions
{
public const string HttpClientName = "concelier.suse";
/// <summary>
/// CSV index enumerating CSAF advisories with their last modification timestamps.
/// </summary>
public Uri ChangesEndpoint { get; set; } = new("https://ftp.suse.com/pub/projects/security/csaf/changes.csv");
/// <summary>
/// Base URI where individual CSAF advisories reside (filename appended verbatim).
/// </summary>
public Uri AdvisoryBaseUri { get; set; } = new("https://ftp.suse.com/pub/projects/security/csaf/");
/// <summary>
/// Maximum advisories to fetch per run to bound backfill effort.
/// </summary>
public int MaxAdvisoriesPerFetch { get; set; } = 40;
/// <summary>
/// Initial history window for first-time execution.
/// </summary>
public TimeSpan InitialBackfill { get; set; } = TimeSpan.FromDays(30);
/// <summary>
/// Overlap window applied when resuming to capture late edits.
/// </summary>
public TimeSpan ResumeOverlap { get; set; } = TimeSpan.FromDays(3);
/// <summary>
/// Optional delay between advisory detail fetches.
/// </summary>
public TimeSpan RequestDelay { get; set; } = TimeSpan.Zero;
/// <summary>
/// Custom user agent presented to SUSE endpoints.
/// </summary>
public string UserAgent { get; set; } = "StellaOps.Concelier.Suse/0.1 (+https://stella-ops.org)";
/// <summary>
/// Timeout applied to HTTP requests issued by this connector (defaults to 45 seconds).
/// </summary>
public TimeSpan FetchTimeout { get; set; } = TimeSpan.FromSeconds(45);
public void Validate()
{
if (ChangesEndpoint is null || !ChangesEndpoint.IsAbsoluteUri)
{
throw new InvalidOperationException("SuseOptions.ChangesEndpoint must be an absolute URI.");
}
if (AdvisoryBaseUri is null || !AdvisoryBaseUri.IsAbsoluteUri)
{
throw new InvalidOperationException("SuseOptions.AdvisoryBaseUri must be an absolute URI.");
}
if (MaxAdvisoriesPerFetch <= 0 || MaxAdvisoriesPerFetch > 250)
{
throw new InvalidOperationException("MaxAdvisoriesPerFetch must be between 1 and 250.");
}
if (InitialBackfill < TimeSpan.Zero || InitialBackfill > TimeSpan.FromDays(365))
{
throw new InvalidOperationException("InitialBackfill must be between 0 and 365 days.");
}
if (ResumeOverlap < TimeSpan.Zero || ResumeOverlap > TimeSpan.FromDays(14))
{
throw new InvalidOperationException("ResumeOverlap must be between 0 and 14 days.");
}
if (FetchTimeout <= TimeSpan.Zero || FetchTimeout > TimeSpan.FromMinutes(5))
{
throw new InvalidOperationException("FetchTimeout must be positive and less than five minutes.");
}
if (RequestDelay < TimeSpan.Zero || RequestDelay > TimeSpan.FromSeconds(10))
{
throw new InvalidOperationException("RequestDelay must be between 0 and 10 seconds.");
}
}
}
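
A hedged usage sketch (hypothetical tuning values, not taken from this commit) showing how the Validate guards behave when SuseOptions is constructed directly:

using System;
using StellaOps.Concelier.Connector.Distro.Suse.Configuration;

// Hypothetical values; Validate() enforces the bounds documented above.
var options = new SuseOptions
{
    MaxAdvisoriesPerFetch = 20,
    ResumeOverlap = TimeSpan.FromDays(1),
    RequestDelay = TimeSpan.FromMilliseconds(250),
};
options.Validate(); // passes: all bounds satisfied

options.FetchTimeout = TimeSpan.FromMinutes(10);
try
{
    options.Validate(); // throws: FetchTimeout exceeds the five-minute ceiling
}
catch (InvalidOperationException ex)
{
    Console.WriteLine(ex.Message);
}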

View File

@@ -0,0 +1,28 @@
using System;
using System.Collections.Generic;
namespace StellaOps.Concelier.Connector.Distro.Suse.Internal;
internal sealed record SuseAdvisoryDto(
string AdvisoryId,
string Title,
string? Summary,
DateTimeOffset Published,
IReadOnlyList<string> CveIds,
IReadOnlyList<SusePackageStateDto> Packages,
IReadOnlyList<SuseReferenceDto> References);
internal sealed record SusePackageStateDto(
string Package,
string Platform,
string? Architecture,
string CanonicalNevra,
string? IntroducedVersion,
string? FixedVersion,
string? LastAffectedVersion,
string Status);
internal sealed record SuseReferenceDto(
string Url,
string? Kind,
string? Title);

View File

@@ -0,0 +1,5 @@
using System;
namespace StellaOps.Concelier.Connector.Distro.Suse.Internal;
internal sealed record SuseChangeRecord(string FileName, DateTimeOffset ModifiedAt);

View File

@@ -0,0 +1,81 @@
using System;
using System.Collections.Generic;
using System.Globalization;
using System.IO;
namespace StellaOps.Concelier.Connector.Distro.Suse.Internal;
internal static class SuseChangesParser
{
public static IReadOnlyList<SuseChangeRecord> Parse(string csv)
{
if (string.IsNullOrWhiteSpace(csv))
{
return Array.Empty<SuseChangeRecord>();
}
var records = new List<SuseChangeRecord>();
using var reader = new StringReader(csv);
string? line;
while ((line = reader.ReadLine()) is not null)
{
if (string.IsNullOrWhiteSpace(line))
{
continue;
}
var parts = SplitCsvLine(line);
if (parts.Length < 2)
{
continue;
}
var fileName = parts[0].Trim();
if (string.IsNullOrWhiteSpace(fileName))
{
continue;
}
if (!DateTimeOffset.TryParse(parts[1], CultureInfo.InvariantCulture, DateTimeStyles.AssumeUniversal, out var modifiedAt))
{
continue;
}
records.Add(new SuseChangeRecord(fileName, modifiedAt.ToUniversalTime()));
}
return records;
}
private static string[] SplitCsvLine(string line)
{
var values = new List<string>(2);
var current = string.Empty;
var insideQuotes = false;
foreach (var ch in line)
{
if (ch == '"')
{
insideQuotes = !insideQuotes;
continue;
}
if (ch == ',' && !insideQuotes)
{
values.Add(current);
current = string.Empty;
continue;
}
current += ch;
}
if (!string.IsNullOrEmpty(current))
{
values.Add(current);
}
return values.ToArray();
}
}
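
For illustration, a minimal sketch of the parser on a hypothetical changes.csv excerpt (the type is internal, so this is callable from the connector assembly or the test project granted InternalsVisibleTo above):

using System;
using StellaOps.Concelier.Connector.Distro.Suse.Internal;

// Hypothetical changes.csv excerpt; the real index lists CSAF file names with timestamps.
const string csv =
    "\"cvrf-suse-su-2025-0001-1.json\",\"2025-01-02T03:04:05Z\"\n" +
    "malformed-line-without-timestamp\n" +
    "\"cvrf-suse-su-2025-0002-1.json\",\"2025-01-03T00:00:00Z\"";

foreach (var record in SuseChangesParser.Parse(csv))
{
    Console.WriteLine($"{record.FileName} modified {record.ModifiedAt:O}");
}
// Rows without a parsable timestamp are skipped; timestamps are normalised to UTC.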

View File

@@ -0,0 +1,422 @@
using System;
using System.Buffers.Text;
using System.Collections.Generic;
using System.Globalization;
using System.Linq;
using System.Text.Json;
using StellaOps.Concelier.Normalization.Distro;
namespace StellaOps.Concelier.Connector.Distro.Suse.Internal;
internal static class SuseCsafParser
{
public static SuseAdvisoryDto Parse(string json)
{
ArgumentException.ThrowIfNullOrEmpty(json);
using var document = JsonDocument.Parse(json);
var root = document.RootElement;
if (!root.TryGetProperty("document", out var documentElement))
{
throw new InvalidOperationException("CSAF payload missing 'document' element.");
}
var trackingElement = documentElement.GetProperty("tracking");
var advisoryId = trackingElement.TryGetProperty("id", out var idElement)
? idElement.GetString()
: null;
if (string.IsNullOrWhiteSpace(advisoryId))
{
throw new InvalidOperationException("CSAF payload missing tracking.id.");
}
var title = documentElement.TryGetProperty("title", out var titleElement)
? titleElement.GetString()
: advisoryId;
var summary = ExtractSummary(documentElement);
var published = ParseDate(trackingElement, "initial_release_date")
?? ParseDate(trackingElement, "current_release_date")
?? DateTimeOffset.UtcNow;
var references = new List<SuseReferenceDto>();
if (documentElement.TryGetProperty("references", out var referencesElement) &&
referencesElement.ValueKind == JsonValueKind.Array)
{
foreach (var referenceElement in referencesElement.EnumerateArray())
{
var url = referenceElement.TryGetProperty("url", out var urlElement)
? urlElement.GetString()
: null;
if (string.IsNullOrWhiteSpace(url))
{
continue;
}
references.Add(new SuseReferenceDto(
url.Trim(),
referenceElement.TryGetProperty("category", out var categoryElement) ? categoryElement.GetString() : null,
referenceElement.TryGetProperty("summary", out var summaryElement) ? summaryElement.GetString() : null));
}
}
var productLookup = BuildProductLookup(root);
var packageBuilders = new Dictionary<string, PackageStateBuilder>(StringComparer.OrdinalIgnoreCase);
var cveIds = new HashSet<string>(StringComparer.OrdinalIgnoreCase);
if (root.TryGetProperty("vulnerabilities", out var vulnerabilitiesElement) &&
vulnerabilitiesElement.ValueKind == JsonValueKind.Array)
{
foreach (var vulnerability in vulnerabilitiesElement.EnumerateArray())
{
if (vulnerability.TryGetProperty("cve", out var cveElement))
{
var cve = cveElement.GetString();
if (!string.IsNullOrWhiteSpace(cve))
{
cveIds.Add(cve.Trim());
}
}
if (vulnerability.TryGetProperty("references", out var vulnReferences) &&
vulnReferences.ValueKind == JsonValueKind.Array)
{
foreach (var referenceElement in vulnReferences.EnumerateArray())
{
var url = referenceElement.TryGetProperty("url", out var urlElement)
? urlElement.GetString()
: null;
if (string.IsNullOrWhiteSpace(url))
{
continue;
}
references.Add(new SuseReferenceDto(
url.Trim(),
referenceElement.TryGetProperty("category", out var categoryElement) ? categoryElement.GetString() : null,
referenceElement.TryGetProperty("summary", out var summaryElement) ? summaryElement.GetString() : null));
}
}
if (!vulnerability.TryGetProperty("product_status", out var statusElement) ||
statusElement.ValueKind != JsonValueKind.Object)
{
continue;
}
foreach (var property in statusElement.EnumerateObject())
{
var category = property.Name;
var idArray = property.Value;
if (idArray.ValueKind != JsonValueKind.Array)
{
continue;
}
foreach (var productIdElement in idArray.EnumerateArray())
{
var productId = productIdElement.GetString();
if (string.IsNullOrWhiteSpace(productId))
{
continue;
}
if (!productLookup.TryGetValue(productId, out var product))
{
continue;
}
if (!packageBuilders.TryGetValue(productId, out var builder))
{
builder = new PackageStateBuilder(product);
packageBuilders[productId] = builder;
}
builder.ApplyStatus(category, product);
}
}
}
}
var packages = new List<SusePackageStateDto>(packageBuilders.Count);
foreach (var builder in packageBuilders.Values)
{
if (builder.ShouldEmit)
{
packages.Add(builder.ToDto());
}
}
packages.Sort(static (left, right) =>
{
var compare = string.Compare(left.Platform, right.Platform, StringComparison.OrdinalIgnoreCase);
if (compare != 0)
{
return compare;
}
compare = string.Compare(left.Package, right.Package, StringComparison.OrdinalIgnoreCase);
if (compare != 0)
{
return compare;
}
return string.Compare(left.Architecture, right.Architecture, StringComparison.OrdinalIgnoreCase);
});
var cveList = cveIds.Count == 0
? Array.Empty<string>()
: cveIds.OrderBy(static cve => cve, StringComparer.OrdinalIgnoreCase).ToArray();
return new SuseAdvisoryDto(
advisoryId.Trim(),
string.IsNullOrWhiteSpace(title) ? advisoryId : title!,
summary,
published,
cveList,
packages,
references);
}
private static string? ExtractSummary(JsonElement documentElement)
{
if (!documentElement.TryGetProperty("notes", out var notesElement) || notesElement.ValueKind != JsonValueKind.Array)
{
return null;
}
foreach (var note in notesElement.EnumerateArray())
{
var category = note.TryGetProperty("category", out var categoryElement)
? categoryElement.GetString()
: null;
if (string.Equals(category, "summary", StringComparison.OrdinalIgnoreCase)
|| string.Equals(category, "description", StringComparison.OrdinalIgnoreCase))
{
return note.TryGetProperty("text", out var textElement) ? textElement.GetString() : null;
}
}
return null;
}
private static DateTimeOffset? ParseDate(JsonElement element, string propertyName)
{
if (!element.TryGetProperty(propertyName, out var dateElement))
{
return null;
}
if (dateElement.ValueKind == JsonValueKind.String &&
DateTimeOffset.TryParse(dateElement.GetString(), CultureInfo.InvariantCulture, DateTimeStyles.AssumeUniversal, out var parsed))
{
return parsed.ToUniversalTime();
}
return null;
}
private static Dictionary<string, SuseProduct> BuildProductLookup(JsonElement root)
{
var lookup = new Dictionary<string, SuseProduct>(StringComparer.OrdinalIgnoreCase);
if (!root.TryGetProperty("product_tree", out var productTree))
{
return lookup;
}
if (productTree.TryGetProperty("branches", out var branches) && branches.ValueKind == JsonValueKind.Array)
{
TraverseBranches(branches, null, null, lookup);
}
return lookup;
}
private static void TraverseBranches(JsonElement branches, string? platform, string? architecture, IDictionary<string, SuseProduct> lookup)
{
foreach (var branch in branches.EnumerateArray())
{
var category = branch.TryGetProperty("category", out var categoryElement)
? categoryElement.GetString()
: null;
var name = branch.TryGetProperty("name", out var nameElement)
? nameElement.GetString()
: null;
var nextPlatform = platform;
var nextArchitecture = architecture;
if (string.Equals(category, "product_family", StringComparison.OrdinalIgnoreCase) ||
string.Equals(category, "product_name", StringComparison.OrdinalIgnoreCase) ||
string.Equals(category, "product_version", StringComparison.OrdinalIgnoreCase))
{
if (!string.IsNullOrWhiteSpace(name))
{
nextPlatform = name;
}
}
if (string.Equals(category, "architecture", StringComparison.OrdinalIgnoreCase))
{
nextArchitecture = string.IsNullOrWhiteSpace(name) ? null : name;
}
if (branch.TryGetProperty("product", out var productElement) && productElement.ValueKind == JsonValueKind.Object)
{
var productId = productElement.TryGetProperty("product_id", out var idElement)
? idElement.GetString()
: null;
if (!string.IsNullOrWhiteSpace(productId))
{
var productName = productElement.TryGetProperty("name", out var productNameElement)
? productNameElement.GetString()
: productId;
var (platformName, packageSegment) = SplitProductId(productId!, nextPlatform);
if (string.IsNullOrWhiteSpace(packageSegment))
{
packageSegment = productName;
}
if (string.IsNullOrWhiteSpace(packageSegment))
{
continue;
}
if (!Nevra.TryParse(packageSegment, out var nevra) && !Nevra.TryParse(productName ?? packageSegment, out nevra))
{
continue;
}
lookup[productId!] = new SuseProduct(
productId!,
platformName ?? "SUSE",
nevra!,
nextArchitecture ?? nevra!.Architecture);
}
}
if (branch.TryGetProperty("branches", out var childBranches) && childBranches.ValueKind == JsonValueKind.Array)
{
TraverseBranches(childBranches, nextPlatform, nextArchitecture, lookup);
}
}
}
private static (string? Platform, string? Package) SplitProductId(string productId, string? currentPlatform)
{
var separatorIndex = productId.IndexOf(':');
if (separatorIndex < 0)
{
return (currentPlatform, productId);
}
var platform = productId[..separatorIndex];
var package = separatorIndex < productId.Length - 1 ? productId[(separatorIndex + 1)..] : string.Empty;
var platformNormalized = string.IsNullOrWhiteSpace(platform) ? currentPlatform : platform;
var packageNormalized = string.IsNullOrWhiteSpace(package) ? null : package;
return (platformNormalized, packageNormalized);
}
private static string FormatNevraVersion(Nevra nevra)
{
var epochSegment = nevra.HasExplicitEpoch || nevra.Epoch > 0 ? $"{nevra.Epoch}:" : string.Empty;
return $"{epochSegment}{nevra.Version}-{nevra.Release}";
}
private sealed record SuseProduct(string ProductId, string Platform, Nevra Nevra, string? Architecture)
{
public string Package => Nevra.Name;
public string Version => FormatNevraVersion(Nevra);
public string CanonicalNevra => Nevra.ToCanonicalString();
}
private sealed class PackageStateBuilder
{
private readonly SuseProduct _product;
public PackageStateBuilder(SuseProduct product)
{
_product = product;
Status = null;
}
public string Package => _product.Package;
public string Platform => _product.Platform;
public string? Architecture => _product.Architecture;
public string? IntroducedVersion { get; private set; }
public string? FixedVersion { get; private set; }
public string? LastAffectedVersion { get; private set; }
public string? Status { get; private set; }
public bool ShouldEmit => !string.IsNullOrWhiteSpace(Status) && !string.Equals(Status, "not_affected", StringComparison.OrdinalIgnoreCase);
public void ApplyStatus(string category, SuseProduct product)
{
if (string.IsNullOrWhiteSpace(category))
{
return;
}
switch (category.ToLowerInvariant())
{
case "recommended":
case "fixed":
FixedVersion = product.Version;
Status = "resolved";
break;
case "known_affected":
case "known_vulnerable":
LastAffectedVersion = product.Version;
Status ??= "open";
break;
case "first_affected":
IntroducedVersion ??= product.Version;
Status ??= "open";
break;
case "under_investigation":
Status ??= "investigating";
break;
case "known_not_affected":
Status = "not_affected";
IntroducedVersion = null;
FixedVersion = null;
LastAffectedVersion = null;
break;
}
}
public SusePackageStateDto ToDto()
{
var status = Status ?? "unknown";
var introduced = IntroducedVersion;
var lastAffected = LastAffectedVersion;
if (string.Equals(status, "resolved", StringComparison.OrdinalIgnoreCase) && string.IsNullOrWhiteSpace(FixedVersion))
{
status = "open";
}
return new SusePackageStateDto(
Package,
Platform,
Architecture,
_product.CanonicalNevra,
introduced,
FixedVersion,
lastAffected,
status);
}
}
}
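
A hedged sketch of the parser on a heavily trimmed, hypothetical CSAF payload; product_tree is omitted here, so no package states are emitted, but the document-level fields flow through to the DTO:

using System;
using StellaOps.Concelier.Connector.Distro.Suse.Internal;

// Hypothetical, trimmed CSAF document (identifiers and URL are illustrative only).
const string json = """
{
  "document": {
    "title": "Security update for example",
    "tracking": { "id": "SUSE-SU-2025:0001-1", "initial_release_date": "2025-01-02T03:04:05Z" },
    "notes": [ { "category": "summary", "text": "Example summary." } ],
    "references": [ { "url": "https://www.suse.com/support/update/announcement/2025/suse-su-20250001-1/", "category": "self" } ]
  },
  "vulnerabilities": [ { "cve": "CVE-2025-0001" } ]
}
""";

var dto = SuseCsafParser.Parse(json);
Console.WriteLine($"{dto.AdvisoryId}: {dto.CveIds.Count} CVE(s), {dto.References.Count} reference(s), {dto.Packages.Count} package state(s)");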

View File

@@ -0,0 +1,177 @@
using System;
using System.Collections.Generic;
using System.Linq;
using MongoDB.Bson;
namespace StellaOps.Concelier.Connector.Distro.Suse.Internal;
internal sealed record SuseCursor(
DateTimeOffset? LastModified,
IReadOnlyCollection<string> ProcessedIds,
IReadOnlyCollection<Guid> PendingDocuments,
IReadOnlyCollection<Guid> PendingMappings,
IReadOnlyDictionary<string, SuseFetchCacheEntry> FetchCache)
{
private static readonly IReadOnlyCollection<string> EmptyStringList = Array.Empty<string>();
private static readonly IReadOnlyCollection<Guid> EmptyGuidList = Array.Empty<Guid>();
private static readonly IReadOnlyDictionary<string, SuseFetchCacheEntry> EmptyCache =
new Dictionary<string, SuseFetchCacheEntry>(StringComparer.OrdinalIgnoreCase);
public static SuseCursor Empty { get; } = new(null, EmptyStringList, EmptyGuidList, EmptyGuidList, EmptyCache);
public static SuseCursor FromBson(BsonDocument? document)
{
if (document is null || document.ElementCount == 0)
{
return Empty;
}
DateTimeOffset? lastModified = null;
if (document.TryGetValue("lastModified", out var lastValue))
{
lastModified = lastValue.BsonType switch
{
BsonType.DateTime => DateTime.SpecifyKind(lastValue.ToUniversalTime(), DateTimeKind.Utc),
BsonType.String when DateTimeOffset.TryParse(lastValue.AsString, out var parsed) => parsed.ToUniversalTime(),
_ => null,
};
}
var processed = ReadStringSet(document, "processedIds");
var pendingDocs = ReadGuidSet(document, "pendingDocuments");
var pendingMappings = ReadGuidSet(document, "pendingMappings");
var cache = ReadCache(document);
return new SuseCursor(lastModified, processed, pendingDocs, pendingMappings, cache);
}
public BsonDocument ToBsonDocument()
{
var document = new BsonDocument
{
["pendingDocuments"] = new BsonArray(PendingDocuments.Select(static id => id.ToString())),
["pendingMappings"] = new BsonArray(PendingMappings.Select(static id => id.ToString())),
};
if (LastModified.HasValue)
{
document["lastModified"] = LastModified.Value.UtcDateTime;
}
if (ProcessedIds.Count > 0)
{
document["processedIds"] = new BsonArray(ProcessedIds);
}
if (FetchCache.Count > 0)
{
var cacheDocument = new BsonDocument();
foreach (var (key, entry) in FetchCache)
{
cacheDocument[key] = entry.ToBsonDocument();
}
document["fetchCache"] = cacheDocument;
}
return document;
}
public SuseCursor WithPendingDocuments(IEnumerable<Guid> ids)
=> this with { PendingDocuments = ids?.Distinct().ToArray() ?? EmptyGuidList };
public SuseCursor WithPendingMappings(IEnumerable<Guid> ids)
=> this with { PendingMappings = ids?.Distinct().ToArray() ?? EmptyGuidList };
public SuseCursor WithFetchCache(IDictionary<string, SuseFetchCacheEntry>? cache)
{
if (cache is null || cache.Count == 0)
{
return this with { FetchCache = EmptyCache };
}
return this with { FetchCache = new Dictionary<string, SuseFetchCacheEntry>(cache, StringComparer.OrdinalIgnoreCase) };
}
public SuseCursor WithProcessed(DateTimeOffset modified, IEnumerable<string> ids)
=> this with
{
LastModified = modified.ToUniversalTime(),
ProcessedIds = ids?.Where(static id => !string.IsNullOrWhiteSpace(id))
.Select(static id => id.Trim())
.Distinct(StringComparer.OrdinalIgnoreCase)
.ToArray() ?? EmptyStringList
};
public bool TryGetCache(string key, out SuseFetchCacheEntry entry)
{
if (FetchCache.Count == 0)
{
entry = SuseFetchCacheEntry.Empty;
return false;
}
return FetchCache.TryGetValue(key, out entry!);
}
private static IReadOnlyCollection<string> ReadStringSet(BsonDocument document, string field)
{
if (!document.TryGetValue(field, out var value) || value is not BsonArray array)
{
return EmptyStringList;
}
var list = new List<string>(array.Count);
foreach (var element in array)
{
if (element.BsonType == BsonType.String)
{
var str = element.AsString.Trim();
if (!string.IsNullOrWhiteSpace(str))
{
list.Add(str);
}
}
}
return list;
}
private static IReadOnlyCollection<Guid> ReadGuidSet(BsonDocument document, string field)
{
if (!document.TryGetValue(field, out var value) || value is not BsonArray array)
{
return EmptyGuidList;
}
var list = new List<Guid>(array.Count);
foreach (var element in array)
{
if (Guid.TryParse(element.ToString(), out var guid))
{
list.Add(guid);
}
}
return list;
}
private static IReadOnlyDictionary<string, SuseFetchCacheEntry> ReadCache(BsonDocument document)
{
if (!document.TryGetValue("fetchCache", out var value) || value is not BsonDocument cacheDocument || cacheDocument.ElementCount == 0)
{
return EmptyCache;
}
var cache = new Dictionary<string, SuseFetchCacheEntry>(StringComparer.OrdinalIgnoreCase);
foreach (var element in cacheDocument.Elements)
{
if (element.Value is BsonDocument entry)
{
cache[element.Name] = SuseFetchCacheEntry.FromBson(entry);
}
}
return cache;
}
}

View File

@@ -0,0 +1,76 @@
using System;
using MongoDB.Bson;
namespace StellaOps.Concelier.Connector.Distro.Suse.Internal;
internal sealed record SuseFetchCacheEntry(string? ETag, DateTimeOffset? LastModified)
{
public static SuseFetchCacheEntry Empty { get; } = new(null, null);
public static SuseFetchCacheEntry FromDocument(StellaOps.Concelier.Storage.Mongo.Documents.DocumentRecord document)
=> new(document.Etag, document.LastModified);
public static SuseFetchCacheEntry FromBson(BsonDocument document)
{
if (document is null || document.ElementCount == 0)
{
return Empty;
}
string? etag = null;
DateTimeOffset? lastModified = null;
if (document.TryGetValue("etag", out var etagValue) && etagValue.BsonType == BsonType.String)
{
etag = etagValue.AsString;
}
if (document.TryGetValue("lastModified", out var modifiedValue))
{
lastModified = modifiedValue.BsonType switch
{
BsonType.DateTime => DateTime.SpecifyKind(modifiedValue.ToUniversalTime(), DateTimeKind.Utc),
BsonType.String when DateTimeOffset.TryParse(modifiedValue.AsString, out var parsed) => parsed.ToUniversalTime(),
_ => null,
};
}
return new SuseFetchCacheEntry(etag, lastModified);
}
public BsonDocument ToBsonDocument()
{
var document = new BsonDocument();
if (!string.IsNullOrWhiteSpace(ETag))
{
document["etag"] = ETag;
}
if (LastModified.HasValue)
{
document["lastModified"] = LastModified.Value.UtcDateTime;
}
return document;
}
public bool Matches(StellaOps.Concelier.Storage.Mongo.Documents.DocumentRecord document)
{
if (document is null)
{
return false;
}
if (!string.Equals(ETag, document.Etag, StringComparison.Ordinal))
{
return false;
}
if (LastModified.HasValue && document.LastModified.HasValue)
{
return LastModified.Value.UtcDateTime == document.LastModified.Value.UtcDateTime;
}
return !LastModified.HasValue && !document.LastModified.HasValue;
}
}
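
A small sketch (hypothetical values) showing how a fetch-cache entry and cursor round-trip through the BSON shape persisted in the source-state collection:

using System;
using System.Collections.Generic;
using StellaOps.Concelier.Connector.Distro.Suse.Internal;

// Hypothetical state: one pending document plus a cached ETag for the changes index.
var cache = new Dictionary<string, SuseFetchCacheEntry>(StringComparer.OrdinalIgnoreCase)
{
    ["https://ftp.suse.com/pub/projects/security/csaf/changes.csv"] =
        new SuseFetchCacheEntry("\"abc123\"", new DateTimeOffset(2025, 1, 2, 3, 4, 5, TimeSpan.Zero)),
};

var cursor = SuseCursor.Empty
    .WithPendingDocuments(new[] { Guid.NewGuid() })
    .WithProcessed(DateTimeOffset.UtcNow, new[] { "cvrf-suse-su-2025-0001-1.json" })
    .WithFetchCache(cache);

// Serialize and re-hydrate the cursor exactly as the connector does between runs.
var roundTripped = SuseCursor.FromBson(cursor.ToBsonDocument());
Console.WriteLine($"lastModified={roundTripped.LastModified:O}, pendingDocs={roundTripped.PendingDocuments.Count}, cachedResources={roundTripped.FetchCache.Count}");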

View File

@@ -0,0 +1,342 @@
using System;
using System.Collections.Generic;
using System.Globalization;
using System.Linq;
using StellaOps.Concelier.Models;
using StellaOps.Concelier.Normalization.Distro;
using StellaOps.Concelier.Storage.Mongo.Documents;
namespace StellaOps.Concelier.Connector.Distro.Suse.Internal;
internal static class SuseMapper
{
public static Advisory Map(SuseAdvisoryDto dto, DocumentRecord document, DateTimeOffset recordedAt)
{
ArgumentNullException.ThrowIfNull(dto);
ArgumentNullException.ThrowIfNull(document);
var aliases = BuildAliases(dto);
var references = BuildReferences(dto, recordedAt);
var packages = BuildPackages(dto, recordedAt);
var fetchProvenance = new AdvisoryProvenance(
SuseConnectorPlugin.SourceName,
"document",
document.Uri,
document.FetchedAt.ToUniversalTime());
var mapProvenance = new AdvisoryProvenance(
SuseConnectorPlugin.SourceName,
"mapping",
dto.AdvisoryId,
recordedAt);
var published = dto.Published;
var modified = DateTimeOffset.Compare(recordedAt, dto.Published) >= 0 ? recordedAt : dto.Published;
return new Advisory(
advisoryKey: dto.AdvisoryId,
title: dto.Title ?? dto.AdvisoryId,
summary: dto.Summary,
language: "en",
published: published,
modified: modified,
severity: null,
exploitKnown: false,
aliases: aliases,
references: references,
affectedPackages: packages,
cvssMetrics: Array.Empty<CvssMetric>(),
provenance: new[] { fetchProvenance, mapProvenance });
}
private static string[] BuildAliases(SuseAdvisoryDto dto)
{
var aliases = new HashSet<string>(StringComparer.OrdinalIgnoreCase)
{
dto.AdvisoryId
};
foreach (var cve in dto.CveIds ?? Array.Empty<string>())
{
if (!string.IsNullOrWhiteSpace(cve))
{
aliases.Add(cve.Trim());
}
}
return aliases.OrderBy(static alias => alias, StringComparer.OrdinalIgnoreCase).ToArray();
}
private static AdvisoryReference[] BuildReferences(SuseAdvisoryDto dto, DateTimeOffset recordedAt)
{
if (dto.References is null || dto.References.Count == 0)
{
return Array.Empty<AdvisoryReference>();
}
var references = new List<AdvisoryReference>(dto.References.Count);
foreach (var reference in dto.References)
{
if (string.IsNullOrWhiteSpace(reference.Url))
{
continue;
}
try
{
var provenance = new AdvisoryProvenance(
SuseConnectorPlugin.SourceName,
"reference",
reference.Url,
recordedAt);
references.Add(new AdvisoryReference(
reference.Url.Trim(),
NormalizeReferenceKind(reference.Kind),
reference.Kind,
reference.Title,
provenance));
}
catch (ArgumentException)
{
// Ignore malformed URLs to keep advisory mapping resilient.
}
}
return references.Count == 0
? Array.Empty<AdvisoryReference>()
: references
.OrderBy(static reference => reference.Url, StringComparer.OrdinalIgnoreCase)
.ToArray();
}
private static string? NormalizeReferenceKind(string? kind)
{
if (string.IsNullOrWhiteSpace(kind))
{
return null;
}
return kind.Trim().ToLowerInvariant() switch
{
"cve" => "cve",
"self" => "advisory",
"external" => "external",
_ => null,
};
}
private static IReadOnlyList<AffectedPackage> BuildPackages(SuseAdvisoryDto dto, DateTimeOffset recordedAt)
{
if (dto.Packages is null || dto.Packages.Count == 0)
{
return Array.Empty<AffectedPackage>();
}
var packages = new List<AffectedPackage>(dto.Packages.Count);
foreach (var package in dto.Packages)
{
if (string.IsNullOrWhiteSpace(package.CanonicalNevra))
{
continue;
}
Nevra? nevra;
if (!Nevra.TryParse(package.CanonicalNevra, out nevra))
{
continue;
}
var affectedProvenance = new AdvisoryProvenance(
SuseConnectorPlugin.SourceName,
"affected",
$"{package.Platform}:{package.CanonicalNevra}",
recordedAt);
var ranges = BuildVersionRanges(package, nevra!, recordedAt);
if (ranges.Count == 0 && string.Equals(package.Status, "not_affected", StringComparison.OrdinalIgnoreCase))
{
continue;
}
var normalizedVersions = BuildNormalizedVersions(package, ranges);
packages.Add(new AffectedPackage(
AffectedPackageTypes.Rpm,
identifier: nevra!.ToCanonicalString(),
platform: package.Platform,
versionRanges: ranges,
statuses: BuildStatuses(package, affectedProvenance),
provenance: new[] { affectedProvenance },
normalizedVersions: normalizedVersions));
}
return packages.Count == 0
? Array.Empty<AffectedPackage>()
: packages
.OrderBy(static pkg => pkg.Platform, StringComparer.OrdinalIgnoreCase)
.ThenBy(static pkg => pkg.Identifier, StringComparer.OrdinalIgnoreCase)
.ToArray();
}
private static IReadOnlyList<AffectedPackageStatus> BuildStatuses(SusePackageStateDto package, AdvisoryProvenance provenance)
{
if (string.IsNullOrWhiteSpace(package.Status))
{
return Array.Empty<AffectedPackageStatus>();
}
return new[]
{
new AffectedPackageStatus(package.Status, provenance)
};
}
private static IReadOnlyList<AffectedVersionRange> BuildVersionRanges(SusePackageStateDto package, Nevra nevra, DateTimeOffset recordedAt)
{
var introducedComponent = ParseNevraComponent(package.IntroducedVersion, nevra);
var fixedComponent = ParseNevraComponent(package.FixedVersion, nevra);
var lastAffectedComponent = ParseNevraComponent(package.LastAffectedVersion, nevra);
if (introducedComponent is null && fixedComponent is null && lastAffectedComponent is null)
{
return Array.Empty<AffectedVersionRange>();
}
var rangeProvenance = new AdvisoryProvenance(
SuseConnectorPlugin.SourceName,
"range",
$"{package.Platform}:{nevra.ToCanonicalString()}",
recordedAt);
var extensions = new Dictionary<string, string>(StringComparer.Ordinal)
{
["suse.status"] = package.Status
};
var rangeExpression = BuildRangeExpression(package.IntroducedVersion, package.FixedVersion, package.LastAffectedVersion);
var range = new AffectedVersionRange(
rangeKind: "nevra",
introducedVersion: package.IntroducedVersion,
fixedVersion: package.FixedVersion,
lastAffectedVersion: package.LastAffectedVersion,
rangeExpression: rangeExpression,
provenance: rangeProvenance,
primitives: new RangePrimitives(
SemVer: null,
Nevra: new NevraPrimitive(introducedComponent, fixedComponent, lastAffectedComponent),
Evr: null,
VendorExtensions: extensions));
return new[] { range };
}
private static NevraComponent? ParseNevraComponent(string? version, Nevra nevra)
{
if (string.IsNullOrWhiteSpace(version))
{
return null;
}
if (!TrySplitNevraVersion(version.Trim(), out var epoch, out var ver, out var rel))
{
return null;
}
return new NevraComponent(
nevra.Name,
epoch,
ver,
rel,
string.IsNullOrWhiteSpace(nevra.Architecture) ? null : nevra.Architecture);
}
private static bool TrySplitNevraVersion(string value, out int epoch, out string version, out string release)
{
epoch = 0;
version = string.Empty;
release = string.Empty;
if (string.IsNullOrWhiteSpace(value))
{
return false;
}
var trimmed = value.Trim();
var dashIndex = trimmed.LastIndexOf('-');
if (dashIndex <= 0 || dashIndex >= trimmed.Length - 1)
{
return false;
}
release = trimmed[(dashIndex + 1)..];
var versionSegment = trimmed[..dashIndex];
var epochIndex = versionSegment.IndexOf(':');
if (epochIndex >= 0)
{
var epochPart = versionSegment[..epochIndex];
version = epochIndex < versionSegment.Length - 1 ? versionSegment[(epochIndex + 1)..] : string.Empty;
if (epochPart.Length > 0 && !int.TryParse(epochPart, NumberStyles.Integer, CultureInfo.InvariantCulture, out epoch))
{
epoch = 0;
return false;
}
}
else
{
version = versionSegment;
}
return !string.IsNullOrWhiteSpace(version) && !string.IsNullOrWhiteSpace(release);
}
private static string? BuildRangeExpression(string? introduced, string? fixedVersion, string? lastAffected)
{
var parts = new List<string>(3);
if (!string.IsNullOrWhiteSpace(introduced))
{
parts.Add($"introduced:{introduced}");
}
if (!string.IsNullOrWhiteSpace(fixedVersion))
{
parts.Add($"fixed:{fixedVersion}");
}
if (!string.IsNullOrWhiteSpace(lastAffected))
{
parts.Add($"last:{lastAffected}");
}
return parts.Count == 0 ? null : string.Join(" ", parts);
}
private static IReadOnlyList<NormalizedVersionRule> BuildNormalizedVersions(
SusePackageStateDto package,
IReadOnlyList<AffectedVersionRange> ranges)
{
if (ranges.Count == 0)
{
return Array.Empty<NormalizedVersionRule>();
}
var note = string.IsNullOrWhiteSpace(package.Platform)
? null
: $"suse:{package.Platform.Trim()}";
var rules = new List<NormalizedVersionRule>(ranges.Count);
foreach (var range in ranges)
{
var rule = range.ToNormalizedVersionRule(note);
if (rule is not null)
{
rules.Add(rule);
}
}
return rules.Count == 0 ? Array.Empty<NormalizedVersionRule>() : rules;
}
}
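
For reference, a standalone sketch mirroring the private epoch:version-release split used above; it only documents the expected version-string format and is not the connector's own API:

using System;

// Mirror of the split performed by SuseMapper.TrySplitNevraVersion (illustration only).
static (int Epoch, string Version, string Release)? Split(string value)
{
    var dashIndex = value.LastIndexOf('-');
    if (dashIndex <= 0 || dashIndex >= value.Length - 1)
    {
        return null;
    }
    var release = value[(dashIndex + 1)..];
    var head = value[..dashIndex];
    var epoch = 0;
    var version = head;
    var epochIndex = head.IndexOf(':');
    if (epochIndex >= 0)
    {
        _ = int.TryParse(head[..epochIndex], out epoch);
        version = head[(epochIndex + 1)..];
    }
    return (epoch, version, release);
}

Console.WriteLine(Split("1:2.38-150400.4.34.1")); // (1, 2.38, 150400.4.34.1)
Console.WriteLine(Split("4.2.1-150500.1.2"));     // (0, 4.2.1, 150500.1.2)
// The components feed NevraPrimitive, while BuildRangeExpression joins the raw strings
// as "introduced:<v> fixed:<v> last:<v>" for the stored rangeExpression.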

View File

@@ -0,0 +1,46 @@
using System;
using System.Threading;
using System.Threading.Tasks;
using StellaOps.Concelier.Core.Jobs;
namespace StellaOps.Concelier.Connector.Distro.Suse;
internal static class SuseJobKinds
{
public const string Fetch = "source:suse:fetch";
public const string Parse = "source:suse:parse";
public const string Map = "source:suse:map";
}
internal sealed class SuseFetchJob : IJob
{
private readonly SuseConnector _connector;
public SuseFetchJob(SuseConnector connector)
=> _connector = connector ?? throw new ArgumentNullException(nameof(connector));
public Task ExecuteAsync(JobExecutionContext context, CancellationToken cancellationToken)
=> _connector.FetchAsync(context.Services, cancellationToken);
}
internal sealed class SuseParseJob : IJob
{
private readonly SuseConnector _connector;
public SuseParseJob(SuseConnector connector)
=> _connector = connector ?? throw new ArgumentNullException(nameof(connector));
public Task ExecuteAsync(JobExecutionContext context, CancellationToken cancellationToken)
=> _connector.ParseAsync(context.Services, cancellationToken);
}
internal sealed class SuseMapJob : IJob
{
private readonly SuseConnector _connector;
public SuseMapJob(SuseConnector connector)
=> _connector = connector ?? throw new ArgumentNullException(nameof(connector));
public Task ExecuteAsync(JobExecutionContext context, CancellationToken cancellationToken)
=> _connector.MapAsync(context.Services, cancellationToken);
}

View File

@@ -0,0 +1,17 @@
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<TargetFramework>net10.0</TargetFramework>
<ImplicitUsings>enable</ImplicitUsings>
<Nullable>enable</Nullable>
</PropertyGroup>
<ItemGroup>
<ProjectReference Include="../StellaOps.Plugin/StellaOps.Plugin.csproj" />
<ProjectReference Include="../StellaOps.Concelier.Connector.Common/StellaOps.Concelier.Connector.Common.csproj" />
<ProjectReference Include="../StellaOps.Concelier.Models/StellaOps.Concelier.Models.csproj" />
<ProjectReference Include="../StellaOps.Concelier.Normalization/StellaOps.Concelier.Normalization.csproj" />
<ProjectReference Include="../StellaOps.Concelier.Storage.Mongo/StellaOps.Concelier.Storage.Mongo.csproj" />
</ItemGroup>
</Project>

View File

@@ -0,0 +1,573 @@
using System;
using System.Collections.Generic;
using System.Globalization;
using System.IO;
using System.Linq;
using System.Text;
using System.Text.Json;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Options;
using MongoDB.Bson;
using MongoDB.Bson.IO;
using StellaOps.Concelier.Models;
using StellaOps.Concelier.Connector.Common;
using StellaOps.Concelier.Connector.Common.Fetch;
using StellaOps.Concelier.Connector.Distro.Suse.Configuration;
using StellaOps.Concelier.Connector.Distro.Suse.Internal;
using StellaOps.Concelier.Storage.Mongo;
using StellaOps.Concelier.Storage.Mongo.Advisories;
using StellaOps.Concelier.Storage.Mongo.Documents;
using StellaOps.Concelier.Storage.Mongo.Dtos;
using StellaOps.Plugin;
namespace StellaOps.Concelier.Connector.Distro.Suse;
public sealed class SuseConnector : IFeedConnector
{
private static readonly Action<ILogger, string, int, Exception?> LogMapped =
LoggerMessage.Define<string, int>(
LogLevel.Information,
new EventId(1, "SuseMapped"),
"SUSE advisory {AdvisoryId} mapped with {AffectedCount} affected packages");
private readonly SourceFetchService _fetchService;
private readonly RawDocumentStorage _rawDocumentStorage;
private readonly IDocumentStore _documentStore;
private readonly IDtoStore _dtoStore;
private readonly IAdvisoryStore _advisoryStore;
private readonly ISourceStateRepository _stateRepository;
private readonly SuseOptions _options;
private readonly TimeProvider _timeProvider;
private readonly ILogger<SuseConnector> _logger;
public SuseConnector(
SourceFetchService fetchService,
RawDocumentStorage rawDocumentStorage,
IDocumentStore documentStore,
IDtoStore dtoStore,
IAdvisoryStore advisoryStore,
ISourceStateRepository stateRepository,
IOptions<SuseOptions> options,
TimeProvider? timeProvider,
ILogger<SuseConnector> logger)
{
_fetchService = fetchService ?? throw new ArgumentNullException(nameof(fetchService));
_rawDocumentStorage = rawDocumentStorage ?? throw new ArgumentNullException(nameof(rawDocumentStorage));
_documentStore = documentStore ?? throw new ArgumentNullException(nameof(documentStore));
_dtoStore = dtoStore ?? throw new ArgumentNullException(nameof(dtoStore));
_advisoryStore = advisoryStore ?? throw new ArgumentNullException(nameof(advisoryStore));
_stateRepository = stateRepository ?? throw new ArgumentNullException(nameof(stateRepository));
_options = (options ?? throw new ArgumentNullException(nameof(options))).Value ?? throw new ArgumentNullException(nameof(options));
_options.Validate();
_timeProvider = timeProvider ?? TimeProvider.System;
_logger = logger ?? throw new ArgumentNullException(nameof(logger));
}
public string SourceName => SuseConnectorPlugin.SourceName;
public async Task FetchAsync(IServiceProvider services, CancellationToken cancellationToken)
{
ArgumentNullException.ThrowIfNull(services);
var cursor = await GetCursorAsync(cancellationToken).ConfigureAwait(false);
var now = _timeProvider.GetUtcNow();
var pendingDocuments = new HashSet<Guid>(cursor.PendingDocuments);
var pendingMappings = new HashSet<Guid>(cursor.PendingMappings);
var fetchCache = new Dictionary<string, SuseFetchCacheEntry>(cursor.FetchCache, StringComparer.OrdinalIgnoreCase);
var touchedResources = new HashSet<string>(StringComparer.OrdinalIgnoreCase);
var changesUri = _options.ChangesEndpoint;
var changesKey = changesUri.ToString();
touchedResources.Add(changesKey);
cursor.TryGetCache(changesKey, out var cachedChanges);
var changesRequest = new SourceFetchRequest(SuseOptions.HttpClientName, SourceName, changesUri)
{
Metadata = new Dictionary<string, string>(StringComparer.Ordinal)
{
["suse.type"] = "changes"
},
AcceptHeaders = new[] { "text/csv", "text/plain" },
TimeoutOverride = _options.FetchTimeout,
ETag = cachedChanges?.ETag,
LastModified = cachedChanges?.LastModified,
};
SourceFetchResult changesResult;
try
{
changesResult = await _fetchService.FetchAsync(changesRequest, cancellationToken).ConfigureAwait(false);
}
catch (Exception ex)
{
_logger.LogError(ex, "SUSE changes.csv fetch failed from {Uri}", changesUri);
await _stateRepository.MarkFailureAsync(SourceName, now, TimeSpan.FromMinutes(5), ex.Message, cancellationToken).ConfigureAwait(false);
throw;
}
var maxModified = cursor.LastModified ?? DateTimeOffset.MinValue;
var processedUpdated = false;
var processedIds = new HashSet<string>(cursor.ProcessedIds, StringComparer.OrdinalIgnoreCase);
var currentWindowIds = new HashSet<string>(StringComparer.OrdinalIgnoreCase);
IReadOnlyList<SuseChangeRecord> changeRecords = Array.Empty<SuseChangeRecord>();
if (changesResult.IsNotModified)
{
if (cursor.FetchCache.TryGetValue(changesKey, out var existingCache))
{
fetchCache[changesKey] = existingCache;
}
}
else if (changesResult.IsSuccess && changesResult.Document is not null)
{
fetchCache[changesKey] = SuseFetchCacheEntry.FromDocument(changesResult.Document);
if (changesResult.Document.GridFsId.HasValue)
{
byte[] changesBytes;
try
{
changesBytes = await _rawDocumentStorage.DownloadAsync(changesResult.Document.GridFsId.Value, cancellationToken).ConfigureAwait(false);
}
catch (Exception ex)
{
_logger.LogError(ex, "Failed to download SUSE changes.csv document {DocumentId}", changesResult.Document.Id);
throw;
}
var csv = Encoding.UTF8.GetString(changesBytes);
changeRecords = SuseChangesParser.Parse(csv);
}
}
if (changeRecords.Count > 0)
{
var baseline = (cursor.LastModified ?? (now - _options.InitialBackfill)) - _options.ResumeOverlap;
if (baseline < DateTimeOffset.UnixEpoch)
{
baseline = DateTimeOffset.UnixEpoch;
}
ProvenanceDiagnostics.ReportResumeWindow(SourceName, baseline, _logger);
var candidates = changeRecords
.Where(record => record.ModifiedAt >= baseline)
.OrderBy(record => record.ModifiedAt)
.ThenBy(record => record.FileName, StringComparer.OrdinalIgnoreCase)
.ToList();
if (candidates.Count == 0)
{
candidates = changeRecords
.OrderByDescending(record => record.ModifiedAt)
.ThenBy(record => record.FileName, StringComparer.OrdinalIgnoreCase)
.Take(_options.MaxAdvisoriesPerFetch)
.OrderBy(record => record.ModifiedAt)
.ThenBy(record => record.FileName, StringComparer.OrdinalIgnoreCase)
.ToList();
}
else if (candidates.Count > _options.MaxAdvisoriesPerFetch)
{
candidates = candidates
.OrderByDescending(record => record.ModifiedAt)
.ThenBy(record => record.FileName, StringComparer.OrdinalIgnoreCase)
.Take(_options.MaxAdvisoriesPerFetch)
.OrderBy(record => record.ModifiedAt)
.ThenBy(record => record.FileName, StringComparer.OrdinalIgnoreCase)
.ToList();
}
foreach (var record in candidates)
{
cancellationToken.ThrowIfCancellationRequested();
var detailUri = new Uri(_options.AdvisoryBaseUri, record.FileName);
var cacheKey = detailUri.AbsoluteUri;
touchedResources.Add(cacheKey);
cursor.TryGetCache(cacheKey, out var cachedEntry);
var existing = await _documentStore.FindBySourceAndUriAsync(SourceName, cacheKey, cancellationToken).ConfigureAwait(false);
var metadata = new Dictionary<string, string>(StringComparer.Ordinal)
{
["suse.file"] = record.FileName,
["suse.modified"] = record.ModifiedAt.ToString("O", CultureInfo.InvariantCulture)
};
if (!metadata.ContainsKey("suse.id") && existing?.Metadata?.TryGetValue("suse.id", out var existingId) == true)
{
metadata["suse.id"] = existingId;
}
var request = new SourceFetchRequest(SuseOptions.HttpClientName, SourceName, detailUri)
{
Metadata = metadata,
AcceptHeaders = new[] { "application/json", "text/json" },
TimeoutOverride = _options.FetchTimeout,
ETag = existing?.Etag ?? cachedEntry?.ETag,
LastModified = existing?.LastModified ?? cachedEntry?.LastModified,
};
SourceFetchResult result;
try
{
result = await _fetchService.FetchAsync(request, cancellationToken).ConfigureAwait(false);
}
catch (Exception ex)
{
_logger.LogError(ex, "Failed to fetch SUSE advisory {FileName}", record.FileName);
await _stateRepository.MarkFailureAsync(SourceName, now, TimeSpan.FromMinutes(5), ex.Message, cancellationToken).ConfigureAwait(false);
throw;
}
if (result.IsNotModified)
{
if (existing is not null)
{
fetchCache[cacheKey] = SuseFetchCacheEntry.FromDocument(existing);
if (string.Equals(existing.Status, DocumentStatuses.Mapped, StringComparison.Ordinal))
{
pendingDocuments.Remove(existing.Id);
pendingMappings.Remove(existing.Id);
}
}
continue;
}
if (!result.IsSuccess || result.Document is null)
{
continue;
}
fetchCache[cacheKey] = SuseFetchCacheEntry.FromDocument(result.Document);
pendingDocuments.Add(result.Document.Id);
pendingMappings.Remove(result.Document.Id);
currentWindowIds.Add(record.FileName);
if (record.ModifiedAt > maxModified)
{
maxModified = record.ModifiedAt;
processedUpdated = true;
}
}
}
if (fetchCache.Count > 0 && touchedResources.Count > 0)
{
var staleKeys = fetchCache.Keys.Where(key => !touchedResources.Contains(key)).ToArray();
foreach (var key in staleKeys)
{
fetchCache.Remove(key);
}
}
var updatedCursor = cursor
.WithPendingDocuments(pendingDocuments)
.WithPendingMappings(pendingMappings)
.WithFetchCache(fetchCache);
if (processedUpdated && currentWindowIds.Count > 0)
{
updatedCursor = updatedCursor.WithProcessed(maxModified, currentWindowIds);
}
await UpdateCursorAsync(updatedCursor, cancellationToken).ConfigureAwait(false);
}
public async Task ParseAsync(IServiceProvider services, CancellationToken cancellationToken)
{
ArgumentNullException.ThrowIfNull(services);
var cursor = await GetCursorAsync(cancellationToken).ConfigureAwait(false);
if (cursor.PendingDocuments.Count == 0)
{
return;
}
var remaining = cursor.PendingDocuments.ToList();
var pendingMappings = cursor.PendingMappings.ToList();
foreach (var documentId in cursor.PendingDocuments)
{
cancellationToken.ThrowIfCancellationRequested();
var document = await _documentStore.FindAsync(documentId, cancellationToken).ConfigureAwait(false);
if (document is null)
{
remaining.Remove(documentId);
continue;
}
if (!document.GridFsId.HasValue)
{
_logger.LogWarning("SUSE document {DocumentId} missing GridFS payload", document.Id);
await _documentStore.UpdateStatusAsync(document.Id, DocumentStatuses.Failed, cancellationToken).ConfigureAwait(false);
remaining.Remove(documentId);
continue;
}
byte[] bytes;
try
{
bytes = await _rawDocumentStorage.DownloadAsync(document.GridFsId.Value, cancellationToken).ConfigureAwait(false);
}
catch (Exception ex)
{
_logger.LogError(ex, "Failed to download SUSE document {DocumentId}", document.Id);
throw;
}
SuseAdvisoryDto dto;
try
{
var json = Encoding.UTF8.GetString(bytes);
dto = SuseCsafParser.Parse(json);
}
catch (Exception ex)
{
_logger.LogWarning(ex, "Failed to parse SUSE advisory {Uri}", document.Uri);
await _documentStore.UpdateStatusAsync(document.Id, DocumentStatuses.Failed, cancellationToken).ConfigureAwait(false);
remaining.Remove(documentId);
continue;
}
var metadata = document.Metadata is null
? new Dictionary<string, string>(StringComparer.Ordinal)
: new Dictionary<string, string>(document.Metadata, StringComparer.Ordinal);
metadata["suse.id"] = dto.AdvisoryId;
var updatedDocument = document with { Metadata = metadata };
await _documentStore.UpsertAsync(updatedDocument, cancellationToken).ConfigureAwait(false);
var payload = ToBson(dto);
var dtoRecord = new DtoRecord(Guid.NewGuid(), document.Id, SourceName, "suse.csaf.v1", payload, _timeProvider.GetUtcNow());
await _dtoStore.UpsertAsync(dtoRecord, cancellationToken).ConfigureAwait(false);
await _documentStore.UpdateStatusAsync(document.Id, DocumentStatuses.PendingMap, cancellationToken).ConfigureAwait(false);
remaining.Remove(documentId);
if (!pendingMappings.Contains(documentId))
{
pendingMappings.Add(documentId);
}
}
var updatedCursor = cursor
.WithPendingDocuments(remaining)
.WithPendingMappings(pendingMappings);
await UpdateCursorAsync(updatedCursor, cancellationToken).ConfigureAwait(false);
}
public async Task MapAsync(IServiceProvider services, CancellationToken cancellationToken)
{
ArgumentNullException.ThrowIfNull(services);
var cursor = await GetCursorAsync(cancellationToken).ConfigureAwait(false);
if (cursor.PendingMappings.Count == 0)
{
return;
}
var pendingMappings = cursor.PendingMappings.ToList();
foreach (var documentId in cursor.PendingMappings)
{
cancellationToken.ThrowIfCancellationRequested();
var dtoRecord = await _dtoStore.FindByDocumentIdAsync(documentId, cancellationToken).ConfigureAwait(false);
var document = await _documentStore.FindAsync(documentId, cancellationToken).ConfigureAwait(false);
if (dtoRecord is null || document is null)
{
pendingMappings.Remove(documentId);
continue;
}
SuseAdvisoryDto dto;
try
{
dto = FromBson(dtoRecord.Payload);
}
catch (Exception ex)
{
_logger.LogError(ex, "Failed to deserialize SUSE DTO for document {DocumentId}", documentId);
await _documentStore.UpdateStatusAsync(documentId, DocumentStatuses.Failed, cancellationToken).ConfigureAwait(false);
pendingMappings.Remove(documentId);
continue;
}
var advisory = SuseMapper.Map(dto, document, _timeProvider.GetUtcNow());
await _advisoryStore.UpsertAsync(advisory, cancellationToken).ConfigureAwait(false);
await _documentStore.UpdateStatusAsync(documentId, DocumentStatuses.Mapped, cancellationToken).ConfigureAwait(false);
pendingMappings.Remove(documentId);
LogMapped(_logger, dto.AdvisoryId, advisory.AffectedPackages.Length, null);
}
var updatedCursor = cursor.WithPendingMappings(pendingMappings);
await UpdateCursorAsync(updatedCursor, cancellationToken).ConfigureAwait(false);
}
private async Task<SuseCursor> GetCursorAsync(CancellationToken cancellationToken)
{
var state = await _stateRepository.TryGetAsync(SourceName, cancellationToken).ConfigureAwait(false);
return state is null ? SuseCursor.Empty : SuseCursor.FromBson(state.Cursor);
}
private async Task UpdateCursorAsync(SuseCursor cursor, CancellationToken cancellationToken)
{
var document = cursor.ToBsonDocument();
await _stateRepository.UpdateCursorAsync(SourceName, document, _timeProvider.GetUtcNow(), cancellationToken).ConfigureAwait(false);
}
private static BsonDocument ToBson(SuseAdvisoryDto dto)
{
var packages = new BsonArray();
foreach (var package in dto.Packages)
{
var packageDoc = new BsonDocument
{
["package"] = package.Package,
["platform"] = package.Platform,
["canonical"] = package.CanonicalNevra,
["status"] = package.Status
};
if (!string.IsNullOrWhiteSpace(package.Architecture))
{
packageDoc["arch"] = package.Architecture;
}
if (!string.IsNullOrWhiteSpace(package.IntroducedVersion))
{
packageDoc["introduced"] = package.IntroducedVersion;
}
if (!string.IsNullOrWhiteSpace(package.FixedVersion))
{
packageDoc["fixed"] = package.FixedVersion;
}
if (!string.IsNullOrWhiteSpace(package.LastAffectedVersion))
{
packageDoc["last"] = package.LastAffectedVersion;
}
packages.Add(packageDoc);
}
var references = new BsonArray();
foreach (var reference in dto.References)
{
var referenceDoc = new BsonDocument
{
["url"] = reference.Url
};
if (!string.IsNullOrWhiteSpace(reference.Kind))
{
referenceDoc["kind"] = reference.Kind;
}
if (!string.IsNullOrWhiteSpace(reference.Title))
{
referenceDoc["title"] = reference.Title;
}
references.Add(referenceDoc);
}
return new BsonDocument
{
["advisoryId"] = dto.AdvisoryId,
["title"] = dto.Title ?? string.Empty,
["summary"] = dto.Summary ?? string.Empty,
["published"] = dto.Published.UtcDateTime,
["cves"] = new BsonArray(dto.CveIds ?? Array.Empty<string>()),
["packages"] = packages,
["references"] = references
};
}
private static SuseAdvisoryDto FromBson(BsonDocument document)
{
var advisoryId = document.GetValue("advisoryId", string.Empty).AsString;
var title = document.GetValue("title", advisoryId).AsString;
var summary = document.TryGetValue("summary", out var summaryValue) ? summaryValue.AsString : null;
var published = document.TryGetValue("published", out var publishedValue)
? publishedValue.BsonType switch
{
BsonType.DateTime => DateTime.SpecifyKind(publishedValue.ToUniversalTime(), DateTimeKind.Utc),
BsonType.String when DateTimeOffset.TryParse(publishedValue.AsString, out var parsed) => parsed.ToUniversalTime(),
_ => DateTimeOffset.UtcNow
}
: DateTimeOffset.UtcNow;
var cves = document.TryGetValue("cves", out var cveArray) && cveArray is BsonArray bsonCves
? bsonCves.OfType<BsonValue>()
.Select(static value => value?.ToString())
.Where(static value => !string.IsNullOrWhiteSpace(value))
.Select(static value => value!)
.Distinct(StringComparer.OrdinalIgnoreCase)
.ToArray()
: Array.Empty<string>();
var packageList = new List<SusePackageStateDto>();
if (document.TryGetValue("packages", out var packageArray) && packageArray is BsonArray bsonPackages)
{
foreach (var element in bsonPackages.OfType<BsonDocument>())
{
var package = element.GetValue("package", string.Empty).AsString;
var platform = element.GetValue("platform", string.Empty).AsString;
var canonical = element.GetValue("canonical", string.Empty).AsString;
var status = element.GetValue("status", "unknown").AsString;
var architecture = element.TryGetValue("arch", out var archValue) ? archValue.AsString : null;
var introduced = element.TryGetValue("introduced", out var introducedValue) ? introducedValue.AsString : null;
var fixedVersion = element.TryGetValue("fixed", out var fixedValue) ? fixedValue.AsString : null;
var last = element.TryGetValue("last", out var lastValue) ? lastValue.AsString : null;
packageList.Add(new SusePackageStateDto(
package,
platform,
architecture,
canonical,
introduced,
fixedVersion,
last,
status));
}
}
var referenceList = new List<SuseReferenceDto>();
if (document.TryGetValue("references", out var referenceArray) && referenceArray is BsonArray bsonReferences)
{
foreach (var element in bsonReferences.OfType<BsonDocument>())
{
var url = element.GetValue("url", string.Empty).AsString;
if (string.IsNullOrWhiteSpace(url))
{
continue;
}
referenceList.Add(new SuseReferenceDto(
url,
element.TryGetValue("kind", out var kindValue) ? kindValue.AsString : null,
element.TryGetValue("title", out var titleValue) ? titleValue.AsString : null));
}
}
return new SuseAdvisoryDto(
advisoryId,
string.IsNullOrWhiteSpace(title) ? advisoryId : title,
string.IsNullOrWhiteSpace(summary) ? null : summary,
published,
cves,
packageList,
referenceList);
}
}
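
A hedged sketch of the order in which the scheduled jobs drive the connector; the class name is hypothetical and the service provider is assumed to come from a fully configured Concelier host with Mongo storage, SourceFetchService, and the "concelier.suse" HTTP client registered:

using System;
using System.Threading;
using System.Threading.Tasks;
using StellaOps.Concelier.Connector.Distro.Suse;

internal static class SusePipelineSketch
{
    public static async Task RunOnceAsync(SuseConnector connector, IServiceProvider services, CancellationToken ct)
    {
        await connector.FetchAsync(services, ct); // changes.csv + advisory detail fetch, cursor update
        await connector.ParseAsync(services, ct); // GridFS payload -> suse.csaf.v1 DTO records
        await connector.MapAsync(services, ct);   // DTOs -> canonical Advisory upserts
    }
}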

View File

@@ -0,0 +1,20 @@
using System;
using Microsoft.Extensions.DependencyInjection;
using StellaOps.Plugin;
namespace StellaOps.Concelier.Connector.Distro.Suse;
public sealed class SuseConnectorPlugin : IConnectorPlugin
{
public const string SourceName = "distro-suse";
public string Name => SourceName;
public bool IsAvailable(IServiceProvider services) => services is not null;
public IFeedConnector Create(IServiceProvider services)
{
ArgumentNullException.ThrowIfNull(services);
return ActivatorUtilities.CreateInstance<SuseConnector>(services);
}
}

View File

@@ -0,0 +1,53 @@
using System;
using Microsoft.Extensions.Configuration;
using Microsoft.Extensions.DependencyInjection;
using StellaOps.DependencyInjection;
using StellaOps.Concelier.Core.Jobs;
using StellaOps.Concelier.Connector.Distro.Suse.Configuration;
namespace StellaOps.Concelier.Connector.Distro.Suse;
public sealed class SuseDependencyInjectionRoutine : IDependencyInjectionRoutine
{
private const string ConfigurationSection = "concelier:sources:suse";
private const string FetchCron = "*/30 * * * *";
private const string ParseCron = "5,35 * * * *";
private const string MapCron = "10,40 * * * *";
private static readonly TimeSpan FetchTimeout = TimeSpan.FromMinutes(6);
private static readonly TimeSpan ParseTimeout = TimeSpan.FromMinutes(10);
private static readonly TimeSpan MapTimeout = TimeSpan.FromMinutes(10);
private static readonly TimeSpan LeaseDuration = TimeSpan.FromMinutes(5);
public IServiceCollection Register(IServiceCollection services, IConfiguration configuration)
{
ArgumentNullException.ThrowIfNull(services);
ArgumentNullException.ThrowIfNull(configuration);
services.AddSuseConnector(options =>
{
configuration.GetSection(ConfigurationSection).Bind(options);
options.Validate();
});
var scheduler = new JobSchedulerBuilder(services);
scheduler
.AddJob<SuseFetchJob>(
SuseJobKinds.Fetch,
cronExpression: FetchCron,
timeout: FetchTimeout,
leaseDuration: LeaseDuration)
.AddJob<SuseParseJob>(
SuseJobKinds.Parse,
cronExpression: ParseCron,
timeout: ParseTimeout,
leaseDuration: LeaseDuration)
.AddJob<SuseMapJob>(
SuseJobKinds.Map,
cronExpression: MapCron,
timeout: MapTimeout,
leaseDuration: LeaseDuration);
return services;
}
}
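
A sketch of the configuration keys the routine binds from "concelier:sources:suse", using hypothetical values and an in-memory configuration (the configuration binder package is assumed to be referenced, as the routine itself calls Bind):

using System.Collections.Generic;
using Microsoft.Extensions.Configuration;
using StellaOps.Concelier.Connector.Distro.Suse.Configuration;

var configuration = new ConfigurationBuilder()
    .AddInMemoryCollection(new Dictionary<string, string?>
    {
        // Hypothetical overrides; unspecified properties keep the SuseOptions defaults.
        ["concelier:sources:suse:maxAdvisoriesPerFetch"] = "20",
        ["concelier:sources:suse:initialBackfill"] = "14.00:00:00",
        ["concelier:sources:suse:requestDelay"] = "00:00:00.250",
    })
    .Build();

var options = new SuseOptions();
configuration.GetSection("concelier:sources:suse").Bind(options);
options.Validate();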

View File

@@ -0,0 +1,35 @@
using System;
using Microsoft.Extensions.DependencyInjection;
using Microsoft.Extensions.Options;
using StellaOps.Concelier.Connector.Common.Http;
using StellaOps.Concelier.Connector.Distro.Suse.Configuration;
namespace StellaOps.Concelier.Connector.Distro.Suse;
public static class SuseServiceCollectionExtensions
{
public static IServiceCollection AddSuseConnector(this IServiceCollection services, Action<SuseOptions> configure)
{
ArgumentNullException.ThrowIfNull(services);
ArgumentNullException.ThrowIfNull(configure);
services.AddOptions<SuseOptions>()
.Configure(configure)
.PostConfigure(static opts => opts.Validate());
services.AddSourceHttpClient(SuseOptions.HttpClientName, (sp, httpOptions) =>
{
var options = sp.GetRequiredService<IOptions<SuseOptions>>().Value;
httpOptions.BaseAddress = new Uri(options.AdvisoryBaseUri.GetLeftPart(UriPartial.Authority), UriKind.Absolute);
httpOptions.Timeout = options.FetchTimeout;
httpOptions.UserAgent = options.UserAgent;
httpOptions.AllowedHosts.Clear();
httpOptions.AllowedHosts.Add(options.AdvisoryBaseUri.Host);
httpOptions.AllowedHosts.Add(options.ChangesEndpoint.Host);
httpOptions.DefaultRequestHeaders["Accept"] = "text/csv,application/json;q=0.9,text/plain;q=0.8";
});
services.AddTransient<SuseConnector>();
return services;
}
}
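
Finally, a hedged registration sketch: wiring the connector by hand with hypothetical tuning values. In production the dependency-injection routine above performs the equivalent registration plus job scheduling, and the connector's storage and fetch dependencies (SourceFetchService, the Mongo stores, etc.) are assumed to be registered elsewhere in the host:

using System;
using Microsoft.Extensions.DependencyInjection;
using StellaOps.Concelier.Connector.Distro.Suse;

var services = new ServiceCollection();
services.AddSuseConnector(options =>
{
    options.MaxAdvisoriesPerFetch = 20;                   // hypothetical tuning values
    options.RequestDelay = TimeSpan.FromMilliseconds(250);
});
// SuseConnector and the named "concelier.suse" HTTP client can then be resolved once
// the remaining Concelier services are present in the container.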