This commit is contained in: master
2025-10-12 20:37:18 +03:00
parent 016c5a3fe7
commit d3a98326d1
306 changed files with 21409 additions and 4449 deletions

View File

@@ -20,6 +20,10 @@ public sealed class GhsaOptions
public TimeSpan FailureBackoff { get; set; } = TimeSpan.FromMinutes(5);
public int RateLimitWarningThreshold { get; set; } = 500;
public TimeSpan SecondaryRateLimitBackoff { get; set; } = TimeSpan.FromMinutes(2);
[MemberNotNull(nameof(BaseEndpoint), nameof(ApiToken))]
public void Validate()
{
@@ -57,5 +61,15 @@ public sealed class GhsaOptions
{
throw new InvalidOperationException("FailureBackoff must be greater than zero.");
}
if (RateLimitWarningThreshold < 0)
{
throw new InvalidOperationException("RateLimitWarningThreshold cannot be negative.");
}
if (SecondaryRateLimitBackoff <= TimeSpan.Zero)
{
throw new InvalidOperationException("SecondaryRateLimitBackoff must be greater than zero.");
}
}
}
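The two new knobs sit alongside the existing backoff settings and follow the same fail-fast validation pattern. A minimal tuning sketch, assuming `GhsaOptions` has a public parameterless constructor and the rest of the options (BaseEndpoint, ApiToken, paging, backoff) are bound elsewhere; the values shown are illustrative, not recommended defaults:

```csharp
// Illustrative only: tightens the new rate-limit knobs relative to their defaults.
var options = new GhsaOptions
{
    RateLimitWarningThreshold = 250,                     // warn earlier than the default 500 remaining requests
    SecondaryRateLimitBackoff = TimeSpan.FromMinutes(5), // wait longer than the default 2 minutes after a secondary limit
};

// Validate() now rejects misconfiguration up front:
//   RateLimitWarningThreshold < 0              -> InvalidOperationException
//   SecondaryRateLimitBackoff <= TimeSpan.Zero -> InvalidOperationException
```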

View File

@@ -90,6 +90,7 @@ public sealed class GhsaConnector : IFeedConnector
var page = cursor.NextPage <= 0 ? 1 : cursor.NextPage;
var pagesFetched = 0;
var hasMore = true;
var rateLimitHit = false;
DateTimeOffset? maxUpdated = cursor.LastUpdatedExclusive;
while (hasMore && pagesFetched < _options.MaxPagesPerFetch)
@@ -139,6 +140,13 @@ public sealed class GhsaConnector : IFeedConnector
break;
}
var deferList = await ApplyRateLimitAsync(listResult.Headers, "list", cancellationToken).ConfigureAwait(false);
if (deferList)
{
rateLimitHit = true;
break;
}
var pageModel = GhsaListParser.Parse(listResult.Content, page, _options.PageSize);
if (pageModel.Items.Count == 0)
@@ -195,6 +203,18 @@ public sealed class GhsaConnector : IFeedConnector
_diagnostics.FetchDocument();
pendingDocuments.Add(detailResult.Document.Id);
pendingMappings.Add(detailResult.Document.Id);
var deferDetail = await ApplyRateLimitAsync(detailResult.Document.Headers, "detail", cancellationToken).ConfigureAwait(false);
if (deferDetail)
{
rateLimitHit = true;
break;
}
}
if (rateLimitHit)
{
break;
}
if (pageModel.MaxUpdated.HasValue)
@@ -209,7 +229,7 @@ public sealed class GhsaConnector : IFeedConnector
page = pageModel.NextPageCandidate;
pagesFetched++;
if (hasMore && _options.RequestDelay > TimeSpan.Zero)
if (!rateLimitHit && hasMore && _options.RequestDelay > TimeSpan.Zero)
{
await Task.Delay(_options.RequestDelay, cancellationToken).ConfigureAwait(false);
}
@@ -219,7 +239,7 @@ public sealed class GhsaConnector : IFeedConnector
.WithPendingDocuments(pendingDocuments)
.WithPendingMappings(pendingMappings);
if (hasMore)
if (hasMore || rateLimitHit)
{
updatedCursor = updatedCursor
.WithCurrentWindowStart(since)
@@ -391,4 +411,50 @@ public sealed class GhsaConnector : IFeedConnector
{
await _stateRepository.UpdateCursorAsync(SourceName, cursor.ToBsonDocument(), _timeProvider.GetUtcNow(), cancellationToken).ConfigureAwait(false);
}
private async Task<bool> ApplyRateLimitAsync(IReadOnlyDictionary<string, string>? headers, string phase, CancellationToken cancellationToken)
{
var snapshot = GhsaRateLimitParser.TryParse(headers, _timeProvider.GetUtcNow(), phase);
if (snapshot is null || !snapshot.Value.HasData)
{
return false;
}
_diagnostics.RecordRateLimit(snapshot.Value);
if (snapshot.Value.Remaining.HasValue && snapshot.Value.Remaining.Value <= _options.RateLimitWarningThreshold)
{
var resetMessage = snapshot.Value.ResetAfter.HasValue
? $" (resets in {snapshot.Value.ResetAfter.Value:c})"
: snapshot.Value.ResetAt.HasValue ? $" (resets at {snapshot.Value.ResetAt.Value:O})" : string.Empty;
_logger.LogWarning(
"GHSA rate limit warning: remaining {Remaining} of {Limit} for {Phase} {Resource}{ResetMessage}",
snapshot.Value.Remaining,
snapshot.Value.Limit,
phase,
snapshot.Value.Resource ?? "global",
resetMessage);
}
if (snapshot.Value.Remaining.HasValue && snapshot.Value.Remaining.Value <= 0)
{
_diagnostics.RateLimitExhausted(phase);
var delay = snapshot.Value.RetryAfter ?? snapshot.Value.ResetAfter ?? _options.SecondaryRateLimitBackoff;
if (delay > TimeSpan.Zero)
{
_logger.LogWarning(
"GHSA rate limit exhausted for {Phase} {Resource}; delaying {Delay}",
phase,
snapshot.Value.Resource ?? "global",
delay);
await Task.Delay(delay, cancellationToken).ConfigureAwait(false);
}
return true;
}
return false;
}
}
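The deferral path above picks its delay in a fixed order: the server's `Retry-After` hint first, then the computed reset window, then the configured `SecondaryRateLimitBackoff`. A minimal sketch of that precedence using the snapshot type introduced below (illustrative values; runs only where the internals are visible, e.g. the test assembly):

```csharp
// Illustrative precedence check: RetryAfter > ResetAfter > SecondaryRateLimitBackoff.
var snapshot = new GhsaRateLimitSnapshot(
    Phase: "list",
    Resource: "core",
    Limit: 5000,
    Remaining: 0,
    Used: 5000,
    ResetAt: DateTimeOffset.UtcNow.AddSeconds(90),
    ResetAfter: TimeSpan.FromSeconds(90),
    RetryAfter: null);

var secondaryBackoff = TimeSpan.FromMinutes(2); // stands in for _options.SecondaryRateLimitBackoff

var delay = snapshot.RetryAfter ?? snapshot.ResetAfter ?? secondaryBackoff;
// delay is 90 seconds here; a Retry-After hint would win outright, and with
// neither hint present the connector waits the configured secondary backoff.
```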

View File

@@ -1,3 +1,4 @@
using System;
using Microsoft.Extensions.Configuration;
using Microsoft.Extensions.DependencyInjection;
using StellaOps.DependencyInjection;
@@ -9,6 +10,14 @@ namespace StellaOps.Feedser.Source.Ghsa;
public sealed class GhsaDependencyInjectionRoutine : IDependencyInjectionRoutine
{
private const string ConfigurationSection = "feedser:sources:ghsa";
private const string FetchCron = "1,11,21,31,41,51 * * * *";
private const string ParseCron = "3,13,23,33,43,53 * * * *";
private const string MapCron = "5,15,25,35,45,55 * * * *";
private static readonly TimeSpan FetchTimeout = TimeSpan.FromMinutes(6);
private static readonly TimeSpan ParseTimeout = TimeSpan.FromMinutes(5);
private static readonly TimeSpan MapTimeout = TimeSpan.FromMinutes(5);
private static readonly TimeSpan LeaseDuration = TimeSpan.FromMinutes(4);
public IServiceCollection Register(IServiceCollection services, IConfiguration configuration)
{
@@ -21,33 +30,24 @@ public sealed class GhsaDependencyInjectionRoutine : IDependencyInjectionRoutine
options.Validate();
});
services.AddTransient<GhsaFetchJob>();
services.AddTransient<GhsaParseJob>();
services.AddTransient<GhsaMapJob>();
services.PostConfigure<JobSchedulerOptions>(options =>
{
EnsureJob(options, GhsaJobKinds.Fetch, typeof(GhsaFetchJob));
EnsureJob(options, GhsaJobKinds.Parse, typeof(GhsaParseJob));
EnsureJob(options, GhsaJobKinds.Map, typeof(GhsaMapJob));
});
var scheduler = new JobSchedulerBuilder(services);
scheduler
.AddJob<GhsaFetchJob>(
GhsaJobKinds.Fetch,
cronExpression: FetchCron,
timeout: FetchTimeout,
leaseDuration: LeaseDuration)
.AddJob<GhsaParseJob>(
GhsaJobKinds.Parse,
cronExpression: ParseCron,
timeout: ParseTimeout,
leaseDuration: LeaseDuration)
.AddJob<GhsaMapJob>(
GhsaJobKinds.Map,
cronExpression: MapCron,
timeout: MapTimeout,
leaseDuration: LeaseDuration);
return services;
}
private static void EnsureJob(JobSchedulerOptions options, string kind, Type jobType)
{
if (options.Definitions.ContainsKey(kind))
{
return;
}
options.Definitions[kind] = new JobDefinition(
kind,
jobType,
options.DefaultTimeout,
options.DefaultLeaseDuration,
CronExpression: null,
Enabled: true);
}
}
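For completeness, a hypothetical host-side wiring sketch: the routine binds the `feedser:sources:ghsa` section, so the token can arrive via environment variables the way a Compose or Helm secret would inject it. The environment-variable name follows the standard `__` separator convention and is an assumption, not a documented key:

```csharp
using Microsoft.Extensions.Configuration;
using Microsoft.Extensions.DependencyInjection;
using StellaOps.Feedser.Source.Ghsa;

// Hypothetical wiring sketch; only the section path comes from the routine itself.
// e.g. FEEDSER__SOURCES__GHSA__APITOKEN=<token> maps to feedser:sources:ghsa:ApiToken.
var configuration = new ConfigurationBuilder()
    .AddEnvironmentVariables()
    .Build();

var services = new ServiceCollection();
new GhsaDependencyInjectionRoutine().Register(services, configuration);
// Fetch/parse/map jobs are now registered with the staggered cron defaults above
// (minutes 1/3/5 of every ten-minute block) and the 4-minute lease duration.
```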

View File

@@ -16,6 +16,12 @@ public sealed class GhsaDiagnostics : IDisposable
private readonly Counter<long> _parseFailures;
private readonly Counter<long> _parseQuarantine;
private readonly Counter<long> _mapSuccess;
private readonly Histogram<long> _rateLimitRemaining;
private readonly Histogram<long> _rateLimitLimit;
private readonly Histogram<double> _rateLimitResetSeconds;
private readonly Counter<long> _rateLimitExhausted;
private readonly object _rateLimitLock = new();
private GhsaRateLimitSnapshot? _lastRateLimitSnapshot;
public GhsaDiagnostics()
{
@@ -28,6 +34,10 @@ public sealed class GhsaDiagnostics : IDisposable
_parseFailures = _meter.CreateCounter<long>("ghsa.parse.failures", unit: "documents");
_parseQuarantine = _meter.CreateCounter<long>("ghsa.parse.quarantine", unit: "documents");
_mapSuccess = _meter.CreateCounter<long>("ghsa.map.success", unit: "advisories");
_rateLimitRemaining = _meter.CreateHistogram<long>("ghsa.ratelimit.remaining", unit: "requests");
_rateLimitLimit = _meter.CreateHistogram<long>("ghsa.ratelimit.limit", unit: "requests");
_rateLimitResetSeconds = _meter.CreateHistogram<double>("ghsa.ratelimit.reset_seconds", unit: "s");
_rateLimitExhausted = _meter.CreateCounter<long>("ghsa.ratelimit.exhausted", unit: "events");
}
public void FetchAttempt() => _fetchAttempts.Add(1);
@@ -46,5 +56,45 @@ public sealed class GhsaDiagnostics : IDisposable
public void MapSuccess(long count) => _mapSuccess.Add(count);
internal void RecordRateLimit(GhsaRateLimitSnapshot snapshot)
{
var tags = new KeyValuePair<string, object?>[]
{
new("phase", snapshot.Phase),
new("resource", snapshot.Resource ?? "unknown")
};
if (snapshot.Limit.HasValue)
{
_rateLimitLimit.Record(snapshot.Limit.Value, tags);
}
if (snapshot.Remaining.HasValue)
{
_rateLimitRemaining.Record(snapshot.Remaining.Value, tags);
}
if (snapshot.ResetAfter.HasValue)
{
_rateLimitResetSeconds.Record(snapshot.ResetAfter.Value.TotalSeconds, tags);
}
lock (_rateLimitLock)
{
_lastRateLimitSnapshot = snapshot;
}
}
internal void RateLimitExhausted(string phase)
=> _rateLimitExhausted.Add(1, new KeyValuePair<string, object?>("phase", phase));
internal GhsaRateLimitSnapshot? GetLastRateLimitSnapshot()
{
lock (_rateLimitLock)
{
return _lastRateLimitSnapshot;
}
}
public void Dispose() => _meter.Dispose();
}
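The new `ghsa.ratelimit.*` instruments are ordinary .NET metrics, so the quota dashboard/alerting tracked in the task board can consume them through an exporter or in-process. A minimal in-process sketch using `MeterListener`; the alert threshold is illustrative:

```csharp
using System;
using System.Diagnostics.Metrics;

// Minimal sketch: watch remaining-quota samples and flag when they dip below a threshold.
var listener = new MeterListener
{
    InstrumentPublished = (instrument, l) =>
    {
        if (instrument.Name == "ghsa.ratelimit.remaining")
        {
            l.EnableMeasurementEvents(instrument);
        }
    },
};

listener.SetMeasurementEventCallback<long>((instrument, remaining, tags, state) =>
{
    if (remaining < 500) // illustrative threshold, mirrors the RateLimitWarningThreshold default
    {
        Console.WriteLine($"GHSA quota low: {remaining} requests remaining");
    }
});

listener.Start();
```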

View File

@@ -1,6 +1,7 @@
using System.Collections.Generic;
using System.Linq;
using StellaOps.Feedser.Models;
using StellaOps.Feedser.Normalization.SemVer;
using StellaOps.Feedser.Storage.Mongo.Documents;
namespace StellaOps.Feedser.Source.Ghsa.Internal;
@@ -120,29 +121,9 @@ internal static class GhsaMapper
var rangeKind = SemVerEcosystems.Contains(ecosystem) ? "semver" : "vendor";
var packageType = SemVerEcosystems.Contains(ecosystem) ? AffectedPackageTypes.SemVer : AffectedPackageTypes.Vendor;
var versionRanges = new List<AffectedVersionRange>();
if (!string.IsNullOrWhiteSpace(affected.VulnerableRange) || !string.IsNullOrWhiteSpace(affected.PatchedVersion))
{
var primitives = new RangePrimitives(null, null, null, new Dictionary<string, string>(StringComparer.OrdinalIgnoreCase)
{
["ecosystem"] = ecosystem,
["package"] = packageName,
});
versionRanges.Add(new AffectedVersionRange(
rangeKind,
introducedVersion: null,
fixedVersion: Validation.TrimToNull(affected.PatchedVersion),
lastAffectedVersion: null,
rangeExpression: Validation.TrimToNull(affected.VulnerableRange),
provenance: new AdvisoryProvenance(
GhsaConnectorPlugin.SourceName,
"affected-range",
identifier,
recordedAt,
new[] { ProvenanceFieldMasks.VersionRanges }),
primitives: primitives));
}
var (ranges, normalizedVersions) = SemVerEcosystems.Contains(ecosystem)
? CreateSemVerVersionArtifacts(affected, identifier, ecosystem, packageName, recordedAt)
: CreateVendorVersionArtifacts(affected, rangeKind, identifier, ecosystem, packageName, recordedAt);
var statuses = new[]
{
@@ -160,9 +141,10 @@ internal static class GhsaMapper
packageType,
identifier,
platform: null,
versionRanges: versionRanges,
versionRanges: ranges,
statuses: statuses,
provenance: provenance));
provenance: provenance,
normalizedVersions: normalizedVersions));
}
return packages;
@@ -206,4 +188,142 @@ internal static class GhsaMapper
return results.Count == 0 ? Array.Empty<AdvisoryCredit>() : results;
}
private static (IReadOnlyList<AffectedVersionRange> Ranges, IReadOnlyList<NormalizedVersionRule> Normalized) CreateSemVerVersionArtifacts(
GhsaAffectedDto affected,
string identifier,
string ecosystem,
string packageName,
DateTimeOffset recordedAt)
{
var note = BuildNormalizedNote(identifier);
var results = SemVerRangeRuleBuilder.Build(affected.VulnerableRange, affected.PatchedVersion, note);
if (results.Count > 0)
{
var ranges = new List<AffectedVersionRange>(results.Count);
var normalized = new List<NormalizedVersionRule>(results.Count);
foreach (var result in results)
{
var primitive = result.Primitive;
var rangeExpression = ResolveRangeExpression(result.Expression, primitive.ConstraintExpression, affected.VulnerableRange);
ranges.Add(new AffectedVersionRange(
rangeKind: "semver",
introducedVersion: Validation.TrimToNull(primitive.Introduced),
fixedVersion: Validation.TrimToNull(primitive.Fixed),
lastAffectedVersion: Validation.TrimToNull(primitive.LastAffected),
rangeExpression: rangeExpression,
provenance: CreateRangeProvenance(identifier, recordedAt),
primitives: new RangePrimitives(
SemVer: primitive,
Nevra: null,
Evr: null,
VendorExtensions: CreateVendorExtensions(ecosystem, packageName))));
normalized.Add(result.NormalizedRule);
}
return (ranges.ToArray(), normalized.ToArray());
}
var fallbackRange = CreateFallbackRange("semver", affected, identifier, ecosystem, packageName, recordedAt);
if (fallbackRange is null)
{
return (Array.Empty<AffectedVersionRange>(), Array.Empty<NormalizedVersionRule>());
}
var fallbackRule = fallbackRange.ToNormalizedVersionRule(note);
var normalizedFallback = fallbackRule is null
? Array.Empty<NormalizedVersionRule>()
: new[] { fallbackRule };
return (new[] { fallbackRange }, normalizedFallback);
}
private static (IReadOnlyList<AffectedVersionRange> Ranges, IReadOnlyList<NormalizedVersionRule> Normalized) CreateVendorVersionArtifacts(
GhsaAffectedDto affected,
string rangeKind,
string identifier,
string ecosystem,
string packageName,
DateTimeOffset recordedAt)
{
var range = CreateFallbackRange(rangeKind, affected, identifier, ecosystem, packageName, recordedAt);
if (range is null)
{
return (Array.Empty<AffectedVersionRange>(), Array.Empty<NormalizedVersionRule>());
}
return (new[] { range }, Array.Empty<NormalizedVersionRule>());
}
private static AffectedVersionRange? CreateFallbackRange(
string rangeKind,
GhsaAffectedDto affected,
string identifier,
string ecosystem,
string packageName,
DateTimeOffset recordedAt)
{
var fixedVersion = Validation.TrimToNull(affected.PatchedVersion);
var rangeExpression = Validation.TrimToNull(affected.VulnerableRange);
if (fixedVersion is null && rangeExpression is null)
{
return null;
}
return new AffectedVersionRange(
rangeKind,
introducedVersion: null,
fixedVersion: fixedVersion,
lastAffectedVersion: null,
rangeExpression: rangeExpression,
provenance: CreateRangeProvenance(identifier, recordedAt),
primitives: new RangePrimitives(
SemVer: null,
Nevra: null,
Evr: null,
VendorExtensions: CreateVendorExtensions(ecosystem, packageName)));
}
private static AdvisoryProvenance CreateRangeProvenance(string identifier, DateTimeOffset recordedAt)
=> new(
GhsaConnectorPlugin.SourceName,
"affected-range",
identifier,
recordedAt,
new[] { ProvenanceFieldMasks.VersionRanges });
private static IReadOnlyDictionary<string, string> CreateVendorExtensions(string ecosystem, string packageName)
=> new Dictionary<string, string>(StringComparer.OrdinalIgnoreCase)
{
["ecosystem"] = ecosystem,
["package"] = packageName,
};
private static string? BuildNormalizedNote(string identifier)
{
var trimmed = Validation.TrimToNull(identifier);
return trimmed is null ? null : $"ghsa:{trimmed}";
}
private static string? ResolveRangeExpression(string? parsedExpression, string? constraintExpression, string? fallbackExpression)
{
var parsed = Validation.TrimToNull(parsedExpression);
if (parsed is not null)
{
return parsed;
}
var constraint = Validation.TrimToNull(constraintExpression);
if (constraint is not null)
{
return constraint;
}
return Validation.TrimToNull(fallbackExpression);
}
}

View File

@@ -0,0 +1,111 @@
using System;
using System.Collections.Generic;
using System.Globalization;
namespace StellaOps.Feedser.Source.Ghsa.Internal;
internal static class GhsaRateLimitParser
{
public static GhsaRateLimitSnapshot? TryParse(IReadOnlyDictionary<string, string>? headers, DateTimeOffset now, string phase)
{
if (headers is null || headers.Count == 0)
{
return null;
}
string? resource = null;
long? limit = null;
long? remaining = null;
long? used = null;
DateTimeOffset? resetAt = null;
TimeSpan? resetAfter = null;
TimeSpan? retryAfter = null;
var hasData = false;
if (TryGet(headers, "X-RateLimit-Resource", out var resourceValue) && !string.IsNullOrWhiteSpace(resourceValue))
{
resource = resourceValue;
hasData = true;
}
if (TryParseLong(headers, "X-RateLimit-Limit", out var limitValue))
{
limit = limitValue;
hasData = true;
}
if (TryParseLong(headers, "X-RateLimit-Remaining", out var remainingValue))
{
remaining = remainingValue;
hasData = true;
}
if (TryParseLong(headers, "X-RateLimit-Used", out var usedValue))
{
used = usedValue;
hasData = true;
}
if (TryParseLong(headers, "X-RateLimit-Reset", out var resetValue))
{
resetAt = DateTimeOffset.FromUnixTimeSeconds(resetValue);
var delta = resetAt.Value - now;
if (delta > TimeSpan.Zero)
{
resetAfter = delta;
}
hasData = true;
}
if (TryGet(headers, "Retry-After", out var retryAfterValue) && !string.IsNullOrWhiteSpace(retryAfterValue))
{
if (double.TryParse(retryAfterValue, NumberStyles.Float, CultureInfo.InvariantCulture, out var seconds) && seconds > 0)
{
retryAfter = TimeSpan.FromSeconds(seconds);
}
else if (DateTimeOffset.TryParse(retryAfterValue, CultureInfo.InvariantCulture, DateTimeStyles.AssumeUniversal | DateTimeStyles.AdjustToUniversal, out var retryAfterDate))
{
var delta = retryAfterDate - now;
if (delta > TimeSpan.Zero)
{
retryAfter = delta;
}
}
hasData = true;
}
if (!hasData)
{
return null;
}
return new GhsaRateLimitSnapshot(phase, resource, limit, remaining, used, resetAt, resetAfter, retryAfter);
}
private static bool TryGet(IReadOnlyDictionary<string, string> headers, string key, out string value)
{
foreach (var pair in headers)
{
if (pair.Key.Equals(key, StringComparison.OrdinalIgnoreCase))
{
value = pair.Value;
return true;
}
}
value = string.Empty;
return false;
}
private static bool TryParseLong(IReadOnlyDictionary<string, string> headers, string key, out long result)
{
result = 0;
if (TryGet(headers, key, out var value) && long.TryParse(value, NumberStyles.Integer, CultureInfo.InvariantCulture, out var parsed))
{
result = parsed;
return true;
}
return false;
}
}
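A usage sketch for the parser, written xunit-style since the test assembly is on the `InternalsVisibleTo` list (see the AssemblyInfo change below); the header values are invented:

```csharp
using System;
using System.Collections.Generic;
using StellaOps.Feedser.Source.Ghsa.Internal;
using Xunit;

public sealed class GhsaRateLimitParserSketch
{
    [Fact]
    public void Parses_core_rate_limit_headers()
    {
        var now = new DateTimeOffset(2025, 10, 12, 12, 0, 0, TimeSpan.Zero);
        var headers = new Dictionary<string, string>
        {
            ["X-RateLimit-Resource"] = "core",
            ["X-RateLimit-Limit"] = "5000",
            ["X-RateLimit-Remaining"] = "12",
            ["X-RateLimit-Reset"] = now.AddSeconds(120).ToUnixTimeSeconds().ToString(),
        };

        var snapshot = GhsaRateLimitParser.TryParse(headers, now, phase: "list");

        Assert.True(snapshot.HasValue);
        Assert.Equal("core", snapshot.Value.Resource);
        Assert.Equal(5000L, snapshot.Value.Limit);
        Assert.Equal(12L, snapshot.Value.Remaining);
        Assert.Equal(TimeSpan.FromSeconds(120), snapshot.Value.ResetAfter);
    }
}
```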

View File

@@ -0,0 +1,23 @@
using System;
namespace StellaOps.Feedser.Source.Ghsa.Internal;
internal readonly record struct GhsaRateLimitSnapshot(
string Phase,
string? Resource,
long? Limit,
long? Remaining,
long? Used,
DateTimeOffset? ResetAt,
TimeSpan? ResetAfter,
TimeSpan? RetryAfter)
{
public bool HasData =>
Limit.HasValue ||
Remaining.HasValue ||
Used.HasValue ||
ResetAt.HasValue ||
ResetAfter.HasValue ||
RetryAfter.HasValue ||
!string.IsNullOrEmpty(Resource);
}

View File

@@ -1,3 +1,4 @@
using System.Runtime.CompilerServices;
[assembly: InternalsVisibleTo("FixtureUpdater")]
[assembly: InternalsVisibleTo("StellaOps.Feedser.Source.Ghsa.Tests")]

View File

@@ -9,8 +9,9 @@
<ItemGroup>
<ProjectReference Include="../StellaOps.Plugin/StellaOps.Plugin.csproj" />
<ProjectReference Include="../StellaOps.Feedser.Source.Common/StellaOps.Feedser.Source.Common.csproj" />
<ProjectReference Include="../StellaOps.Feedser.Models/StellaOps.Feedser.Models.csproj" />
<ProjectReference Include="../StellaOps.Feedser.Normalization/StellaOps.Feedser.Normalization.csproj" />
</ItemGroup>
</Project>

View File

@@ -4,10 +4,12 @@
|Select GHSA data source & auth model|BE-Conn-GHSA|Research|**DONE (2025-10-10)** Adopted GitHub Security Advisories REST (global) endpoint with bearer token + API version headers documented in `GhsaOptions`.|
|Fetch pipeline & state management|BE-Conn-GHSA|Source.Common, Storage.Mongo|**DONE (2025-10-10)** Implemented list/detail fetch using `GhsaCursor` (time window + page), resumable SourceState and backoff controls.|
|DTO & parser implementation|BE-Conn-GHSA|Source.Common|**DONE (2025-10-10)** Added `GhsaRecordParser`/DTOs extracting aliases, references, severity, vulnerable ranges, patched versions.|
|Canonical mapping & range primitives|BE-Conn-GHSA|Models|**DONE (2025-10-10)** `GhsaMapper` emits GHSA advisories with SemVer packages, vendor extensions (ecosystem/package) and deterministic references.|
|Canonical mapping & range primitives|BE-Conn-GHSA|Models|**DONE (2025-10-10)** `GhsaMapper` emits GHSA advisories with SemVer packages, vendor extensions (ecosystem/package), and deterministic references.<br>2025-10-11 research trail: the upcoming normalized array should follow `[{"scheme":"semver","type":"range","min":"<min>","minInclusive":true,"max":"<max>","maxInclusive":false,"notes":"ghsa:GHSA-xxxx"}]`; patched-only advisories should be included as `lt`/`lte` rules when no explicit floor is present (see the sketch after this table).|
|Deterministic fixtures & tests|QA|Testing|**DONE (2025-10-10)** New `StellaOps.Feedser.Source.Ghsa.Tests` regression covers fetch/parse/map via canned GHSA fixtures and snapshot assertions.|
|Telemetry & documentation|DevEx|Docs|**DONE (2025-10-10)** Diagnostics meter (`ghsa.fetch.*`) wired; DI extension documents token/headers and job registrations.|
|GitHub quota monitoring & retries|BE-Conn-GHSA, Observability|Source.Common|**TODO** Add rate-limit dashboard/alerts consuming `X-RateLimit-*` headers, tune retry/backoff strategy for 403/secondary rate limits, and document mitigation steps.|
|Production credential & scheduler rollout|Ops, BE-Conn-GHSA|Docs, WebService|**TODO** Issue PAT for production, update Helm/compose secrets, register fetch/parse/map cron defaults, and schedule staged backfill with health checks.|
|GitHub quota monitoring & retries|BE-Conn-GHSA, Observability|Source.Common|**DONE (2025-10-12)** Rate-limit metrics/logs added, retry/backoff handles 403 secondary limits, and ops runbook documents dashboards + mitigation steps.|
|Production credential & scheduler rollout|Ops, BE-Conn-GHSA|Docs, WebService|**DONE (2025-10-12)** Scheduler defaults registered via `JobSchedulerBuilder`, credential provisioning documented (Compose/Helm samples), and staged backfill guidance captured in `docs/ops/feedser-ghsa-operations.md`.|
|FEEDCONN-GHSA-04-002 Conflict regression fixtures|BE-Conn-GHSA, QA|Merge `FEEDMERGE-ENGINE-04-001`|**DONE (2025-10-12)** Added `conflict-ghsa.canonical.json` + `GhsaConflictFixtureTests`; SemVer ranges and credits align with the merge precedence triple and are shareable with QA. Validation: `dotnet test src/StellaOps.Feedser.Source.Ghsa.Tests/StellaOps.Feedser.Source.Ghsa.Tests.csproj --filter GhsaConflictFixtureTests`.|
|FEEDCONN-GHSA-02-004 GHSA credits & ecosystem severity mapping|BE-Conn-GHSA|Models `FEEDMODELS-SCHEMA-01-002`|**DONE (2025-10-11)** Mapper emits advisory credits with provenance masks, fixtures assert role/contact ordering, and severity normalization remains unchanged.|
|FEEDCONN-GHSA-02-007 Credit parity regression fixtures|BE-Conn-GHSA, QA|Source.Nvd, Source.Osv|**TODO** Capture GHSA/NVD/OSV triples with acknowledgements, wire conflict tests, and document fixture refresh workflow in `docs/dev/fixtures.md`.|
|FEEDCONN-GHSA-02-007 Credit parity regression fixtures|BE-Conn-GHSA, QA|Source.Nvd, Source.Osv|**DONE (2025-10-12)** Credit parity fixtures recorded, regression tests cover GHSA/OSV/NVD alignment, and regeneration workflow documented in `docs/dev/fixtures.md`.|
|FEEDCONN-GHSA-02-001 Normalized versions rollout|BE-Conn-GHSA|Models `FEEDMODELS-SCHEMA-01-003`, Normalization playbook|**DONE (2025-10-11)** GHSA mapper now emits SemVer primitives + normalized ranges, fixtures refreshed, connector tests passing; report logged via FEEDMERGE-COORD-02-900.|
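To make the normalized-array shape quoted in the "Canonical mapping & range primitives" row concrete, a small illustrative sketch (the advisory ID and version bounds are invented) that serializes one rule in that form:

```csharp
using System;
using System.Text.Json;

// Illustrative only: reproduces the documented shape for a made-up advisory
// with a vulnerable range ">= 1.2.0, < 1.4.5".
var normalized = new[]
{
    new
    {
        scheme = "semver",
        type = "range",
        min = "1.2.0",
        minInclusive = true,
        max = "1.4.5",
        maxInclusive = false,
        notes = "ghsa:GHSA-xxxx",
    },
};

Console.WriteLine(JsonSerializer.Serialize(normalized));
// [{"scheme":"semver","type":"range","min":"1.2.0","minInclusive":true,"max":"1.4.5","maxInclusive":false,"notes":"ghsa:GHSA-xxxx"}]
```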