Rename Feedser to Concelier

2025-10-18 20:04:15 +03:00
parent 7e1b10d3b2
commit 0137856fdb
1208 changed files with 4370 additions and 4370 deletions

View File

@@ -0,0 +1,33 @@
# AGENTS
## Role
Deterministic merge and reconciliation engine; builds identity graph via aliases; applies precedence (PSIRT/OVAL > NVD; KEV flag only; regional feeds enrich); produces canonical advisory JSON and merge_event audit trail.
## Scope
- Identity: resolve advisory_key (prefer CVE, else PSIRT/Distro/JVN/BDU/GHSA/ICSA); unify aliases; detect collisions.
- Precedence: override rules for affected ranges (vendor PSIRT/OVAL over registry), enrichment-only feeds (CERTs/JVN/RU-CERT), KEV toggles exploitKnown only.
- Range comparers: RPM NEVRA comparer (epoch:version-release), Debian EVR comparer, SemVer range resolver; platform-aware selection.
- Merge algorithm: stable ordering, pure functions, idempotence; compute beforeHash/afterHash over canonical form; write merge_event.
- Conflict reporting: counters and logs for identity conflicts, reference merges, range overrides.
## Participants
- Storage.Mongo (reads raw mapped advisories, writes merged docs plus merge_event).
- Models (canonical types).
- Exporters (consume merged canonical).
- Core/WebService (jobs: merge:run, maybe per-kind).
## Interfaces & contracts
- AdvisoryMergeService.MergeAsync(ids or byKind): returns summary {processed, merged, overrides, conflicts}.
- Precedence table configurable but with sane defaults: RedHat/Ubuntu/Debian/SUSE > Vendor PSIRT > GHSA/OSV > NVD; CERTs enrich; KEV sets flags.
- Range selection uses comparers: NevraComparer, DebEvrComparer, SemVerRange; deterministic tie-breakers.
- Provenance propagation merges unique entries; references deduped by (url, type).
## Configuration
- Precedence overrides bind via `concelier:merge:precedence:ranks` (a dictionary of `source` → `rank`; lower rank wins; see the binding sketch after this list). Absent entries fall back to defaults.
- Operator workflow: update `etc/concelier.yaml` or environment variables, restart merge job; overrides surface in metrics/logs as `AdvisoryOverride` entries.
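A minimal binding sketch (in-memory keys stand in for `etc/concelier.yaml` or environment variables; the rank values are illustrative):

```csharp
using System.Collections.Generic;
using Microsoft.Extensions.Configuration;
using StellaOps.Concelier.Merge.Options;

// Illustrative stand-in for the operator-supplied configuration.
var configuration = new ConfigurationBuilder()
    .AddInMemoryCollection(new Dictionary<string, string?>
    {
        // Lower rank wins; unlisted sources keep their built-in defaults.
        ["concelier:merge:precedence:ranks:ghsa"] = "0",
        ["concelier:merge:precedence:ranks:nvd"] = "1",
    })
    .Build();

// The merge module binds this same section during AddMergeModule registration.
var overrides = configuration
    .GetSection("concelier:merge:precedence")
    .Get<AdvisoryPrecedenceOptions>();
// overrides.Ranks now holds { "ghsa": 0, "nvd": 1 }, layered over the defaults at startup.
```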
## In/Out of scope
In: merge logic, precedence policy, hashing, event records, comparers.
Out: fetching/parsing, exporter packaging, signing.
## Observability & security expectations
- Metrics: merge.delta.count, merge.identity.conflicts, merge.range.overrides, merge.duration_ms.
- Logs: decisions (why replaced), keys involved, hashes; avoid dumping large blobs; redact secrets (none expected).
## Tests
- Author and review coverage in `../StellaOps.Concelier.Merge.Tests`.
- Shared fixtures (e.g., `MongoIntegrationFixture`, `ConnectorTestHarness`) live in `../StellaOps.Concelier.Testing`.
- Keep fixtures deterministic; match new cases to real-world advisories or regression scenarios.

View File

@@ -0,0 +1 @@
// Intentionally left blank; types moved into dedicated files.

View File

@@ -0,0 +1,232 @@
namespace StellaOps.Concelier.Merge.Comparers;
using System;
using StellaOps.Concelier.Normalization.Distro;
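/// <summary>
/// Deterministic comparer for Debian epoch:version-revision (EVR) strings. Tilde segments order before
/// anything else (pre-release semantics); values that fail to parse fall back to ordinal comparison and
/// order before parsed values.
/// </summary>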
public sealed class DebianEvrComparer : IComparer<DebianEvr>, IComparer<string>
{
public static DebianEvrComparer Instance { get; } = new();
private DebianEvrComparer()
{
}
public int Compare(string? x, string? y)
{
if (ReferenceEquals(x, y))
{
return 0;
}
if (x is null)
{
return -1;
}
if (y is null)
{
return 1;
}
var xParsed = DebianEvr.TryParse(x, out var xEvr);
var yParsed = DebianEvr.TryParse(y, out var yEvr);
if (xParsed && yParsed)
{
return Compare(xEvr, yEvr);
}
if (xParsed)
{
return 1;
}
if (yParsed)
{
return -1;
}
return string.Compare(x, y, StringComparison.Ordinal);
}
public int Compare(DebianEvr? x, DebianEvr? y)
{
if (ReferenceEquals(x, y))
{
return 0;
}
if (x is null)
{
return -1;
}
if (y is null)
{
return 1;
}
var compare = x.Epoch.CompareTo(y.Epoch);
if (compare != 0)
{
return compare;
}
compare = CompareSegment(x.Version, y.Version);
if (compare != 0)
{
return compare;
}
compare = CompareSegment(x.Revision, y.Revision);
if (compare != 0)
{
return compare;
}
return string.Compare(x.Original, y.Original, StringComparison.Ordinal);
}
private static int CompareSegment(string left, string right)
{
var i = 0;
var j = 0;
while (i < left.Length || j < right.Length)
{
while (i < left.Length && !IsAlphaNumeric(left[i]) && left[i] != '~')
{
i++;
}
while (j < right.Length && !IsAlphaNumeric(right[j]) && right[j] != '~')
{
j++;
}
var leftChar = i < left.Length ? left[i] : '\0';
var rightChar = j < right.Length ? right[j] : '\0';
if (leftChar == '~' || rightChar == '~')
{
if (leftChar != rightChar)
{
return leftChar == '~' ? -1 : 1;
}
i += leftChar == '~' ? 1 : 0;
j += rightChar == '~' ? 1 : 0;
continue;
}
var leftIsDigit = char.IsDigit(leftChar);
var rightIsDigit = char.IsDigit(rightChar);
if (leftIsDigit && rightIsDigit)
{
var leftStart = i;
while (i < left.Length && char.IsDigit(left[i]))
{
i++;
}
var rightStart = j;
while (j < right.Length && char.IsDigit(right[j]))
{
j++;
}
var leftTrimmed = leftStart;
while (leftTrimmed < i && left[leftTrimmed] == '0')
{
leftTrimmed++;
}
var rightTrimmed = rightStart;
while (rightTrimmed < j && right[rightTrimmed] == '0')
{
rightTrimmed++;
}
var leftLength = i - leftTrimmed;
var rightLength = j - rightTrimmed;
if (leftLength != rightLength)
{
return leftLength.CompareTo(rightLength);
}
var comparison = left.AsSpan(leftTrimmed, leftLength)
.CompareTo(right.AsSpan(rightTrimmed, rightLength), StringComparison.Ordinal);
if (comparison != 0)
{
return comparison;
}
continue;
}
if (leftIsDigit)
{
return 1;
}
if (rightIsDigit)
{
return -1;
}
var leftOrder = CharOrder(leftChar);
var rightOrder = CharOrder(rightChar);
var orderComparison = leftOrder.CompareTo(rightOrder);
if (orderComparison != 0)
{
return orderComparison;
}
if (leftChar != rightChar)
{
return leftChar.CompareTo(rightChar);
}
if (leftChar == '\0')
{
return 0;
}
i++;
j++;
}
return 0;
}
private static bool IsAlphaNumeric(char value)
=> char.IsLetterOrDigit(value);
private static int CharOrder(char value)
{
if (value == '\0')
{
return 0;
}
if (value == '~')
{
return -1;
}
if (char.IsDigit(value))
{
return 0;
}
if (char.IsLetter(value))
{
return value;
}
return value + 256;
}
}
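A usage sketch, assuming `DebianEvr.TryParse` accepts the usual `epoch:upstream-revision` shapes (the exact accepted formats live in `StellaOps.Concelier.Normalization.Distro`):

```csharp
using System;
using StellaOps.Concelier.Merge.Comparers;

var comparer = DebianEvrComparer.Instance;

// Tilde marks a pre-release in Debian versioning, so 1.0~rc1 orders before 1.0.
Console.WriteLine(comparer.Compare("1.0~rc1-1", "1.0-1") < 0);   // expected: True

// Epoch dominates the upstream version.
Console.WriteLine(comparer.Compare("1:0.9-1", "0:2.0-1") > 0);   // expected: True

// Strings that fail to parse fall back to ordinal comparison and order before parsed values.
```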

View File

@@ -0,0 +1,264 @@
namespace StellaOps.Concelier.Merge.Comparers;
using System;
using StellaOps.Concelier.Normalization.Distro;
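/// <summary>
/// Deterministic comparer for RPM NEVRA values, ordering by name, architecture, and epoch before applying
/// rpm-style version and release comparison; strings that fail to parse order before parsed values.
/// </summary>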
public sealed class NevraComparer : IComparer<Nevra>, IComparer<string>
{
public static NevraComparer Instance { get; } = new();
private NevraComparer()
{
}
public int Compare(string? x, string? y)
{
if (ReferenceEquals(x, y))
{
return 0;
}
if (x is null)
{
return -1;
}
if (y is null)
{
return 1;
}
var xParsed = Nevra.TryParse(x, out var xNevra);
var yParsed = Nevra.TryParse(y, out var yNevra);
if (xParsed && yParsed)
{
return Compare(xNevra, yNevra);
}
if (xParsed)
{
return 1;
}
if (yParsed)
{
return -1;
}
return string.Compare(x, y, StringComparison.Ordinal);
}
public int Compare(Nevra? x, Nevra? y)
{
if (ReferenceEquals(x, y))
{
return 0;
}
if (x is null)
{
return -1;
}
if (y is null)
{
return 1;
}
var compare = string.Compare(x.Name, y.Name, StringComparison.Ordinal);
if (compare != 0)
{
return compare;
}
compare = string.Compare(x.Architecture ?? string.Empty, y.Architecture ?? string.Empty, StringComparison.Ordinal);
if (compare != 0)
{
return compare;
}
compare = x.Epoch.CompareTo(y.Epoch);
if (compare != 0)
{
return compare;
}
compare = RpmVersionComparer.Compare(x.Version, y.Version);
if (compare != 0)
{
return compare;
}
compare = RpmVersionComparer.Compare(x.Release, y.Release);
if (compare != 0)
{
return compare;
}
return string.Compare(x.Original, y.Original, StringComparison.Ordinal);
}
}
internal static class RpmVersionComparer
{
public static int Compare(string? left, string? right)
{
left ??= string.Empty;
right ??= string.Empty;
var i = 0;
var j = 0;
while (true)
{
var leftHasTilde = SkipToNextSegment(left, ref i);
var rightHasTilde = SkipToNextSegment(right, ref j);
if (leftHasTilde || rightHasTilde)
{
if (leftHasTilde && rightHasTilde)
{
continue;
}
return leftHasTilde ? -1 : 1;
}
var leftEnd = i >= left.Length;
var rightEnd = j >= right.Length;
if (leftEnd || rightEnd)
{
if (leftEnd && rightEnd)
{
return 0;
}
return leftEnd ? -1 : 1;
}
var leftDigit = char.IsDigit(left[i]);
var rightDigit = char.IsDigit(right[j]);
if (leftDigit && !rightDigit)
{
return 1;
}
if (!leftDigit && rightDigit)
{
return -1;
}
int compare;
if (leftDigit)
{
compare = CompareNumericSegment(left, ref i, right, ref j);
}
else
{
compare = CompareAlphaSegment(left, ref i, right, ref j);
}
if (compare != 0)
{
return compare;
}
}
}
private static bool SkipToNextSegment(string value, ref int index)
{
var sawTilde = false;
while (index < value.Length)
{
var current = value[index];
if (current == '~')
{
sawTilde = true;
index++;
break;
}
if (char.IsLetterOrDigit(current))
{
break;
}
index++;
}
return sawTilde;
}
private static int CompareNumericSegment(string value, ref int index, string other, ref int otherIndex)
{
var start = index;
while (index < value.Length && char.IsDigit(value[index]))
{
index++;
}
var otherStart = otherIndex;
while (otherIndex < other.Length && char.IsDigit(other[otherIndex]))
{
otherIndex++;
}
var trimmedStart = start;
while (trimmedStart < index && value[trimmedStart] == '0')
{
trimmedStart++;
}
var otherTrimmedStart = otherStart;
while (otherTrimmedStart < otherIndex && other[otherTrimmedStart] == '0')
{
otherTrimmedStart++;
}
var length = index - trimmedStart;
var otherLength = otherIndex - otherTrimmedStart;
if (length != otherLength)
{
return length.CompareTo(otherLength);
}
var comparison = value.AsSpan(trimmedStart, length)
.CompareTo(other.AsSpan(otherTrimmedStart, otherLength), StringComparison.Ordinal);
if (comparison != 0)
{
return comparison;
}
return 0;
}
private static int CompareAlphaSegment(string value, ref int index, string other, ref int otherIndex)
{
var start = index;
while (index < value.Length && char.IsLetter(value[index]))
{
index++;
}
var otherStart = otherIndex;
while (otherIndex < other.Length && char.IsLetter(other[otherIndex]))
{
otherIndex++;
}
var length = index - start;
var otherLength = otherIndex - otherStart;
var comparison = value.AsSpan(start, length)
.CompareTo(other.AsSpan(otherStart, otherLength), StringComparison.Ordinal);
if (comparison != 0)
{
return comparison;
}
return 0;
}
}
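A usage sketch, assuming `Nevra.TryParse` accepts the conventional `name-epoch:version-release.arch` form:

```csharp
using System;
using StellaOps.Concelier.Merge.Comparers;

var comparer = NevraComparer.Instance;

// 4.4.20-1.el7 supersedes 4.4.18-2.el7 for the same package and architecture.
Console.WriteLine(comparer.Compare(
    "bash-0:4.4.18-2.el7.x86_64",
    "bash-0:4.4.20-1.el7.x86_64") < 0);              // expected: True

// Epoch outranks the version digits.
Console.WriteLine(comparer.Compare(
    "bash-1:1.0-1.el7.x86_64",
    "bash-0:9.9-1.el7.x86_64") > 0);                 // expected: True
```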

View File

@@ -0,0 +1,73 @@
namespace StellaOps.Concelier.Merge.Comparers;
using System.Diagnostics.CodeAnalysis;
using Semver;
/// <summary>
/// Provides helpers to interpret introduced/fixed/lastAffected SemVer ranges and compare versions.
/// </summary>
public static class SemanticVersionRangeResolver
{
public static bool TryParse(string? value, [NotNullWhen(true)] out SemVersion? result)
=> SemVersion.TryParse(value, SemVersionStyles.Any, out result);
public static SemVersion Parse(string value)
=> SemVersion.Parse(value, SemVersionStyles.Any);
/// <summary>
/// Resolves the effective start and end versions using introduced/fixed/lastAffected semantics.
/// </summary>
public static (SemVersion? introduced, SemVersion? exclusiveUpperBound, SemVersion? inclusiveUpperBound) ResolveWindows(
string? introduced,
string? fixedVersion,
string? lastAffected)
{
var introducedVersion = TryParse(introduced, out var parsedIntroduced) ? parsedIntroduced : null;
var fixedVersionParsed = TryParse(fixedVersion, out var parsedFixed) ? parsedFixed : null;
var lastAffectedVersion = TryParse(lastAffected, out var parsedLast) ? parsedLast : null;
SemVersion? exclusiveUpper = null;
SemVersion? inclusiveUpper = null;
if (fixedVersionParsed is not null)
{
exclusiveUpper = fixedVersionParsed;
}
else if (lastAffectedVersion is not null)
{
inclusiveUpper = lastAffectedVersion;
exclusiveUpper = NextPatch(lastAffectedVersion);
}
return (introducedVersion, exclusiveUpper, inclusiveUpper);
}
public static int Compare(string? left, string? right)
{
var leftParsed = TryParse(left, out var leftSemver);
var rightParsed = TryParse(right, out var rightSemver);
if (leftParsed && rightParsed)
{
return SemVersion.CompareSortOrder(leftSemver, rightSemver);
}
if (leftParsed)
{
return 1;
}
if (rightParsed)
{
return -1;
}
return string.Compare(left, right, StringComparison.Ordinal);
}
private static SemVersion NextPatch(SemVersion version)
{
return new SemVersion(version.Major, version.Minor, version.Patch + 1);
}
}
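A sketch of the introduced/fixed/lastAffected window semantics (version values are illustrative):

```csharp
using StellaOps.Concelier.Merge.Comparers;

// A fixed version becomes the exclusive upper bound; no inclusive bound is produced.
var (start, exclusiveUpper, inclusiveUpper) =
    SemanticVersionRangeResolver.ResolveWindows("1.2.0", "1.4.5", null);
// start = 1.2.0, exclusiveUpper = 1.4.5, inclusiveUpper = null

// lastAffected becomes the inclusive upper bound and NextPatch synthesizes an exclusive one.
var (_, syntheticExclusive, inclusiveBound) =
    SemanticVersionRangeResolver.ResolveWindows(null, null, "2.0.3");
// inclusiveBound = 2.0.3, syntheticExclusive = 2.0.4
```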

View File

@@ -0,0 +1,56 @@
using System;
using System.Collections.Generic;
using System.Collections.Immutable;
using System.Linq;
using StellaOps.Concelier.Models;
namespace StellaOps.Concelier.Merge.Identity;
/// <summary>
/// Represents a connected component of advisories that refer to the same vulnerability.
/// </summary>
public sealed class AdvisoryIdentityCluster
{
public AdvisoryIdentityCluster(string advisoryKey, IEnumerable<Advisory> advisories, IEnumerable<AliasIdentity> aliases)
{
AdvisoryKey = !string.IsNullOrWhiteSpace(advisoryKey)
? advisoryKey.Trim()
: throw new ArgumentException("Canonical advisory key must be provided.", nameof(advisoryKey));
var advisoriesArray = (advisories ?? throw new ArgumentNullException(nameof(advisories)))
.Where(static advisory => advisory is not null)
.OrderBy(static advisory => advisory.AdvisoryKey, StringComparer.OrdinalIgnoreCase)
.ThenBy(static advisory => advisory.Provenance.Length)
.ThenBy(static advisory => advisory.Title, StringComparer.OrdinalIgnoreCase)
.ToImmutableArray();
if (advisoriesArray.IsDefaultOrEmpty)
{
throw new ArgumentException("At least one advisory is required for a cluster.", nameof(advisories));
}
var aliasArray = (aliases ?? throw new ArgumentNullException(nameof(aliases)))
.Where(static alias => alias is not null && !string.IsNullOrWhiteSpace(alias.Value))
.GroupBy(static alias => alias.Value, StringComparer.OrdinalIgnoreCase)
.Select(static group =>
{
var representative = group
.OrderBy(static entry => entry.Scheme ?? string.Empty, StringComparer.OrdinalIgnoreCase)
.ThenBy(static entry => entry.Value, StringComparer.OrdinalIgnoreCase)
.First();
return representative;
})
.OrderBy(static alias => alias.Scheme ?? string.Empty, StringComparer.OrdinalIgnoreCase)
.ThenBy(static alias => alias.Value, StringComparer.OrdinalIgnoreCase)
.ToImmutableArray();
Advisories = advisoriesArray;
Aliases = aliasArray;
}
public string AdvisoryKey { get; }
public ImmutableArray<Advisory> Advisories { get; }
public ImmutableArray<AliasIdentity> Aliases { get; }
}

View File

@@ -0,0 +1,303 @@
using System;
using System.Collections.Generic;
using System.Collections.Immutable;
using System.Linq;
using System.Runtime.CompilerServices;
using StellaOps.Concelier.Models;
namespace StellaOps.Concelier.Merge.Identity;
/// <summary>
/// Builds an alias-driven identity graph that groups advisories referring to the same vulnerability.
/// </summary>
public sealed class AdvisoryIdentityResolver
{
private static readonly string[] CanonicalAliasPriority =
{
AliasSchemes.Cve,
AliasSchemes.Rhsa,
AliasSchemes.Usn,
AliasSchemes.Dsa,
AliasSchemes.SuseSu,
AliasSchemes.Msrc,
AliasSchemes.CiscoSa,
AliasSchemes.OracleCpu,
AliasSchemes.Vmsa,
AliasSchemes.Apsb,
AliasSchemes.Apa,
AliasSchemes.AppleHt,
AliasSchemes.ChromiumPost,
AliasSchemes.Icsa,
AliasSchemes.Jvndb,
AliasSchemes.Jvn,
AliasSchemes.Bdu,
AliasSchemes.Vu,
AliasSchemes.Ghsa,
AliasSchemes.OsV,
};
/// <summary>
/// Groups the provided advisories into identity clusters using normalized aliases.
/// </summary>
public IReadOnlyList<AdvisoryIdentityCluster> Resolve(IEnumerable<Advisory> advisories)
{
ArgumentNullException.ThrowIfNull(advisories);
var materialized = advisories
.Where(static advisory => advisory is not null)
.Distinct()
.ToArray();
if (materialized.Length == 0)
{
return Array.Empty<AdvisoryIdentityCluster>();
}
var aliasIndex = BuildAliasIndex(materialized);
var visited = new HashSet<Advisory>();
var clusters = new List<AdvisoryIdentityCluster>();
foreach (var advisory in materialized)
{
if (!visited.Add(advisory))
{
continue;
}
var component = TraverseComponent(advisory, visited, aliasIndex);
var key = DetermineCanonicalKey(component);
var aliases = component
.SelectMany(static entry => entry.Aliases)
.Select(static alias => new AliasIdentity(alias.Normalized, alias.Scheme));
clusters.Add(new AdvisoryIdentityCluster(key, component.Select(static entry => entry.Advisory), aliases));
}
return clusters
.OrderBy(static cluster => cluster.AdvisoryKey, StringComparer.OrdinalIgnoreCase)
.ToImmutableArray();
}
private static Dictionary<string, List<AdvisoryAliasEntry>> BuildAliasIndex(IEnumerable<Advisory> advisories)
{
var index = new Dictionary<string, List<AdvisoryAliasEntry>>(StringComparer.OrdinalIgnoreCase);
foreach (var advisory in advisories)
{
foreach (var alias in ExtractAliases(advisory))
{
if (!index.TryGetValue(alias.Normalized, out var list))
{
list = new List<AdvisoryAliasEntry>();
index[alias.Normalized] = list;
}
list.Add(new AdvisoryAliasEntry(advisory, alias.Normalized, alias.Scheme));
}
}
return index;
}
private static IReadOnlyList<AliasBinding> TraverseComponent(
Advisory root,
HashSet<Advisory> visited,
Dictionary<string, List<AdvisoryAliasEntry>> aliasIndex)
{
var stack = new Stack<Advisory>();
stack.Push(root);
var bindings = new Dictionary<Advisory, AliasBinding>(ReferenceEqualityComparer<Advisory>.Instance);
while (stack.Count > 0)
{
var advisory = stack.Pop();
if (!bindings.TryGetValue(advisory, out var binding))
{
binding = new AliasBinding(advisory);
bindings[advisory] = binding;
}
foreach (var alias in ExtractAliases(advisory))
{
binding.AddAlias(alias.Normalized, alias.Scheme);
if (!aliasIndex.TryGetValue(alias.Normalized, out var neighbors))
{
continue;
}
foreach (var neighbor in neighbors.Select(static entry => entry.Advisory))
{
if (visited.Add(neighbor))
{
stack.Push(neighbor);
}
if (!bindings.TryGetValue(neighbor, out var neighborBinding))
{
neighborBinding = new AliasBinding(neighbor);
bindings[neighbor] = neighborBinding;
}
neighborBinding.AddAlias(alias.Normalized, alias.Scheme);
}
}
}
return bindings.Values
.OrderBy(static binding => binding.Advisory.AdvisoryKey, StringComparer.OrdinalIgnoreCase)
.ToImmutableArray();
}
private static string DetermineCanonicalKey(IReadOnlyList<AliasBinding> component)
{
var aliases = component
.SelectMany(static binding => binding.Aliases)
.Where(static alias => !string.IsNullOrWhiteSpace(alias.Normalized))
.ToArray();
foreach (var scheme in CanonicalAliasPriority)
{
var candidate = aliases
.Where(alias => string.Equals(alias.Scheme, scheme, StringComparison.OrdinalIgnoreCase))
.Select(static alias => alias.Normalized)
.OrderBy(static alias => alias, StringComparer.OrdinalIgnoreCase)
.FirstOrDefault();
if (candidate is not null)
{
return candidate;
}
}
var fallbackAlias = aliases
.Select(static alias => alias.Normalized)
.OrderBy(static alias => alias, StringComparer.OrdinalIgnoreCase)
.FirstOrDefault();
if (!string.IsNullOrWhiteSpace(fallbackAlias))
{
return fallbackAlias;
}
var advisoryKey = component
.Select(static binding => binding.Advisory.AdvisoryKey)
.Where(static value => !string.IsNullOrWhiteSpace(value))
.OrderBy(static value => value, StringComparer.OrdinalIgnoreCase)
.FirstOrDefault();
if (!string.IsNullOrWhiteSpace(advisoryKey))
{
return advisoryKey.Trim();
}
throw new InvalidOperationException("Unable to determine canonical advisory key for cluster.");
}
private static IEnumerable<AliasProjection> ExtractAliases(Advisory advisory)
{
var seen = new HashSet<string>(StringComparer.OrdinalIgnoreCase);
foreach (var candidate in EnumerateAliasCandidates(advisory))
{
if (string.IsNullOrWhiteSpace(candidate))
{
continue;
}
var trimmed = candidate.Trim();
if (!seen.Add(trimmed))
{
continue;
}
if (AliasSchemeRegistry.TryNormalize(trimmed, out var normalized, out var scheme) &&
!string.IsNullOrWhiteSpace(normalized))
{
yield return new AliasProjection(normalized.Trim(), string.IsNullOrWhiteSpace(scheme) ? null : scheme);
}
else if (!string.IsNullOrWhiteSpace(normalized))
{
yield return new AliasProjection(normalized.Trim(), null);
}
}
}
private static IEnumerable<string> EnumerateAliasCandidates(Advisory advisory)
{
if (!string.IsNullOrWhiteSpace(advisory.AdvisoryKey))
{
yield return advisory.AdvisoryKey;
}
if (!advisory.Aliases.IsDefaultOrEmpty)
{
foreach (var alias in advisory.Aliases)
{
if (!string.IsNullOrWhiteSpace(alias))
{
yield return alias;
}
}
}
}
private readonly record struct AdvisoryAliasEntry(Advisory Advisory, string Normalized, string? Scheme);
private readonly record struct AliasProjection(string Normalized, string? Scheme);
private sealed class AliasBinding
{
private readonly HashSet<AliasProjection> _aliases = new(HashSetAliasComparer.Instance);
public AliasBinding(Advisory advisory)
{
Advisory = advisory ?? throw new ArgumentNullException(nameof(advisory));
}
public Advisory Advisory { get; }
public IReadOnlyCollection<AliasProjection> Aliases => _aliases;
public void AddAlias(string normalized, string? scheme)
{
if (string.IsNullOrWhiteSpace(normalized))
{
return;
}
_aliases.Add(new AliasProjection(normalized.Trim(), scheme is null ? null : scheme.Trim()));
}
}
private sealed class HashSetAliasComparer : IEqualityComparer<AliasProjection>
{
public static readonly HashSetAliasComparer Instance = new();
public bool Equals(AliasProjection x, AliasProjection y)
=> string.Equals(x.Normalized, y.Normalized, StringComparison.OrdinalIgnoreCase)
&& string.Equals(x.Scheme, y.Scheme, StringComparison.OrdinalIgnoreCase);
public int GetHashCode(AliasProjection obj)
{
var hash = StringComparer.OrdinalIgnoreCase.GetHashCode(obj.Normalized);
if (!string.IsNullOrWhiteSpace(obj.Scheme))
{
hash = HashCode.Combine(hash, StringComparer.OrdinalIgnoreCase.GetHashCode(obj.Scheme));
}
return hash;
}
}
private sealed class ReferenceEqualityComparer<T> : IEqualityComparer<T>
where T : class
{
public static readonly ReferenceEqualityComparer<T> Instance = new();
public bool Equals(T? x, T? y) => ReferenceEquals(x, y);
public int GetHashCode(T obj) => RuntimeHelpers.GetHashCode(obj);
}
}

View File

@@ -0,0 +1,24 @@
using System;
namespace StellaOps.Concelier.Merge.Identity;
/// <summary>
/// Normalized alias representation used within identity clusters.
/// </summary>
public sealed class AliasIdentity
{
public AliasIdentity(string value, string? scheme)
{
if (string.IsNullOrWhiteSpace(value))
{
throw new ArgumentException("Alias value must be provided.", nameof(value));
}
Value = value.Trim();
Scheme = string.IsNullOrWhiteSpace(scheme) ? null : scheme.Trim();
}
public string Value { get; }
public string? Scheme { get; }
}

View File

@@ -0,0 +1,6 @@
namespace StellaOps.Concelier.Merge.Jobs;
internal static class MergeJobKinds
{
public const string Reconcile = "merge:reconcile";
}

View File

@@ -0,0 +1,43 @@
using System;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.Extensions.Logging;
using StellaOps.Concelier.Core.Jobs;
using StellaOps.Concelier.Merge.Services;
namespace StellaOps.Concelier.Merge.Jobs;
public sealed class MergeReconcileJob : IJob
{
private readonly AdvisoryMergeService _mergeService;
private readonly ILogger<MergeReconcileJob> _logger;
public MergeReconcileJob(AdvisoryMergeService mergeService, ILogger<MergeReconcileJob> logger)
{
_mergeService = mergeService ?? throw new ArgumentNullException(nameof(mergeService));
_logger = logger ?? throw new ArgumentNullException(nameof(logger));
}
public async Task ExecuteAsync(JobExecutionContext context, CancellationToken cancellationToken)
{
if (!context.Parameters.TryGetValue("seed", out var seedValue) || seedValue is not string seed || string.IsNullOrWhiteSpace(seed))
{
context.Logger.LogWarning("merge:reconcile job requires a non-empty 'seed' parameter.");
return;
}
var result = await _mergeService.MergeAsync(seed, cancellationToken).ConfigureAwait(false);
if (result.Merged is null)
{
_logger.LogInformation("No advisories available to merge for alias component seeded by {Seed}", seed);
return;
}
_logger.LogInformation(
"Merged alias component seeded by {Seed} into canonical {Canonical} using {Count} advisories; collisions={Collisions}",
seed,
result.CanonicalAdvisoryKey,
result.Inputs.Count,
result.Component.Collisions.Count);
}
}

View File

@@ -0,0 +1,43 @@
using Microsoft.Extensions.Configuration;
using Microsoft.Extensions.DependencyInjection;
using Microsoft.Extensions.DependencyInjection.Extensions;
using Microsoft.Extensions.Logging;
using StellaOps.Concelier.Core;
using StellaOps.Concelier.Merge.Jobs;
using StellaOps.Concelier.Merge.Options;
using StellaOps.Concelier.Merge.Services;
namespace StellaOps.Concelier.Merge;
public static class MergeServiceCollectionExtensions
{
public static IServiceCollection AddMergeModule(this IServiceCollection services, IConfiguration configuration)
{
ArgumentNullException.ThrowIfNull(services);
ArgumentNullException.ThrowIfNull(configuration);
services.TryAddSingleton<CanonicalHashCalculator>();
services.TryAddSingleton<CanonicalMerger>();
services.TryAddSingleton<AliasGraphResolver>();
services.TryAddSingleton<AffectedPackagePrecedenceResolver>(sp =>
{
var options = configuration.GetSection("concelier:merge:precedence").Get<AdvisoryPrecedenceOptions>();
return options is null ? new AffectedPackagePrecedenceResolver() : new AffectedPackagePrecedenceResolver(options);
});
services.TryAddSingleton<AdvisoryPrecedenceMerger>(sp =>
{
var resolver = sp.GetRequiredService<AffectedPackagePrecedenceResolver>();
var options = configuration.GetSection("concelier:merge:precedence").Get<AdvisoryPrecedenceOptions>();
var timeProvider = sp.GetRequiredService<TimeProvider>();
var logger = sp.GetRequiredService<ILogger<AdvisoryPrecedenceMerger>>();
return new AdvisoryPrecedenceMerger(resolver, options, timeProvider, logger);
});
services.TryAddSingleton<MergeEventWriter>();
services.TryAddSingleton<AdvisoryMergeService>();
services.AddTransient<MergeReconcileJob>();
return services;
}
}
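A wiring sketch (logging, `TimeProvider`, and the Storage.Mongo registrations that back `AdvisoryMergeService` are assumed to be provided by the host; the CVE key is illustrative):

```csharp
using System;
using System.Threading;
using Microsoft.Extensions.Configuration;
using Microsoft.Extensions.DependencyInjection;
using StellaOps.Concelier.Merge;
using StellaOps.Concelier.Merge.Services;

var configuration = new ConfigurationBuilder().Build();   // the host normally loads etc/concelier.yaml here

var services = new ServiceCollection()
    .AddLogging()
    .AddSingleton(TimeProvider.System)
    .AddMergeModule(configuration);
// Storage.Mongo registrations (IAdvisoryStore, alias and merge_event stores) must also be added for resolution to succeed.

await using var provider = services.BuildServiceProvider();
var mergeService = provider.GetRequiredService<AdvisoryMergeService>();
var result = await mergeService.MergeAsync("CVE-2025-1234", CancellationToken.None);
Console.WriteLine($"canonical={result.CanonicalAdvisoryKey}, inputs={result.Inputs.Count}");
```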

View File

@@ -0,0 +1,96 @@
using System;
using System.Collections.Generic;
namespace StellaOps.Concelier.Merge.Options;
/// <summary>
/// Provides the built-in precedence table used by the merge engine when no overrides are supplied.
/// </summary>
internal static class AdvisoryPrecedenceDefaults
{
public static IReadOnlyDictionary<string, int> Rankings { get; } = CreateDefaultTable();
private static IReadOnlyDictionary<string, int> CreateDefaultTable()
{
var table = new Dictionary<string, int>(StringComparer.OrdinalIgnoreCase);
// 0 distro PSIRTs/OVAL feeds (authoritative for OS packages).
Add(table, 0,
"redhat",
"ubuntu",
"distro-ubuntu",
"debian",
"distro-debian",
"suse",
"distro-suse");
// 1 vendor PSIRTs (authoritative product advisories).
Add(table, 1,
"msrc",
"vndr-msrc",
"vndr-oracle",
"vndr_oracle",
"oracle",
"vndr-adobe",
"adobe",
"vndr-apple",
"apple",
"vndr-cisco",
"cisco",
"vmware",
"vndr-vmware",
"vndr_vmware",
"vndr-chromium",
"chromium",
"vendor");
// 2 ecosystem registries (OSS package maintainers).
Add(table, 2,
"ghsa",
"osv",
"cve");
// 3 regional CERT / ICS enrichment feeds.
Add(table, 3,
"jvn",
"acsc",
"cccs",
"cert-fr",
"certfr",
"cert-in",
"certin",
"cert-cc",
"certcc",
"certbund",
"cert-bund",
"ru-bdu",
"ru-nkcki",
"kisa",
"ics-cisa",
"ics-kaspersky");
// 4 KEV / exploit catalogue annotations (flag only).
Add(table, 4,
"kev",
"cisa-kev");
// 5 public registries (baseline data).
Add(table, 5,
"nvd");
return table;
}
private static void Add(IDictionary<string, int> table, int rank, params string[] sources)
{
foreach (var source in sources)
{
if (string.IsNullOrWhiteSpace(source))
{
continue;
}
table[source] = rank;
}
}
}

View File

@@ -0,0 +1,15 @@
using System;
using System.Collections.Generic;
namespace StellaOps.Concelier.Merge.Options;
/// <summary>
/// Configurable precedence overrides for advisory sources.
/// </summary>
public sealed class AdvisoryPrecedenceOptions
{
/// <summary>
/// Mapping of provenance source identifiers to precedence ranks. Lower numbers take precedence.
/// </summary>
public IDictionary<string, int> Ranks { get; init; } = new Dictionary<string, int>(StringComparer.OrdinalIgnoreCase);
}

View File

@@ -0,0 +1,35 @@
using System;
using System.Collections.Generic;
namespace StellaOps.Concelier.Merge.Options;
internal static class AdvisoryPrecedenceTable
{
public static IReadOnlyDictionary<string, int> Merge(
IReadOnlyDictionary<string, int> defaults,
AdvisoryPrecedenceOptions? options)
{
if (defaults is null)
{
throw new ArgumentNullException(nameof(defaults));
}
if (options?.Ranks is null || options.Ranks.Count == 0)
{
return defaults;
}
var merged = new Dictionary<string, int>(defaults, StringComparer.OrdinalIgnoreCase);
foreach (var kvp in options.Ranks)
{
if (string.IsNullOrWhiteSpace(kvp.Key))
{
continue;
}
merged[kvp.Key.Trim()] = kvp.Value;
}
return merged;
}
}
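A sketch of supplying overrides programmatically; the options-aware `AdvisoryPrecedenceMerger` constructor layers them onto `AdvisoryPrecedenceDefaults.Rankings` through this table (namespaces and the rank value are assumptions):

```csharp
using System;
using System.Collections.Generic;
using StellaOps.Concelier.Merge.Options;
using StellaOps.Concelier.Merge.Services;

var options = new AdvisoryPrecedenceOptions
{
    Ranks = new Dictionary<string, int>(StringComparer.OrdinalIgnoreCase)
    {
        // Promote GHSA to the top rank for this deployment; unlisted sources keep their defaults.
        ["ghsa"] = 0,
    },
};

var merger = new AdvisoryPrecedenceMerger(
    new AffectedPackagePrecedenceResolver(),
    options,
    TimeProvider.System);
```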

View File

@@ -0,0 +1,95 @@
# Range Primitive Coordination (Sprint 2)
_Status date: 2025-10-11_
## Why this exists
- SemVer range outputs must follow the embedded rule guidance in `../FASTER_MODELING_AND_NORMALIZATION.md` (array of `{scheme,type,min/max/value,notes}`).
- Merge will rely on normalized rules plus existing `RangePrimitives` (SemVer/NEVRA/EVR) to dedupe ranges and compute deterministic hashes.
- Connector teams are mid-flight; this playbook restarts coordination so every feed delivers the normalized payload needed by the conflict resolver work in Sprint 3.
## Upstream dependencies
- **Models** (`FEEDMODELS-SCHEMA-01-003`, `FEEDMODELS-SCHEMA-02-900`) extends `RangePrimitives.SemVer` metadata and introduces `NormalizedVersionRule` arrays on affected packages.
- **Normalization** (`FEEDNORM-NORM-02-001`) provides `SemVerRangeRuleBuilder` used by OSS connectors (GHSA/OSV/NVD) to emit canonical rule docs plus provenance notes.
- **Storage.Mongo** (`FEEDSTORAGE-DATA-02-001`) dual-write/dual-read modifications for the new arrays; required before staging rollout.
- **Merge** (`FEEDMERGE-ENGINE-02-002`) unions/dedupes normalized rules across sources once connectors publish them.
Until these blocks land, connectors should stage changes behind a feature flag or fixture branch so we can flip on normalized writes in sync.
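A hypothetical gating sketch for staging normalized writes; only the flag name is taken from this playbook (see the CVE row in the matrix below), everything else is illustrative:

```csharp
using System;

// Hypothetical connector-side gate; connectors would consult it before emitting NormalizedVersions arrays.
internal static class NormalizedVersionsFeature
{
    public static bool Enabled =>
        string.Equals(
            Environment.GetEnvironmentVariable("ENABLE_NORMALIZED_VERSIONS"),
            "true",
            StringComparison.OrdinalIgnoreCase);
}
```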
## Connector adoption matrix
| Connector | Owner team | Current state (2025-10-11) | Required actions for SemVer guidance | Coordination notes |
|-----------|------------|-----------------------------|-------------------------------------|--------------------|
| Acsc | BE-Conn-ACSC | All tasks still TODO | Blocked on initial ingest work; align DTO design with normalized rule array before mapper lands. | Schedule pairing once `SemVerRangeRuleBuilder` API is published; ensure fixtures capture vendor/device taxonomy for provenance notes. |
| Cccs | BE-Conn-CCCS | All tasks still TODO | Same as Acsc; design DTOs with normalized rule hooks from day one. | Provide sample rule snippets in kickoff; share Mongo dual-write plan once storage flag is ready. |
| CertBund | BE-Conn-CERTBUND | All tasks still TODO | Ensure canonical mapper emits vendor range primitives plus normalized rules for product firmware. | Needs language/localisation guidance; coordinate with Localization WG for deterministic casing. |
| CertCc | BE-Conn-CERTCC | Fetch in progress, mapping TODO | Map VINCE vendor/product data into `RangePrimitives` with `certcc.vendor` extensions; build normalized SemVer ranges when version strings surface. | Follow up on 2025-10-14 to review VINCE payload examples and confirm builder requirements. |
| Cve | BE-Conn-CVE | Mapping/tests DONE (legacy SemVer) | Refactor `CveMapper` to call the shared builder and populate `NormalizedVersions` + provenance notes once models land. | Prepare MR behind `ENABLE_NORMALIZED_VERSIONS` flag; regression fixtures already cover version ranges—extend snapshots to cover rule arrays. |
| Ghsa | BE-Conn-GHSA | Normalized rules emitted (2025-10-11) | Maintain SemVer builder integration; share regression diffs if schema shifts occur. | Fixtures refreshed with `ghsa:{identifier}` notes; OSV rollout next in queue—await connector handoff update. |
| Osv | BE-Conn-OSV | Normalized rules emitted (2025-10-11) | Keep SemVer builder wiring current; extend notes if new ecosystems appear. | npm/PyPI parity snapshots updated with `osv:{ecosystem}:{advisoryId}:{identifier}` notes; merge analytics notified. |
| Nvd | BE-Conn-NVD | Normalized rules emitted (2025-10-11) | Maintain SemVer coverage for ecosystem ranges; keep notes aligned with CVE IDs. | CPE ranges now emit semver primitives when versions parse; fixtures refreshed, report sent to FEEDMERGE-COORD-02-900. |
| Cve | BE-Conn-CVE | Normalized rules emitted (2025-10-11) | Maintain SemVer notes for vendor ecosystems; backfill additional fixture coverage as CVE payloads expand. | Connector outputs `cve:{cveId}:{identifier}` notes; npm parity test fixtures updated and merge ping acknowledged. |
| Ics.Cisa | BE-Conn-ICS-CISA | All tasks TODO | When defining product schema, plan for SemVer or vendor version rules (many advisories use firmware revisions). | Gather sample advisories and confirm whether ranges are SemVer or vendor-specific so we can introduce scheme identifiers early. |
| Kisa | BE-Conn-KISA | All tasks TODO | Ensure DTO parsing captures version strings despite localisation; feed into normalized rule builder once ready. | Requires translation samples; request help from Localization WG before mapper implementation. |
| Ru.Bdu | BE-Conn-BDU | All tasks TODO | Map product releases into normalized rules; add provenance notes referencing BDU advisory identifiers. | Verify we have UTF-8 safe handling in builder; share sample sanitized inputs. |
| Ru.Nkcki | BE-Conn-Nkcki | All tasks TODO | Similar to BDU; capture vendor firmware/build numbers and map into normalized rules. | Coordinate with Localization WG for Cyrillic transliteration strategy. |
| Vndr.Apple | BE-Conn-Apple | Mapper/tests/telemetry marked DOING | Continue extending vendor range primitives (`apple.version`, `apple.build`) and adopt normalized rule arrays for OS build spans. | Request builder integration review on 2025-10-16; ensure fixtures cover multi-range tables and include provenance notes. |
| Vndr.Cisco | BE-Conn-Cisco | ✅ Emits SemVer primitives with vendor notes | Parser maps versions into SemVer primitives with `cisco.productId` vendor extensions; sample fixtures landing in `StellaOps.Concelier.Source.Vndr.Cisco.Tests`. | No custom comparer required; SemVer + vendor metadata suffices. |
| Vndr.Msrc | BE-Conn-MSRC | All tasks TODO | Canonical mapper must output product/build coverage as normalized rules (likely `msrc.patch` scheme) with provenance referencing KB IDs. | Sync with Models on adding scheme identifiers for MSRC packages; plan fixture coverage for monthly rollups. |
## Storage alignment quick reference (2025-10-11)
- `NormalizedVersionDocumentFactory` copies each `NormalizedVersionRule` into Mongo with the shape `{ packageId, packageType, scheme, type, style, min, minInclusive, max, maxInclusive, value, notes, decisionReason, constraint, source, recordedAt }`. `style` is currently a direct echo of `type` but reserved for future vendor comparers—no connector action required.
- `constraint` is hydrated only when `NormalizedVersionRule` matches a legacy `VersionRange` primitive. Preserve `notes` (e.g., `nvd:cve-2025-1234`) so storage can join rules back to their provenance and carry decision reasoning.
- Valid `scheme` values today are `semver`, `nevra`, and `evr`. Raise a Models ticket before introducing additional scheme identifiers (e.g., `apple.build`, `ios.semver`).
- Prefer normalized `type` tokens from `NormalizedVersionRuleTypes` (`range`, `exact`, `lt`, `lte`, `gt`, `gte`). Builders already coerce casing/format—avoid custom strings.
- Ensure `AffectedPackage.Identifier`/`Type` and `Provenance` collections are populated; storage falls back to package-level provenance if range-level data is absent, but loses traceability if both are empty.
- Snapshot of an emitted document (SemVer range) for reference:
```json
{
"packageId": "pkg:npm/example",
"packageType": "npm",
"scheme": "semver",
"type": "range",
"style": "range",
"min": "1.2.3",
"minInclusive": true,
"max": "2.0.0",
"maxInclusive": false,
"value": null,
"notes": "ghsa:GHSA-xxxx-yyyy",
"decisionReason": "ghsa-precedence-over-nvd",
"constraint": ">= 1.2.3 < 2.0.0",
"source": "ghsa",
"recordedAt": "2025-10-11T00:00:00Z"
}
```
- For distro sources emitting NEVRA/EVR primitives, expect the same envelope with `scheme` swapped accordingly. Example (`nevra`):
```json
{
"packageId": "bash",
"packageType": "rpm",
"scheme": "nevra",
"type": "range",
"style": "range",
"min": "0:4.4.18-2.el7",
"minInclusive": true,
"max": "0:4.4.20-1.el7",
"maxInclusive": false,
"value": null,
"notes": "redhat:RHSA-2025:1234",
"decisionReason": "rhel-priority-over-nvd",
"constraint": "<= 0:4.4.20-1.el7",
"source": "redhat",
"recordedAt": "2025-10-11T00:00:00Z"
}
```
## Immediate next steps
- Normalization team to share draft `SemVerRangeRuleBuilder` API by **2025-10-13** for review; Merge will circulate feedback within 24 hours.
- Connector owners to prepare fixture pull requests demonstrating sample normalized rule arrays (even if feature-flagged) by **2025-10-17**.
- Merge team will run a cross-connector review on **2025-10-18** to confirm consistent field usage and provenance tagging before enabling merge union logic.
- Schedule held for **2025-10-14 14:00 UTC** to review the CERT/CC staging VINCE advisory sample once `enableDetailMapping` is flipped; capture findings in `#concelier-merge` with snapshot diffs.
## Tracking & follow-up
- Capture connector progress updates in stand-ups twice per week; link PRs/issues back to this document and the rollout dashboard (`docs/dev/normalized_versions_rollout.md`).
- Monitor merge counters `concelier.merge.normalized_rules` and `concelier.merge.normalized_rules_missing` to spot advisories that still lack normalized arrays after precedence merge.
- When a connector is ready to emit normalized rules, update its module `TASKS.md` status and ping Merge in `#concelier-merge` with fixture diff screenshots.
- If new schemes or comparer logic is required (e.g., Cisco IOS), open a Models issue referencing `FEEDMODELS-SCHEMA-02-900` before implementing.

View File

@@ -0,0 +1,294 @@
using System;
using System.Collections.Generic;
using System.Collections.Immutable;
using System.Diagnostics.Metrics;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.Extensions.Logging;
using StellaOps.Concelier.Core;
using StellaOps.Concelier.Models;
using StellaOps.Concelier.Storage.Mongo.Advisories;
using StellaOps.Concelier.Storage.Mongo.Aliases;
using StellaOps.Concelier.Storage.Mongo.MergeEvents;
namespace StellaOps.Concelier.Merge.Services;
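/// <summary>
/// Resolves the alias-connected component for a seed advisory, applies canonical (GHSA/NVD/OSV) and
/// precedence merging, persists the resulting canonical advisory, and appends a merge_event record
/// with field-level decisions.
/// </summary>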
public sealed class AdvisoryMergeService
{
private static readonly Meter MergeMeter = new("StellaOps.Concelier.Merge");
private static readonly Counter<long> AliasCollisionCounter = MergeMeter.CreateCounter<long>(
"concelier.merge.identity_conflicts",
unit: "count",
description: "Number of alias collisions detected during merge.");
private static readonly string[] PreferredAliasSchemes =
{
AliasSchemes.Cve,
AliasSchemes.Ghsa,
AliasSchemes.OsV,
AliasSchemes.Msrc,
};
private readonly AliasGraphResolver _aliasResolver;
private readonly IAdvisoryStore _advisoryStore;
private readonly AdvisoryPrecedenceMerger _precedenceMerger;
private readonly MergeEventWriter _mergeEventWriter;
private readonly CanonicalMerger _canonicalMerger;
private readonly ILogger<AdvisoryMergeService> _logger;
public AdvisoryMergeService(
AliasGraphResolver aliasResolver,
IAdvisoryStore advisoryStore,
AdvisoryPrecedenceMerger precedenceMerger,
MergeEventWriter mergeEventWriter,
CanonicalMerger canonicalMerger,
ILogger<AdvisoryMergeService> logger)
{
_aliasResolver = aliasResolver ?? throw new ArgumentNullException(nameof(aliasResolver));
_advisoryStore = advisoryStore ?? throw new ArgumentNullException(nameof(advisoryStore));
_precedenceMerger = precedenceMerger ?? throw new ArgumentNullException(nameof(precedenceMerger));
_mergeEventWriter = mergeEventWriter ?? throw new ArgumentNullException(nameof(mergeEventWriter));
_canonicalMerger = canonicalMerger ?? throw new ArgumentNullException(nameof(canonicalMerger));
_logger = logger ?? throw new ArgumentNullException(nameof(logger));
}
public async Task<AdvisoryMergeResult> MergeAsync(string seedAdvisoryKey, CancellationToken cancellationToken)
{
ArgumentException.ThrowIfNullOrWhiteSpace(seedAdvisoryKey);
var component = await _aliasResolver.BuildComponentAsync(seedAdvisoryKey, cancellationToken).ConfigureAwait(false);
var inputs = new List<Advisory>();
foreach (var advisoryKey in component.AdvisoryKeys)
{
cancellationToken.ThrowIfCancellationRequested();
var advisory = await _advisoryStore.FindAsync(advisoryKey, cancellationToken).ConfigureAwait(false);
if (advisory is not null)
{
inputs.Add(advisory);
}
}
if (inputs.Count == 0)
{
_logger.LogWarning("Alias component seeded by {Seed} contains no persisted advisories", seedAdvisoryKey);
return AdvisoryMergeResult.Empty(seedAdvisoryKey, component);
}
var canonicalKey = SelectCanonicalKey(component) ?? seedAdvisoryKey;
var canonicalMerge = ApplyCanonicalMergeIfNeeded(canonicalKey, inputs);
var before = await _advisoryStore.FindAsync(canonicalKey, cancellationToken).ConfigureAwait(false);
var normalizedInputs = NormalizeInputs(inputs, canonicalKey).ToList();
Advisory? merged;
try
{
merged = _precedenceMerger.Merge(normalizedInputs);
}
catch (Exception ex)
{
_logger.LogError(ex, "Failed to merge alias component seeded by {Seed}", seedAdvisoryKey);
throw;
}
if (component.Collisions.Count > 0)
{
foreach (var collision in component.Collisions)
{
var tags = new KeyValuePair<string, object?>[]
{
new("scheme", collision.Scheme ?? string.Empty),
new("alias_value", collision.Value ?? string.Empty),
new("advisory_count", collision.AdvisoryKeys.Count),
};
AliasCollisionCounter.Add(1, tags);
_logger.LogInformation(
"Alias collision {Scheme}:{Value} involves advisories {Advisories}",
collision.Scheme,
collision.Value,
string.Join(", ", collision.AdvisoryKeys));
}
}
if (merged is not null)
{
await _advisoryStore.UpsertAsync(merged, cancellationToken).ConfigureAwait(false);
await _mergeEventWriter.AppendAsync(
canonicalKey,
before,
merged,
Array.Empty<Guid>(),
ConvertFieldDecisions(canonicalMerge?.Decisions),
cancellationToken).ConfigureAwait(false);
}
return new AdvisoryMergeResult(seedAdvisoryKey, canonicalKey, component, inputs, before, merged);
}
private static IEnumerable<Advisory> NormalizeInputs(IEnumerable<Advisory> advisories, string canonicalKey)
{
foreach (var advisory in advisories)
{
yield return CloneWithKey(advisory, canonicalKey);
}
}
private static Advisory CloneWithKey(Advisory source, string advisoryKey)
=> new(
advisoryKey,
source.Title,
source.Summary,
source.Language,
source.Published,
source.Modified,
source.Severity,
source.ExploitKnown,
source.Aliases,
source.Credits,
source.References,
source.AffectedPackages,
source.CvssMetrics,
source.Provenance,
source.Description,
source.Cwes,
source.CanonicalMetricId);
private CanonicalMergeResult? ApplyCanonicalMergeIfNeeded(string canonicalKey, List<Advisory> inputs)
{
if (inputs.Count == 0)
{
return null;
}
var ghsa = FindBySource(inputs, CanonicalSources.Ghsa);
var nvd = FindBySource(inputs, CanonicalSources.Nvd);
var osv = FindBySource(inputs, CanonicalSources.Osv);
var participatingSources = 0;
if (ghsa is not null)
{
participatingSources++;
}
if (nvd is not null)
{
participatingSources++;
}
if (osv is not null)
{
participatingSources++;
}
if (participatingSources < 2)
{
return null;
}
var result = _canonicalMerger.Merge(canonicalKey, ghsa, nvd, osv);
inputs.RemoveAll(advisory => MatchesCanonicalSource(advisory));
inputs.Add(result.Advisory);
return result;
}
private static Advisory? FindBySource(IEnumerable<Advisory> advisories, string source)
=> advisories.FirstOrDefault(advisory => advisory.Provenance.Any(provenance =>
!string.Equals(provenance.Kind, "merge", StringComparison.OrdinalIgnoreCase) &&
string.Equals(provenance.Source, source, StringComparison.OrdinalIgnoreCase)));
private static bool MatchesCanonicalSource(Advisory advisory)
{
foreach (var provenance in advisory.Provenance)
{
if (string.Equals(provenance.Kind, "merge", StringComparison.OrdinalIgnoreCase))
{
continue;
}
if (string.Equals(provenance.Source, CanonicalSources.Ghsa, StringComparison.OrdinalIgnoreCase) ||
string.Equals(provenance.Source, CanonicalSources.Nvd, StringComparison.OrdinalIgnoreCase) ||
string.Equals(provenance.Source, CanonicalSources.Osv, StringComparison.OrdinalIgnoreCase))
{
return true;
}
}
return false;
}
private static IReadOnlyList<MergeFieldDecision> ConvertFieldDecisions(ImmutableArray<FieldDecision>? decisions)
{
if (decisions is null || decisions.Value.IsDefaultOrEmpty)
{
return Array.Empty<MergeFieldDecision>();
}
var builder = ImmutableArray.CreateBuilder<MergeFieldDecision>(decisions.Value.Length);
foreach (var decision in decisions.Value)
{
builder.Add(new MergeFieldDecision(
decision.Field,
decision.SelectedSource,
decision.DecisionReason,
decision.SelectedModified,
decision.ConsideredSources.ToArray()));
}
return builder.ToImmutable();
}
private static class CanonicalSources
{
public const string Ghsa = "ghsa";
public const string Nvd = "nvd";
public const string Osv = "osv";
}
private static string? SelectCanonicalKey(AliasComponent component)
{
foreach (var scheme in PreferredAliasSchemes)
{
var alias = component.AliasMap.Values
.SelectMany(static aliases => aliases)
.FirstOrDefault(record => string.Equals(record.Scheme, scheme, StringComparison.OrdinalIgnoreCase));
if (!string.IsNullOrWhiteSpace(alias?.Value))
{
return alias.Value;
}
}
if (component.AliasMap.TryGetValue(component.SeedAdvisoryKey, out var seedAliases))
{
var primary = seedAliases.FirstOrDefault(record => string.Equals(record.Scheme, AliasStoreConstants.PrimaryScheme, StringComparison.OrdinalIgnoreCase));
if (!string.IsNullOrWhiteSpace(primary?.Value))
{
return primary.Value;
}
}
var firstAlias = component.AliasMap.Values.SelectMany(static aliases => aliases).FirstOrDefault();
if (!string.IsNullOrWhiteSpace(firstAlias?.Value))
{
return firstAlias.Value;
}
return component.SeedAdvisoryKey;
}
}
public sealed record AdvisoryMergeResult(
string SeedAdvisoryKey,
string CanonicalAdvisoryKey,
AliasComponent Component,
IReadOnlyList<Advisory> Inputs,
Advisory? Previous,
Advisory? Merged)
{
public static AdvisoryMergeResult Empty(string seed, AliasComponent component)
=> new(seed, seed, component, Array.Empty<Advisory>(), null, null);
}

View File

@@ -0,0 +1,567 @@
using System;
using System.Collections.Generic;
using System.Diagnostics.Metrics;
using System.Globalization;
using System.Linq;
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Logging.Abstractions;
using StellaOps.Concelier.Merge.Options;
using StellaOps.Concelier.Models;
namespace StellaOps.Concelier.Merge.Services;
/// <summary>
/// Merges canonical advisories emitted by different sources into a single precedence-resolved advisory.
/// </summary>
public sealed class AdvisoryPrecedenceMerger
{
private static readonly Meter MergeMeter = new("StellaOps.Concelier.Merge");
private static readonly Counter<long> MergeCounter = MergeMeter.CreateCounter<long>(
"concelier.merge.operations",
unit: "count",
description: "Number of merge invocations executed by the precedence engine.");
private static readonly Counter<long> OverridesCounter = MergeMeter.CreateCounter<long>(
"concelier.merge.overrides",
unit: "count",
description: "Number of times lower-precedence advisories were overridden by higher-precedence sources.");
private static readonly Counter<long> RangeOverrideCounter = MergeMeter.CreateCounter<long>(
"concelier.merge.range_overrides",
unit: "count",
description: "Number of affected-package range overrides performed during precedence merge.");
private static readonly Counter<long> ConflictCounter = MergeMeter.CreateCounter<long>(
"concelier.merge.conflicts",
unit: "count",
description: "Number of precedence conflicts detected (severity, rank ties, etc.).");
private static readonly Counter<long> NormalizedRuleCounter = MergeMeter.CreateCounter<long>(
"concelier.merge.normalized_rules",
unit: "rule",
description: "Number of normalized version rules retained after precedence merge.");
private static readonly Counter<long> MissingNormalizedRuleCounter = MergeMeter.CreateCounter<long>(
"concelier.merge.normalized_rules_missing",
unit: "package",
description: "Number of affected packages with version ranges but no normalized rules.");
private static readonly Action<ILogger, MergeOverrideAudit, Exception?> OverrideLogged = LoggerMessage.Define<MergeOverrideAudit>(
LogLevel.Information,
new EventId(1000, "AdvisoryOverride"),
"Advisory precedence override {@Override}");
private static readonly Action<ILogger, PackageOverrideAudit, Exception?> RangeOverrideLogged = LoggerMessage.Define<PackageOverrideAudit>(
LogLevel.Information,
new EventId(1001, "PackageRangeOverride"),
"Affected package precedence override {@Override}");
private static readonly Action<ILogger, MergeFieldConflictAudit, Exception?> ConflictLogged = LoggerMessage.Define<MergeFieldConflictAudit>(
LogLevel.Information,
new EventId(1002, "PrecedenceConflict"),
"Precedence conflict {@Conflict}");
private readonly AffectedPackagePrecedenceResolver _packageResolver;
private readonly IReadOnlyDictionary<string, int> _precedence;
private readonly int _fallbackRank;
private readonly System.TimeProvider _timeProvider;
private readonly ILogger<AdvisoryPrecedenceMerger> _logger;
public AdvisoryPrecedenceMerger()
: this(new AffectedPackagePrecedenceResolver(), TimeProvider.System)
{
}
public AdvisoryPrecedenceMerger(AffectedPackagePrecedenceResolver packageResolver, System.TimeProvider? timeProvider = null)
: this(packageResolver, packageResolver?.Precedence ?? AdvisoryPrecedenceDefaults.Rankings, timeProvider ?? TimeProvider.System, NullLogger<AdvisoryPrecedenceMerger>.Instance)
{
}
public AdvisoryPrecedenceMerger(
AffectedPackagePrecedenceResolver packageResolver,
IReadOnlyDictionary<string, int> precedence,
System.TimeProvider timeProvider)
: this(packageResolver, precedence, timeProvider, NullLogger<AdvisoryPrecedenceMerger>.Instance)
{
}
public AdvisoryPrecedenceMerger(
AffectedPackagePrecedenceResolver packageResolver,
AdvisoryPrecedenceOptions? options,
System.TimeProvider timeProvider,
ILogger<AdvisoryPrecedenceMerger>? logger = null)
: this(
EnsureResolver(packageResolver, options, out var precedence),
precedence,
timeProvider,
logger)
{
}
public AdvisoryPrecedenceMerger(
AffectedPackagePrecedenceResolver packageResolver,
IReadOnlyDictionary<string, int> precedence,
System.TimeProvider timeProvider,
ILogger<AdvisoryPrecedenceMerger>? logger)
{
_packageResolver = packageResolver ?? throw new ArgumentNullException(nameof(packageResolver));
_precedence = precedence ?? throw new ArgumentNullException(nameof(precedence));
_fallbackRank = _precedence.Count == 0 ? 10 : _precedence.Values.Max() + 1;
_timeProvider = timeProvider ?? TimeProvider.System;
_logger = logger ?? NullLogger<AdvisoryPrecedenceMerger>.Instance;
}
public Advisory Merge(IEnumerable<Advisory> advisories)
{
if (advisories is null)
{
throw new ArgumentNullException(nameof(advisories));
}
var list = advisories.Where(static a => a is not null).ToList();
if (list.Count == 0)
{
throw new ArgumentException("At least one advisory is required for merge.", nameof(advisories));
}
var advisoryKey = list[0].AdvisoryKey;
if (list.Any(advisory => !string.Equals(advisory.AdvisoryKey, advisoryKey, StringComparison.Ordinal)))
{
throw new ArgumentException("All advisories must share the same advisory key.", nameof(advisories));
}
var ordered = list
.Select(advisory => new AdvisoryEntry(advisory, GetRank(advisory)))
.OrderBy(entry => entry.Rank)
.ThenByDescending(entry => entry.Advisory.Provenance.Length)
.ToArray();
MergeCounter.Add(1, new KeyValuePair<string, object?>("inputs", list.Count));
var primary = ordered[0].Advisory;
var title = PickString(ordered, advisory => advisory.Title) ?? advisoryKey;
var summary = PickString(ordered, advisory => advisory.Summary);
var language = PickString(ordered, advisory => advisory.Language);
var severity = PickString(ordered, advisory => advisory.Severity);
var aliases = ordered
.SelectMany(entry => entry.Advisory.Aliases)
.Where(static alias => !string.IsNullOrWhiteSpace(alias))
.Distinct(StringComparer.OrdinalIgnoreCase)
.ToArray();
var credits = ordered
.SelectMany(entry => entry.Advisory.Credits)
.Distinct()
.ToArray();
var references = ordered
.SelectMany(entry => entry.Advisory.References)
.Distinct()
.ToArray();
var packageResult = _packageResolver.Merge(ordered.SelectMany(entry => entry.Advisory.AffectedPackages));
RecordNormalizedRuleMetrics(packageResult.Packages);
var affectedPackages = packageResult.Packages;
var cvssMetrics = ordered
.SelectMany(entry => entry.Advisory.CvssMetrics)
.Distinct()
.ToArray();
var published = PickDateTime(ordered, static advisory => advisory.Published);
var modified = PickDateTime(ordered, static advisory => advisory.Modified) ?? published;
var provenance = ordered
.SelectMany(entry => entry.Advisory.Provenance)
.Distinct()
.ToList();
var precedenceTrace = ordered
.SelectMany(entry => entry.Sources)
.Distinct(StringComparer.OrdinalIgnoreCase)
.OrderBy(static source => source, StringComparer.OrdinalIgnoreCase)
.ToArray();
var mergeProvenance = new AdvisoryProvenance(
source: "merge",
kind: "precedence",
value: string.Join("|", precedenceTrace),
recordedAt: _timeProvider.GetUtcNow());
provenance.Add(mergeProvenance);
var exploitKnown = ordered.Any(entry => entry.Advisory.ExploitKnown);
LogOverrides(advisoryKey, ordered);
LogPackageOverrides(advisoryKey, packageResult.Overrides);
RecordFieldConflicts(advisoryKey, ordered);
return new Advisory(
advisoryKey,
title,
summary,
language,
published,
modified,
severity,
exploitKnown,
aliases,
credits,
references,
affectedPackages,
cvssMetrics,
provenance);
}
private static void RecordNormalizedRuleMetrics(IReadOnlyList<AffectedPackage> packages)
{
if (packages.Count == 0)
{
return;
}
foreach (var package in packages)
{
var packageType = package.Type ?? string.Empty;
var normalizedVersions = package.NormalizedVersions;
if (normalizedVersions.Length > 0)
{
foreach (var rule in normalizedVersions)
{
var tags = new KeyValuePair<string, object?>[]
{
new("package_type", packageType),
new("scheme", rule.Scheme ?? string.Empty),
};
NormalizedRuleCounter.Add(1, tags);
}
}
else if (package.VersionRanges.Length > 0)
{
var tags = new KeyValuePair<string, object?>[]
{
new("package_type", packageType),
};
MissingNormalizedRuleCounter.Add(1, tags);
}
}
}
private string? PickString(IEnumerable<AdvisoryEntry> ordered, Func<Advisory, string?> selector)
{
foreach (var entry in ordered)
{
var value = selector(entry.Advisory);
if (!string.IsNullOrWhiteSpace(value))
{
return value.Trim();
}
}
return null;
}
private DateTimeOffset? PickDateTime(IEnumerable<AdvisoryEntry> ordered, Func<Advisory, DateTimeOffset?> selector)
{
foreach (var entry in ordered)
{
var value = selector(entry.Advisory);
if (value.HasValue)
{
return value.Value.ToUniversalTime();
}
}
return null;
}
private int GetRank(Advisory advisory)
{
var best = _fallbackRank;
foreach (var provenance in advisory.Provenance)
{
if (string.IsNullOrWhiteSpace(provenance.Source))
{
continue;
}
if (_precedence.TryGetValue(provenance.Source, out var rank) && rank < best)
{
best = rank;
}
}
return best;
}
private void LogOverrides(string advisoryKey, IReadOnlyList<AdvisoryEntry> ordered)
{
if (ordered.Count <= 1)
{
return;
}
var primary = ordered[0];
var primaryRank = primary.Rank;
for (var i = 1; i < ordered.Count; i++)
{
var candidate = ordered[i];
if (candidate.Rank <= primaryRank)
{
continue;
}
var tags = new KeyValuePair<string, object?>[]
{
new("primary_source", FormatSourceLabel(primary.Sources)),
new("suppressed_source", FormatSourceLabel(candidate.Sources)),
new("primary_rank", primaryRank),
new("suppressed_rank", candidate.Rank),
};
OverridesCounter.Add(1, tags);
var audit = new MergeOverrideAudit(
advisoryKey,
primary.Sources,
primaryRank,
candidate.Sources,
candidate.Rank,
primary.Advisory.Aliases.Length,
candidate.Advisory.Aliases.Length,
primary.Advisory.Provenance.Length,
candidate.Advisory.Provenance.Length);
OverrideLogged(_logger, audit, null);
}
}
private void LogPackageOverrides(string advisoryKey, IReadOnlyList<AffectedPackageOverride> overrides)
{
if (overrides.Count == 0)
{
return;
}
foreach (var record in overrides)
{
var tags = new KeyValuePair<string, object?>[]
{
new("advisory_key", advisoryKey),
new("package_type", record.Type),
new("primary_source", FormatSourceLabel(record.PrimarySources)),
new("suppressed_source", FormatSourceLabel(record.SuppressedSources)),
new("primary_rank", record.PrimaryRank),
new("suppressed_rank", record.SuppressedRank),
new("primary_range_count", record.PrimaryRangeCount),
new("suppressed_range_count", record.SuppressedRangeCount),
};
RangeOverrideCounter.Add(1, tags);
var audit = new PackageOverrideAudit(
advisoryKey,
record.Type,
record.Identifier,
record.Platform,
record.PrimaryRank,
record.SuppressedRank,
record.PrimarySources,
record.SuppressedSources,
record.PrimaryRangeCount,
record.SuppressedRangeCount);
RangeOverrideLogged(_logger, audit, null);
}
}
private void RecordFieldConflicts(string advisoryKey, IReadOnlyList<AdvisoryEntry> ordered)
{
if (ordered.Count <= 1)
{
return;
}
var primary = ordered[0];
var primarySeverity = NormalizeSeverity(primary.Advisory.Severity);
for (var i = 1; i < ordered.Count; i++)
{
var candidate = ordered[i];
var candidateSeverity = NormalizeSeverity(candidate.Advisory.Severity);
if (!string.IsNullOrEmpty(candidateSeverity))
{
var reason = string.IsNullOrEmpty(primarySeverity) ? "primary_missing" : "mismatch";
if (string.IsNullOrEmpty(primarySeverity) || !string.Equals(primarySeverity, candidateSeverity, StringComparison.OrdinalIgnoreCase))
{
RecordConflict(
advisoryKey,
"severity",
reason,
primary,
candidate,
primarySeverity ?? "(none)",
candidateSeverity);
}
}
if (candidate.Rank == primary.Rank)
{
RecordConflict(
advisoryKey,
"precedence_tie",
"equal_rank",
primary,
candidate,
primary.Rank.ToString(CultureInfo.InvariantCulture),
candidate.Rank.ToString(CultureInfo.InvariantCulture));
}
}
}
private void RecordConflict(
string advisoryKey,
string conflictType,
string reason,
AdvisoryEntry primary,
AdvisoryEntry suppressed,
string? primaryValue,
string? suppressedValue)
{
var tags = new KeyValuePair<string, object?>[]
{
new("type", conflictType),
new("reason", reason),
new("primary_source", FormatSourceLabel(primary.Sources)),
new("suppressed_source", FormatSourceLabel(suppressed.Sources)),
new("primary_rank", primary.Rank),
new("suppressed_rank", suppressed.Rank),
};
ConflictCounter.Add(1, tags);
var audit = new MergeFieldConflictAudit(
advisoryKey,
conflictType,
reason,
primary.Sources,
primary.Rank,
suppressed.Sources,
suppressed.Rank,
primaryValue,
suppressedValue);
ConflictLogged(_logger, audit, null);
}
private readonly record struct AdvisoryEntry(Advisory Advisory, int Rank)
{
public IReadOnlyCollection<string> Sources { get; } = Advisory.Provenance
.Select(static p => p.Source)
.Where(static source => !string.IsNullOrWhiteSpace(source))
.Distinct(StringComparer.OrdinalIgnoreCase)
.ToArray();
}
private static string? NormalizeSeverity(string? severity)
=> SeverityNormalization.Normalize(severity);
private static AffectedPackagePrecedenceResolver EnsureResolver(
AffectedPackagePrecedenceResolver? resolver,
AdvisoryPrecedenceOptions? options,
out IReadOnlyDictionary<string, int> precedence)
{
precedence = AdvisoryPrecedenceTable.Merge(AdvisoryPrecedenceDefaults.Rankings, options);
if (resolver is null)
{
return new AffectedPackagePrecedenceResolver(precedence);
}
if (DictionaryEquals(resolver.Precedence, precedence))
{
return resolver;
}
return new AffectedPackagePrecedenceResolver(precedence);
}
private static bool DictionaryEquals(
IReadOnlyDictionary<string, int> left,
IReadOnlyDictionary<string, int> right)
{
if (ReferenceEquals(left, right))
{
return true;
}
if (left.Count != right.Count)
{
return false;
}
foreach (var (key, value) in left)
{
if (!right.TryGetValue(key, out var other) || other != value)
{
return false;
}
}
return true;
}
private static string FormatSourceLabel(IReadOnlyCollection<string> sources)
{
if (sources.Count == 0)
{
return "unknown";
}
if (sources.Count == 1)
{
return sources.First();
}
return string.Join('|', sources.OrderBy(static s => s, StringComparer.OrdinalIgnoreCase).Take(3));
}
private readonly record struct MergeOverrideAudit(
string AdvisoryKey,
IReadOnlyCollection<string> PrimarySources,
int PrimaryRank,
IReadOnlyCollection<string> SuppressedSources,
int SuppressedRank,
int PrimaryAliasCount,
int SuppressedAliasCount,
int PrimaryProvenanceCount,
int SuppressedProvenanceCount);
private readonly record struct PackageOverrideAudit(
string AdvisoryKey,
string PackageType,
string Identifier,
string? Platform,
int PrimaryRank,
int SuppressedRank,
IReadOnlyCollection<string> PrimarySources,
IReadOnlyCollection<string> SuppressedSources,
int PrimaryRangeCount,
int SuppressedRangeCount);
private readonly record struct MergeFieldConflictAudit(
string AdvisoryKey,
string ConflictType,
string Reason,
IReadOnlyCollection<string> PrimarySources,
int PrimaryRank,
IReadOnlyCollection<string> SuppressedSources,
int SuppressedRank,
string? PrimaryValue,
string? SuppressedValue);
}

View File

@@ -0,0 +1,170 @@
using System;
using System.Collections.Generic;
using System.Collections.Immutable;
using System.Linq;
using StellaOps.Concelier.Merge.Options;
using StellaOps.Concelier.Models;
namespace StellaOps.Concelier.Merge.Services;
/// <summary>
/// Applies source precedence rules to affected package sets so authoritative distro ranges override generic registry data.
/// </summary>
public sealed class AffectedPackagePrecedenceResolver
{
private readonly IReadOnlyDictionary<string, int> _precedence;
private readonly int _fallbackRank;
public AffectedPackagePrecedenceResolver()
: this(AdvisoryPrecedenceDefaults.Rankings)
{
}
public AffectedPackagePrecedenceResolver(AdvisoryPrecedenceOptions? options)
: this(AdvisoryPrecedenceTable.Merge(AdvisoryPrecedenceDefaults.Rankings, options))
{
}
public AffectedPackagePrecedenceResolver(IReadOnlyDictionary<string, int> precedence)
{
_precedence = precedence ?? throw new ArgumentNullException(nameof(precedence));
_fallbackRank = precedence.Count == 0 ? 10 : precedence.Values.Max() + 1;
}
public IReadOnlyDictionary<string, int> Precedence => _precedence;
public AffectedPackagePrecedenceResult Merge(IEnumerable<AffectedPackage> packages)
{
ArgumentNullException.ThrowIfNull(packages);
var grouped = packages
.Where(static pkg => pkg is not null)
.GroupBy(pkg => (pkg.Type, pkg.Identifier, pkg.Platform ?? string.Empty));
var resolved = new List<AffectedPackage>();
var overrides = new List<AffectedPackageOverride>();
foreach (var group in grouped)
{
var ordered = group
.Select(pkg => new PackageEntry(pkg, GetPrecedence(pkg)))
.OrderBy(static entry => entry.Rank)
.ThenByDescending(static entry => entry.Package.Provenance.Length)
.ThenByDescending(static entry => entry.Package.VersionRanges.Length)
.ToList();
var primary = ordered[0];
var provenance = ordered
.SelectMany(static entry => entry.Package.Provenance)
.Where(static p => p is not null)
.Distinct()
.ToImmutableArray();
var statuses = ordered
.SelectMany(static entry => entry.Package.Statuses)
.Distinct(AffectedPackageStatusEqualityComparer.Instance)
.ToImmutableArray();
var normalizedRules = ordered
.SelectMany(static entry => entry.Package.NormalizedVersions)
.Distinct(NormalizedVersionRuleEqualityComparer.Instance)
.OrderBy(static rule => rule, NormalizedVersionRuleComparer.Instance)
.ToImmutableArray();
foreach (var candidate in ordered.Skip(1))
{
if (candidate.Package.VersionRanges.Length == 0)
{
continue;
}
overrides.Add(new AffectedPackageOverride(
primary.Package.Type,
primary.Package.Identifier,
string.IsNullOrWhiteSpace(primary.Package.Platform) ? null : primary.Package.Platform,
primary.Rank,
candidate.Rank,
ExtractSources(primary.Package),
ExtractSources(candidate.Package),
primary.Package.VersionRanges.Length,
candidate.Package.VersionRanges.Length));
}
var merged = new AffectedPackage(
primary.Type,
primary.Identifier,
string.IsNullOrWhiteSpace(primary.Platform) ? null : primary.Platform,
primary.Package.VersionRanges,
statuses,
provenance,
normalizedRules);
resolved.Add(merged);
}
var packagesResult = resolved
.OrderBy(static pkg => pkg.Type, StringComparer.Ordinal)
.ThenBy(static pkg => pkg.Identifier, StringComparer.Ordinal)
.ThenBy(static pkg => pkg.Platform, StringComparer.Ordinal)
.ToImmutableArray();
return new AffectedPackagePrecedenceResult(packagesResult, overrides.ToImmutableArray());
}
private int GetPrecedence(AffectedPackage package)
{
var bestRank = _fallbackRank;
foreach (var provenance in package.Provenance)
{
if (provenance is null || string.IsNullOrWhiteSpace(provenance.Source))
{
continue;
}
if (_precedence.TryGetValue(provenance.Source, out var rank) && rank < bestRank)
{
bestRank = rank;
}
}
return bestRank;
}
private static IReadOnlyList<string> ExtractSources(AffectedPackage package)
{
if (package.Provenance.Length == 0)
{
return Array.Empty<string>();
}
return package.Provenance
.Select(static p => p.Source)
.Where(static source => !string.IsNullOrWhiteSpace(source))
.Distinct(StringComparer.OrdinalIgnoreCase)
.ToImmutableArray();
}
private readonly record struct PackageEntry(AffectedPackage Package, int Rank)
{
public string Type => Package.Type;
public string Identifier => Package.Identifier;
public string? Platform => string.IsNullOrWhiteSpace(Package.Platform) ? null : Package.Platform;
}
}
public sealed record AffectedPackagePrecedenceResult(
IReadOnlyList<AffectedPackage> Packages,
IReadOnlyList<AffectedPackageOverride> Overrides);
public sealed record AffectedPackageOverride(
string Type,
string Identifier,
string? Platform,
int PrimaryRank,
int SuppressedRank,
IReadOnlyList<string> PrimarySources,
IReadOnlyList<string> SuppressedSources,
int PrimaryRangeCount,
int SuppressedRangeCount);
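// Usage sketch (illustrative only, not wired into the pipeline): exercising the resolver
// with the default precedence table. The mapped packages would normally come from the
// Storage.Mongo mapping stage; nothing here is a production entry point.
internal static class AffectedPackagePrecedenceResolverSketch
{
public static AffectedPackagePrecedenceResult Resolve(IEnumerable<AffectedPackage> mappedPackages)
{
// Packages sharing (Type, Identifier, Platform) collapse onto the highest-precedence entry;
// suppressed candidates that still carried version ranges surface as override records.
var resolver = new AffectedPackagePrecedenceResolver();
return resolver.Merge(mappedPackages);
}
}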

View File

@@ -0,0 +1,139 @@
using System;
using System.Collections.Generic;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using StellaOps.Concelier.Storage.Mongo.Aliases;
namespace StellaOps.Concelier.Merge.Services;
public sealed class AliasGraphResolver
{
private readonly IAliasStore _aliasStore;
public AliasGraphResolver(IAliasStore aliasStore)
{
_aliasStore = aliasStore ?? throw new ArgumentNullException(nameof(aliasStore));
}
public async Task<AliasIdentityResult> ResolveAsync(string advisoryKey, CancellationToken cancellationToken)
{
ArgumentException.ThrowIfNullOrEmpty(advisoryKey);
var aliases = await _aliasStore.GetByAdvisoryAsync(advisoryKey, cancellationToken).ConfigureAwait(false);
var collisions = new List<AliasCollision>();
foreach (var alias in aliases)
{
var candidates = await _aliasStore.GetByAliasAsync(alias.Scheme, alias.Value, cancellationToken).ConfigureAwait(false);
var advisoryKeys = candidates
.Select(static candidate => candidate.AdvisoryKey)
.Where(static key => !string.IsNullOrWhiteSpace(key))
.Distinct(StringComparer.OrdinalIgnoreCase)
.ToArray();
if (advisoryKeys.Length <= 1)
{
continue;
}
collisions.Add(new AliasCollision(alias.Scheme, alias.Value, advisoryKeys));
}
var unique = new Dictionary<string, AliasCollision>(StringComparer.Ordinal);
foreach (var collision in collisions)
{
var key = $"{collision.Scheme}\u0001{collision.Value}";
if (!unique.ContainsKey(key))
{
unique[key] = collision;
}
}
var distinctCollisions = unique.Values.ToArray();
return new AliasIdentityResult(advisoryKey, aliases, distinctCollisions);
}
public async Task<AliasComponent> BuildComponentAsync(string advisoryKey, CancellationToken cancellationToken)
{
ArgumentException.ThrowIfNullOrEmpty(advisoryKey);
var visited = new HashSet<string>(StringComparer.OrdinalIgnoreCase);
var queue = new Queue<string>();
var collisionMap = new Dictionary<string, AliasCollision>(StringComparer.Ordinal);
var aliasCache = new Dictionary<string, IReadOnlyList<AliasRecord>>(StringComparer.OrdinalIgnoreCase);
queue.Enqueue(advisoryKey);
while (queue.Count > 0)
{
cancellationToken.ThrowIfCancellationRequested();
var current = queue.Dequeue();
if (!visited.Add(current))
{
continue;
}
var aliases = await GetAliasesAsync(current, cancellationToken, aliasCache).ConfigureAwait(false);
aliasCache[current] = aliases;
foreach (var alias in aliases)
{
var aliasRecords = await GetAdvisoriesForAliasAsync(alias.Scheme, alias.Value, cancellationToken).ConfigureAwait(false);
var advisoryKeys = aliasRecords
.Select(static record => record.AdvisoryKey)
.Where(static key => !string.IsNullOrWhiteSpace(key))
.Distinct(StringComparer.OrdinalIgnoreCase)
.ToArray();
if (advisoryKeys.Length <= 1)
{
continue;
}
foreach (var candidate in advisoryKeys)
{
if (!visited.Contains(candidate))
{
queue.Enqueue(candidate);
}
}
var collision = new AliasCollision(alias.Scheme, alias.Value, advisoryKeys);
var key = $"{collision.Scheme}\u0001{collision.Value}";
collisionMap.TryAdd(key, collision);
}
}
var aliasMap = new Dictionary<string, IReadOnlyList<AliasRecord>>(aliasCache, StringComparer.OrdinalIgnoreCase);
return new AliasComponent(advisoryKey, visited.ToArray(), collisionMap.Values.ToArray(), aliasMap);
}
private async Task<IReadOnlyList<AliasRecord>> GetAliasesAsync(
string advisoryKey,
CancellationToken cancellationToken,
IDictionary<string, IReadOnlyList<AliasRecord>> cache)
{
if (cache.TryGetValue(advisoryKey, out var cached))
{
return cached;
}
var aliases = await _aliasStore.GetByAdvisoryAsync(advisoryKey, cancellationToken).ConfigureAwait(false);
cache[advisoryKey] = aliases;
return aliases;
}
private Task<IReadOnlyList<AliasRecord>> GetAdvisoriesForAliasAsync(
string scheme,
string value,
CancellationToken cancellationToken)
=> _aliasStore.GetByAliasAsync(scheme, value, cancellationToken);
}
public sealed record AliasIdentityResult(string AdvisoryKey, IReadOnlyList<AliasRecord> Aliases, IReadOnlyList<AliasCollision> Collisions);
public sealed record AliasComponent(
string SeedAdvisoryKey,
IReadOnlyList<string> AdvisoryKeys,
IReadOnlyList<AliasCollision> Collisions,
IReadOnlyDictionary<string, IReadOnlyList<AliasRecord>> AliasMap);
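// Usage sketch (illustrative only): walking an alias component from a seed advisory key.
// The seed key "CVE-2025-0001" is hypothetical; in production the resolver and alias store
// are constructed via dependency injection.
internal static class AliasGraphResolverSketch
{
public static async Task<AliasComponent> WalkAsync(IAliasStore aliasStore, CancellationToken cancellationToken)
{
var resolver = new AliasGraphResolver(aliasStore);
// Breadth-first expansion: any alias (scheme, value) that maps to more than one advisory key
// pulls those advisories into the same component and is reported as a collision.
return await resolver.BuildComponentAsync("CVE-2025-0001", cancellationToken).ConfigureAwait(false);
}
}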

View File

@@ -0,0 +1,25 @@
namespace StellaOps.Concelier.Merge.Services;
using System.Security.Cryptography;
using System.Text;
using StellaOps.Concelier.Models;
/// <summary>
/// Computes deterministic hashes over canonical advisory JSON payloads.
/// </summary>
public sealed class CanonicalHashCalculator
{
private static readonly UTF8Encoding Utf8NoBom = new(false);
public byte[] ComputeHash(Advisory? advisory)
{
if (advisory is null)
{
return Array.Empty<byte>();
}
var canonical = CanonicalJsonSerializer.Serialize(CanonicalJsonSerializer.Normalize(advisory));
var payload = Utf8NoBom.GetBytes(canonical);
return SHA256.HashData(payload);
}
}
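// Usage sketch (illustrative only): deciding whether a merge produced a canonical delta.
// The advisories are assumed to be canonical model instances coming out of the merge pipeline.
internal static class CanonicalHashCalculatorSketch
{
public static bool HasCanonicalDelta(Advisory? before, Advisory after)
{
var calculator = new CanonicalHashCalculator();
var beforeHash = calculator.ComputeHash(before); // empty for a brand-new advisory
var afterHash = calculator.ComputeHash(after);
// SHA-256 over canonical JSON is deterministic, so differing hex digests imply a real change.
return !Convert.ToHexString(beforeHash).Equals(Convert.ToHexString(afterHash), StringComparison.Ordinal);
}
}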

View File

@@ -0,0 +1,72 @@
namespace StellaOps.Concelier.Merge.Services;
using System.Security.Cryptography;
using System.Linq;
using Microsoft.Extensions.Logging;
using StellaOps.Concelier.Models;
using StellaOps.Concelier.Storage.Mongo.MergeEvents;
/// <summary>
/// Persists merge events with canonical before/after hashes for auditability.
/// </summary>
public sealed class MergeEventWriter
{
private readonly IMergeEventStore _mergeEventStore;
private readonly CanonicalHashCalculator _hashCalculator;
private readonly TimeProvider _timeProvider;
private readonly ILogger<MergeEventWriter> _logger;
public MergeEventWriter(
IMergeEventStore mergeEventStore,
CanonicalHashCalculator hashCalculator,
TimeProvider timeProvider,
ILogger<MergeEventWriter> logger)
{
_mergeEventStore = mergeEventStore ?? throw new ArgumentNullException(nameof(mergeEventStore));
_hashCalculator = hashCalculator ?? throw new ArgumentNullException(nameof(hashCalculator));
_timeProvider = timeProvider ?? throw new ArgumentNullException(nameof(timeProvider));
_logger = logger ?? throw new ArgumentNullException(nameof(logger));
}
public async Task<MergeEventRecord> AppendAsync(
string advisoryKey,
Advisory? before,
Advisory after,
IReadOnlyList<Guid> inputDocumentIds,
IReadOnlyList<MergeFieldDecision>? fieldDecisions,
CancellationToken cancellationToken)
{
ArgumentException.ThrowIfNullOrWhiteSpace(advisoryKey);
ArgumentNullException.ThrowIfNull(after);
var beforeHash = _hashCalculator.ComputeHash(before);
var afterHash = _hashCalculator.ComputeHash(after);
var timestamp = _timeProvider.GetUtcNow();
var documentIds = inputDocumentIds?.ToArray() ?? Array.Empty<Guid>();
var record = new MergeEventRecord(
Guid.NewGuid(),
advisoryKey,
beforeHash,
afterHash,
timestamp,
documentIds,
fieldDecisions ?? Array.Empty<MergeFieldDecision>());
if (!CryptographicOperations.FixedTimeEquals(beforeHash, afterHash))
{
_logger.LogInformation(
"Merge event for {AdvisoryKey} changed hash {BeforeHash} -> {AfterHash}",
advisoryKey,
Convert.ToHexString(beforeHash),
Convert.ToHexString(afterHash));
}
else
{
_logger.LogInformation("Merge event for {AdvisoryKey} recorded without hash change", advisoryKey);
}
await _mergeEventStore.AppendAsync(record, cancellationToken).ConfigureAwait(false);
return record;
}
}
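// Usage sketch (illustrative only): the call shape for appending an audit record after a merge run.
// The writer, advisory instances, and document ids are placeholders supplied by the merge service via DI.
internal static class MergeEventWriterSketch
{
public static Task<MergeEventRecord> RecordAsync(
MergeEventWriter writer,
string advisoryKey,
Advisory? before,
Advisory after,
IReadOnlyList<Guid> inputDocumentIds,
CancellationToken cancellationToken)
// Passing null field decisions persists an empty decision list alongside the before/after hashes.
=> writer.AppendAsync(advisoryKey, before, after, inputDocumentIds, fieldDecisions: null, cancellationToken);
}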

View File

@@ -0,0 +1,18 @@
<?xml version="1.0" encoding="utf-8"?>
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<TargetFramework>net10.0</TargetFramework>
<ImplicitUsings>enable</ImplicitUsings>
<Nullable>enable</Nullable>
</PropertyGroup>
<ItemGroup>
<PackageReference Include="Microsoft.Extensions.Configuration.Binder" Version="8.0.0" />
<PackageReference Include="Semver" Version="2.3.0" />
<ProjectReference Include="../StellaOps.Concelier.Core/StellaOps.Concelier.Core.csproj" />
<ProjectReference Include="../StellaOps.Concelier.Models/StellaOps.Concelier.Models.csproj" />
<ProjectReference Include="../StellaOps.Concelier.Normalization/StellaOps.Concelier.Normalization.csproj" />
<ProjectReference Include="../StellaOps.Concelier.Storage.Mongo/StellaOps.Concelier.Storage.Mongo.csproj" />
</ItemGroup>
</Project>

View File

@@ -0,0 +1,21 @@
# TASKS
| Task | Owner(s) | Depends on | Notes |
|---|---|---|---|
|Identity graph and alias resolver|BE-Merge|Models, Storage.Mongo|DONE `AdvisoryIdentityResolver` builds alias-driven clusters with canonical key selection + unit coverage.|
|Precedence policy engine|BE-Merge|Architecture|**DONE** precedence defaults enforced by `AdvisoryPrecedenceMerger`/`AdvisoryPrecedenceDefaults`, with distro/PSIRT sources overriding registry feeds while CERT/KEV remain enrichment-only.|
|NEVRA comparer plus tests|BE-Merge (Distro WG)|Source.Distro fixtures|DONE Added Nevra parser/comparer with tilde-aware rpm ordering and unit coverage.|
|Debian EVR comparer plus tests|BE-Merge (Distro WG)|Debian fixtures|DONE DebianEvr comparer mirrors dpkg ordering with tilde/epoch handling and unit coverage.|
|SemVer range resolver plus tests|BE-Merge (OSS WG)|OSV/GHSA fixtures|DONE SemanticVersionRangeResolver covers introduced/fixed/lastAffected semantics with SemVer ordering tests.|
|Canonical hash and merge_event writer|BE-Merge|Models, Storage.Mongo|DONE Hash calculator + MergeEventWriter compute canonical SHA-256 digests and persist merge events.|
|Conflict detection and metrics|BE-Merge|Core|**DONE** merge meters emit override/conflict counters and structured audits (`AdvisoryPrecedenceMerger`).|
|FEEDMERGE-ENGINE-04-001 GHSA/NVD/OSV conflict rules|BE-Merge|Core, Storage.Mongo|DONE `AdvisoryMergeService` applies `CanonicalMerger` output before precedence merge, replacing source advisories with the canonical transcript. **Coordination:** connector fixture owners should surface canonical deltas to Merge QA before regression sign-off.|
|FEEDMERGE-ENGINE-04-002 Override metrics instrumentation|BE-Merge|Observability|DONE merge events persist `MergeFieldDecision` records enabling analytics on precedence/freshness decisions. **Next:** hand off metrics schema to Ops for dashboard wiring.|
|FEEDMERGE-ENGINE-04-003 Reference & credit union pipeline|BE-Merge|Models|DONE canonical merge preserves union semantics while respecting precedence, validated via updated credit union tests.|
|End-to-end determinism test|QA|Merge, key connectors|**DONE** `MergePrecedenceIntegrationTests.MergePipeline_IsDeterministicAcrossRuns` guards determinism.|
|FEEDMERGE-QA-04-001 End-to-end conflict regression suite|QA|Merge|DONE `AdvisoryMergeServiceTests.MergeAsync_AppliesCanonicalRulesAndPersistsDecisions` exercises GHSA/NVD/OSV conflict path and merge-event analytics. **Reminder:** QA to sync with connector teams once new fixture triples land.|
|Override audit logging|BE-Merge|Observability|DONE override audits now emit structured logs plus bounded-tag metrics suitable for prod telemetry.|
|Configurable precedence table|BE-Merge|Architecture|DONE precedence options bind via `concelier:merge:precedence:ranks` with docs/tests covering operator workflow; see the binding sketch after this table.|
|Range primitives backlog|BE-Merge|Connector WGs|**DOING** Coordinate remaining connectors (`Acsc`, `Cccs`, `CertBund`, `CertCc`, `Cve`, `Ghsa`, `Ics.Cisa`, `Kisa`, `Ru.Bdu`, `Ru.Nkcki`, `Vndr.Apple`, `Vndr.Cisco`, `Vndr.Msrc`) to emit canonical RangePrimitives with provenance tags; track progress/fixtures here.<br>2025-10-11: Storage alignment notes + sample normalized rule JSON now captured in `RANGE_PRIMITIVES_COORDINATION.md` (see “Storage alignment quick reference”).<br>2025-10-11 18:45Z: GHSA normalized rules landed; OSV connector picked up next for rollout.<br>2025-10-11 21:10Z: `docs/dev/merge_semver_playbook.md` Section 8 now documents the persisted Mongo projection (SemVer + NEVRA) for connector reviewers.<br>2025-10-11 21:30Z: Added `docs/dev/normalized_versions_rollout.md` dashboard to centralize connector status and upcoming milestones.<br>2025-10-11 21:55Z: Merge now emits `concelier.merge.normalized_rules*` counters and unions connector-provided normalized arrays; see new test coverage in `AdvisoryPrecedenceMergerTests.Merge_RecordsNormalizedRuleMetrics`.<br>2025-10-12 17:05Z: CVE + KEV normalized rule verification complete; OSV parity fixtures revalidated—downstream parity/monitoring tasks may proceed.|
|Merge pipeline parity for new advisory fields|BE-Merge|Models, Core|DONE (2025-10-15) merge service now surfaces description/CWE/canonical metric decisions with updated metrics/tests.|
|Connector coordination for new advisory fields|Connector Leads, BE-Merge|Models, Core|**DONE (2025-10-15)** GHSA, NVD, and OSV connectors now emit advisory descriptions, CWE weaknesses, and canonical metric ids. Fixtures refreshed (GHSA connector regression suite, `conflict-nvd.canonical.json`, OSV parity snapshots) and completion recorded in coordination log.|
|FEEDMERGE-ENGINE-07-001 Conflict sets & explainers|BE-Merge|FEEDSTORAGE-DATA-07-001|TODO Persist conflict sets referencing advisory statements, output rule/explainer payloads with replay hashes, and add integration tests covering deterministic `asOf` evaluations.|
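
The configurable precedence row above binds a dictionary of source name to rank. A minimal sketch of exercising that binding in a test or tool follows; it assumes the standard `Microsoft.Extensions.Configuration` in-memory provider (a test-only dependency beyond the Binder package this project references), and the source names and ranks shown are examples rather than the shipped defaults.

```csharp
using System.Collections.Generic;
using Microsoft.Extensions.Configuration;
using StellaOps.Concelier.Merge.Services;

var configuration = new ConfigurationBuilder()
    .AddInMemoryCollection(new Dictionary<string, string?>
    {
        // Lower rank wins; sources missing from the table fall back to (max rank + 1).
        ["concelier:merge:precedence:ranks:redhat"] = "1",
        ["concelier:merge:precedence:ranks:ghsa"] = "5",
        ["concelier:merge:precedence:ranks:nvd"] = "9",
    })
    .Build();

// Bind the operator-supplied ranks and hand them to the package precedence resolver.
var ranks = configuration
    .GetSection("concelier:merge:precedence:ranks")
    .Get<Dictionary<string, int>>() ?? new Dictionary<string, int>();

var resolver = new AffectedPackagePrecedenceResolver(ranks);
```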