Add SBOM, symbols, traces, and VEX files for CVE-2022-21661 SQLi case
Some checks failed
Docs CI / lint-and-preview (push) Has been cancelled

- Created CycloneDX and SPDX SBOM files for both reachable and unreachable images.
- Added symbols.json detailing function entry and sink points in the WordPress code.
- Included runtime traces for function calls in both reachable and unreachable scenarios.
- Developed OpenVEX files indicating vulnerability status and justification for both cases.
- Updated README for evaluator harness to guide integration with scanner output.
This commit is contained in:
master
2025-11-08 20:53:45 +02:00
parent 515975edc5
commit 536f6249a6
837 changed files with 37279 additions and 14675 deletions

View File

@@ -25,17 +25,18 @@ namespace StellaOps.Concelier.Connector.Cccs;
public sealed class CccsConnector : IFeedConnector
{
private static readonly JsonSerializerOptions RawSerializerOptions = new(JsonSerializerDefaults.Web)
{
DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingNull,
};
private static readonly JsonSerializerOptions DtoSerializerOptions = new(JsonSerializerDefaults.Web)
{
DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingNull,
};
private const string DtoSchemaVersion = "cccs.dto.v1";
private static readonly JsonSerializerOptions RawSerializerOptions = new(JsonSerializerDefaults.Web)
{
DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingNull,
};
private static readonly JsonSerializerOptions DtoSerializerOptions = new(JsonSerializerDefaults.Web)
{
DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingNull,
};
private static readonly Uri CanonicalBaseUri = new("https://www.cyber.gc.ca", UriKind.Absolute);
private const string DtoSchemaVersion = "cccs.dto.v1";
private readonly CccsFeedClient _feedClient;
private readonly RawDocumentStorage _rawDocumentStorage;
@@ -482,24 +483,37 @@ public sealed class CccsConnector : IFeedConnector
}
}
private static string BuildDocumentUri(CccsFeedItem item, CccsFeedEndpoint feed)
{
    // Resolve the advisory URL for a feed item, falling back to the canonical
    // threats API route when the item carries no usable URL.
    var rawUrl = item.Url;
    if (!string.IsNullOrWhiteSpace(rawUrl))
    {
        // Absolute URLs are taken verbatim.
        if (Uri.TryCreate(rawUrl, UriKind.Absolute, out var absoluteUri))
        {
            return absoluteUri.ToString();
        }

        // Relative URLs are resolved against the cyber.gc.ca host.
        var root = new Uri("https://www.cyber.gc.ca", UriKind.Absolute);
        if (Uri.TryCreate(root, rawUrl, out var resolved))
        {
            return resolved.ToString();
        }
    }

    return $"https://www.cyber.gc.ca/api/cccs/threats/{feed.Language}/{item.Nid}";
}
// Resolves the document URI for a feed item. Absolute http/https URLs are used
// verbatim; non-HTTP absolute URLs are reduced to path+query(+fragment) and
// re-rooted on CanonicalBaseUri; relative URLs resolve against CanonicalBaseUri.
// Falls back to the canonical threats API route when no usable URL is present.
private static string BuildDocumentUri(CccsFeedItem item, CccsFeedEndpoint feed)
{
    var candidate = item.Url?.Trim();
    if (!string.IsNullOrWhiteSpace(candidate))
    {
        if (Uri.TryCreate(candidate, UriKind.Absolute, out var absolute))
        {
            if (IsHttpScheme(absolute.Scheme))
            {
                return absolute.ToString();
            }
            // Non-HTTP scheme: keep only the path/query (and fragment, if any)
            // so the URL can be re-rooted on the canonical host below.
            candidate = absolute.PathAndQuery;
            if (!string.IsNullOrEmpty(absolute.Fragment))
            {
                candidate += absolute.Fragment;
            }
        }
        // Resolve relative (or re-rooted) candidates against the canonical base.
        if (!string.IsNullOrWhiteSpace(candidate) && Uri.TryCreate(CanonicalBaseUri, candidate, out var combined))
        {
            return combined.ToString();
        }
    }
    // No usable URL on the item: synthesize the API route from language + node id.
    return new Uri(CanonicalBaseUri, $"/api/cccs/threats/{feed.Language}/{item.Nid}").ToString();
}
// Returns true when the scheme is "http" or "https" (case-insensitive); null is rejected.
private static bool IsHttpScheme(string? scheme)
{
    if (string.Equals(scheme, Uri.UriSchemeHttp, StringComparison.OrdinalIgnoreCase))
    {
        return true;
    }

    return string.Equals(scheme, Uri.UriSchemeHttps, StringComparison.OrdinalIgnoreCase);
}
private static CccsRawAdvisoryDocument CreateRawDocument(CccsFeedItem item, CccsFeedEndpoint feed, IReadOnlyDictionary<int, string> taxonomy)
{

View File

@@ -125,11 +125,16 @@ public sealed class CccsFeedEndpoint
throw new InvalidOperationException("Feed endpoint URI must be configured before building taxonomy URI.");
}
var language = Uri.GetQueryParameterValueOrDefault("lang", Language);
var builder = $"https://www.cyber.gc.ca/api/cccs/taxonomy/v1/get?lang={language}&vocabulary=cccs_alert_type";
return new Uri(builder, UriKind.Absolute);
}
}
var language = Uri.GetQueryParameterValueOrDefault("lang", Language);
var taxonomyBuilder = new UriBuilder(Uri)
{
Path = "/api/cccs/taxonomy/v1/get",
Query = $"lang={language}&vocabulary=cccs_alert_type"
};
return taxonomyBuilder.Uri;
}
}
internal static class CccsUriExtensions
{

View File

@@ -348,19 +348,21 @@ public sealed class CccsHtmlParser
private static string? NormalizeReferenceUrl(string? href, Uri? baseUri, string language)
{
if (string.IsNullOrWhiteSpace(href))
{
return null;
}
if (!Uri.TryCreate(href, UriKind.Absolute, out var absolute))
{
if (baseUri is null || !Uri.TryCreate(baseUri, href, out absolute))
{
return null;
}
}
if (string.IsNullOrWhiteSpace(href))
{
return null;
}
var candidate = href.Trim();
var hasAbsolute = Uri.TryCreate(candidate, UriKind.Absolute, out var absolute);
if (!hasAbsolute || string.Equals(absolute.Scheme, Uri.UriSchemeFile, StringComparison.OrdinalIgnoreCase))
{
if (baseUri is null || !Uri.TryCreate(baseUri, candidate, out absolute))
{
return null;
}
}
var builder = new UriBuilder(absolute)
{
Fragment = string.Empty,

View File

@@ -319,12 +319,19 @@ public sealed class KisaDetailParser
}
var headerRow = labelCell.ParentElement as IHtmlTableRowElement;
var columnIndex = labelCell.CellIndex;
var columnIndex = headerRow is null
? -1
: Array.FindIndex(headerRow.Cells.ToArray(), cell => ReferenceEquals(cell, labelCell));
if (headerRow is null)
{
return null;
}
if (columnIndex < 0)
{
return null;
}
var rows = ownerTable.Rows.ToArray();
var headerIndex = Array.FindIndex(rows, row => ReferenceEquals(row, headerRow));
if (headerIndex < 0)

View File

@@ -2,10 +2,9 @@ using System.Collections.Immutable;
using System.Globalization;
using System.IO;
using System.IO.Compression;
using System.Security.Cryptography;
using System.Linq;
using System.Text.Json;
using System.Text.Json.Serialization;
using System.Linq;
using System.Text.Json;
using System.Text.Json.Serialization;
using System.Xml;
using System.Xml.Linq;
using Microsoft.Extensions.Logging;
@@ -17,10 +16,11 @@ using StellaOps.Concelier.Connector.Common.Fetch;
using StellaOps.Concelier.Connector.Ru.Bdu.Configuration;
using StellaOps.Concelier.Connector.Ru.Bdu.Internal;
using StellaOps.Concelier.Storage.Mongo;
using StellaOps.Concelier.Storage.Mongo.Advisories;
using StellaOps.Concelier.Storage.Mongo.Documents;
using StellaOps.Concelier.Storage.Mongo.Dtos;
using StellaOps.Plugin;
using StellaOps.Concelier.Storage.Mongo.Advisories;
using StellaOps.Concelier.Storage.Mongo.Documents;
using StellaOps.Concelier.Storage.Mongo.Dtos;
using StellaOps.Plugin;
using StellaOps.Cryptography;
namespace StellaOps.Concelier.Connector.Ru.Bdu;
@@ -44,8 +44,9 @@ public sealed class RuBduConnector : IFeedConnector
private readonly TimeProvider _timeProvider;
private readonly ILogger<RuBduConnector> _logger;
private readonly string _cacheDirectory;
private readonly string _archiveCachePath;
private readonly string _cacheDirectory;
private readonly string _archiveCachePath;
private readonly ICryptoHash _hash;
public RuBduConnector(
SourceFetchService fetchService,
@@ -55,9 +56,10 @@ public sealed class RuBduConnector : IFeedConnector
IAdvisoryStore advisoryStore,
ISourceStateRepository stateRepository,
IOptions<RuBduOptions> options,
RuBduDiagnostics diagnostics,
TimeProvider? timeProvider,
ILogger<RuBduConnector> logger)
RuBduDiagnostics diagnostics,
TimeProvider? timeProvider,
ILogger<RuBduConnector> logger,
ICryptoHash cryptoHash)
{
_fetchService = fetchService ?? throw new ArgumentNullException(nameof(fetchService));
_rawDocumentStorage = rawDocumentStorage ?? throw new ArgumentNullException(nameof(rawDocumentStorage));
@@ -69,8 +71,9 @@ public sealed class RuBduConnector : IFeedConnector
_options.Validate();
_diagnostics = diagnostics ?? throw new ArgumentNullException(nameof(diagnostics));
_timeProvider = timeProvider ?? TimeProvider.System;
_logger = logger ?? throw new ArgumentNullException(nameof(logger));
_cacheDirectory = ResolveCacheDirectory(_options.CacheDirectory);
_logger = logger ?? throw new ArgumentNullException(nameof(logger));
_hash = cryptoHash ?? throw new ArgumentNullException(nameof(cryptoHash));
_cacheDirectory = ResolveCacheDirectory(_options.CacheDirectory);
_archiveCachePath = Path.Combine(_cacheDirectory, "vulxml.zip");
EnsureCacheDirectory();
}
@@ -398,7 +401,7 @@ public sealed class RuBduConnector : IFeedConnector
}
var payload = JsonSerializer.SerializeToUtf8Bytes(dto, SerializerOptions);
var sha = Convert.ToHexString(SHA256.HashData(payload)).ToLowerInvariant();
var sha = _hash.ComputeHashHex(payload);
var documentUri = BuildDocumentUri(dto.Identifier);
var existing = await _documentStore.FindBySourceAndUriAsync(SourceName, documentUri, cancellationToken).ConfigureAwait(false);

View File

@@ -14,6 +14,7 @@
<ProjectReference Include="../StellaOps.Concelier.Connector.Common/StellaOps.Concelier.Connector.Common.csproj" />
<ProjectReference Include="../StellaOps.Concelier.Models/StellaOps.Concelier.Models.csproj" />
<ProjectReference Include="../StellaOps.Concelier.Storage.Mongo/StellaOps.Concelier.Storage.Mongo.csproj" />
<ProjectReference Include="../../../__Libraries/StellaOps.Cryptography/StellaOps.Cryptography.csproj" />
</ItemGroup>
</Project>
</Project>

View File

@@ -4,23 +4,23 @@ using System.IO;
using System.IO.Compression;
using System.Net;
using System.Linq;
using System.Security.Cryptography;
using System.Text;
using System.Text.Json;
using System.Text.Json.Serialization;
using AngleSharp.Html.Parser;
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Options;
using MongoDB.Bson;
using StellaOps.Concelier.Connector.Common;
using StellaOps.Concelier.Connector.Common.Fetch;
using StellaOps.Concelier.Connector.Ru.Nkcki.Configuration;
using StellaOps.Concelier.Connector.Ru.Nkcki.Internal;
using StellaOps.Concelier.Storage.Mongo;
using StellaOps.Concelier.Storage.Mongo.Advisories;
using StellaOps.Concelier.Storage.Mongo.Documents;
using StellaOps.Concelier.Storage.Mongo.Dtos;
using StellaOps.Plugin;
using System.Text;
using System.Text.Json;
using System.Text.Json.Serialization;
using AngleSharp.Html.Parser;
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Options;
using MongoDB.Bson;
using StellaOps.Concelier.Connector.Common;
using StellaOps.Concelier.Connector.Common.Fetch;
using StellaOps.Concelier.Connector.Ru.Nkcki.Configuration;
using StellaOps.Concelier.Connector.Ru.Nkcki.Internal;
using StellaOps.Concelier.Storage.Mongo;
using StellaOps.Concelier.Storage.Mongo.Advisories;
using StellaOps.Concelier.Storage.Mongo.Documents;
using StellaOps.Concelier.Storage.Mongo.Dtos;
using StellaOps.Plugin;
using StellaOps.Cryptography;
namespace StellaOps.Concelier.Connector.Ru.Nkcki;
@@ -55,11 +55,12 @@ public sealed class RuNkckiConnector : IFeedConnector
private readonly ISourceStateRepository _stateRepository;
private readonly RuNkckiOptions _options;
private readonly TimeProvider _timeProvider;
private readonly RuNkckiDiagnostics _diagnostics;
private readonly ILogger<RuNkckiConnector> _logger;
private readonly string _cacheDirectory;
private readonly HtmlParser _htmlParser = new();
private readonly RuNkckiDiagnostics _diagnostics;
private readonly ILogger<RuNkckiConnector> _logger;
private readonly string _cacheDirectory;
private readonly ICryptoHash _hash;
private readonly HtmlParser _htmlParser = new();
public RuNkckiConnector(
SourceFetchService fetchService,
@@ -69,9 +70,10 @@ public sealed class RuNkckiConnector : IFeedConnector
IAdvisoryStore advisoryStore,
ISourceStateRepository stateRepository,
IOptions<RuNkckiOptions> options,
RuNkckiDiagnostics diagnostics,
TimeProvider? timeProvider,
ILogger<RuNkckiConnector> logger)
RuNkckiDiagnostics diagnostics,
TimeProvider? timeProvider,
ILogger<RuNkckiConnector> logger,
ICryptoHash cryptoHash)
{
_fetchService = fetchService ?? throw new ArgumentNullException(nameof(fetchService));
_rawDocumentStorage = rawDocumentStorage ?? throw new ArgumentNullException(nameof(rawDocumentStorage));
@@ -79,12 +81,13 @@ public sealed class RuNkckiConnector : IFeedConnector
_dtoStore = dtoStore ?? throw new ArgumentNullException(nameof(dtoStore));
_advisoryStore = advisoryStore ?? throw new ArgumentNullException(nameof(advisoryStore));
_stateRepository = stateRepository ?? throw new ArgumentNullException(nameof(stateRepository));
_options = (options ?? throw new ArgumentNullException(nameof(options))).Value ?? throw new ArgumentNullException(nameof(options));
_options.Validate();
_diagnostics = diagnostics ?? throw new ArgumentNullException(nameof(diagnostics));
_timeProvider = timeProvider ?? TimeProvider.System;
_logger = logger ?? throw new ArgumentNullException(nameof(logger));
_cacheDirectory = ResolveCacheDirectory(_options.CacheDirectory);
_options = (options ?? throw new ArgumentNullException(nameof(options))).Value ?? throw new ArgumentNullException(nameof(options));
_options.Validate();
_diagnostics = diagnostics ?? throw new ArgumentNullException(nameof(diagnostics));
_timeProvider = timeProvider ?? TimeProvider.System;
_logger = logger ?? throw new ArgumentNullException(nameof(logger));
_hash = cryptoHash ?? throw new ArgumentNullException(nameof(cryptoHash));
_cacheDirectory = ResolveCacheDirectory(_options.CacheDirectory);
EnsureCacheDirectory();
}
@@ -597,7 +600,7 @@ public sealed class RuNkckiConnector : IFeedConnector
}
var payload = JsonSerializer.SerializeToUtf8Bytes(dto, SerializerOptions);
var sha = Convert.ToHexString(SHA256.HashData(payload)).ToLowerInvariant();
var sha = _hash.ComputeHashHex(payload);
var documentUri = BuildDocumentUri(dto);
var existing = await _documentStore.FindBySourceAndUriAsync(SourceName, documentUri, cancellationToken).ConfigureAwait(false);

View File

@@ -18,6 +18,7 @@
<ProjectReference Include="../StellaOps.Concelier.Connector.Common/StellaOps.Concelier.Connector.Common.csproj" />
<ProjectReference Include="../StellaOps.Concelier.Models/StellaOps.Concelier.Models.csproj" />
<ProjectReference Include="../StellaOps.Concelier.Storage.Mongo/StellaOps.Concelier.Storage.Mongo.csproj" />
<ProjectReference Include="../../../__Libraries/StellaOps.Cryptography/StellaOps.Cryptography.csproj" />
</ItemGroup>
</Project>
</Project>

View File

@@ -8,10 +8,11 @@
</PropertyGroup>
<ItemGroup>
<PackageReference Include="Microsoft.Extensions.Caching.Memory" Version="10.0.0-rc.2.25502.107" />
<ProjectReference Include="../StellaOps.Concelier.Connector.Common/StellaOps.Concelier.Connector.Common.csproj" />
<ProjectReference Include="../StellaOps.Concelier.Models/StellaOps.Concelier.Models.csproj" />
<ProjectReference Include="../../../__Libraries/StellaOps.Plugin/StellaOps.Plugin.csproj" />
<ProjectReference Include="../StellaOps.Concelier.Normalization/StellaOps.Concelier.Normalization.csproj" />
<ProjectReference Include="../../../__Libraries/StellaOps.Cryptography/StellaOps.Cryptography.csproj" />
</ItemGroup>
</Project>
</Project>

View File

@@ -1,11 +1,10 @@
using System;
using System.Collections.Generic;
using System.Linq;
using System.Security.Cryptography;
using System.Text;
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Options;
using MongoDB.Bson;
using System.Text;
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Options;
using MongoDB.Bson;
using StellaOps.Concelier.Connector.Common.Fetch;
using StellaOps.Concelier.Connector.Common;
using StellaOps.Concelier.Connector.StellaOpsMirror.Client;
@@ -15,9 +14,10 @@ using StellaOps.Concelier.Connector.StellaOpsMirror.Settings;
using StellaOps.Concelier.Models;
using StellaOps.Concelier.Storage.Mongo;
using StellaOps.Concelier.Storage.Mongo.Advisories;
using StellaOps.Concelier.Storage.Mongo.Documents;
using StellaOps.Concelier.Storage.Mongo.Dtos;
using StellaOps.Plugin;
using StellaOps.Concelier.Storage.Mongo.Documents;
using StellaOps.Concelier.Storage.Mongo.Dtos;
using StellaOps.Plugin;
using StellaOps.Cryptography;
namespace StellaOps.Concelier.Connector.StellaOpsMirror;
@@ -30,12 +30,13 @@ public sealed class StellaOpsMirrorConnector : IFeedConnector
private readonly MirrorSignatureVerifier _signatureVerifier;
private readonly RawDocumentStorage _rawDocumentStorage;
private readonly IDocumentStore _documentStore;
private readonly IDtoStore _dtoStore;
private readonly IAdvisoryStore _advisoryStore;
private readonly ISourceStateRepository _stateRepository;
private readonly TimeProvider _timeProvider;
private readonly ILogger<StellaOpsMirrorConnector> _logger;
private readonly StellaOpsMirrorConnectorOptions _options;
private readonly IDtoStore _dtoStore;
private readonly IAdvisoryStore _advisoryStore;
private readonly ISourceStateRepository _stateRepository;
private readonly TimeProvider _timeProvider;
private readonly ILogger<StellaOpsMirrorConnector> _logger;
private readonly StellaOpsMirrorConnectorOptions _options;
private readonly ICryptoHash _hash;
public StellaOpsMirrorConnector(
MirrorManifestClient client,
@@ -45,20 +46,22 @@ public sealed class StellaOpsMirrorConnector : IFeedConnector
IDtoStore dtoStore,
IAdvisoryStore advisoryStore,
ISourceStateRepository stateRepository,
IOptions<StellaOpsMirrorConnectorOptions> options,
TimeProvider? timeProvider,
ILogger<StellaOpsMirrorConnector> logger)
IOptions<StellaOpsMirrorConnectorOptions> options,
TimeProvider? timeProvider,
ICryptoHash cryptoHash,
ILogger<StellaOpsMirrorConnector> logger)
{
_client = client ?? throw new ArgumentNullException(nameof(client));
_signatureVerifier = signatureVerifier ?? throw new ArgumentNullException(nameof(signatureVerifier));
_rawDocumentStorage = rawDocumentStorage ?? throw new ArgumentNullException(nameof(rawDocumentStorage));
_documentStore = documentStore ?? throw new ArgumentNullException(nameof(documentStore));
_dtoStore = dtoStore ?? throw new ArgumentNullException(nameof(dtoStore));
_advisoryStore = advisoryStore ?? throw new ArgumentNullException(nameof(advisoryStore));
_stateRepository = stateRepository ?? throw new ArgumentNullException(nameof(stateRepository));
_logger = logger ?? throw new ArgumentNullException(nameof(logger));
_timeProvider = timeProvider ?? TimeProvider.System;
_options = (options ?? throw new ArgumentNullException(nameof(options))).Value ?? throw new ArgumentNullException(nameof(options));
_advisoryStore = advisoryStore ?? throw new ArgumentNullException(nameof(advisoryStore));
_stateRepository = stateRepository ?? throw new ArgumentNullException(nameof(stateRepository));
_logger = logger ?? throw new ArgumentNullException(nameof(logger));
_timeProvider = timeProvider ?? TimeProvider.System;
_hash = cryptoHash ?? throw new ArgumentNullException(nameof(cryptoHash));
_options = (options ?? throw new ArgumentNullException(nameof(options))).Value ?? throw new ArgumentNullException(nameof(options));
ValidateOptions(_options);
}
@@ -280,7 +283,7 @@ public sealed class StellaOpsMirrorConnector : IFeedConnector
await _stateRepository.UpdateCursorAsync(Source, document, now, cancellationToken).ConfigureAwait(false);
}
private static void VerifyDigest(string expected, ReadOnlySpan<byte> payload, string path)
private void VerifyDigest(string expected, ReadOnlySpan<byte> payload, string path)
{
if (string.IsNullOrWhiteSpace(expected))
{
@@ -292,19 +295,16 @@ public sealed class StellaOpsMirrorConnector : IFeedConnector
throw new InvalidOperationException($"Unsupported digest '{expected}' for '{path}'.");
}
var actualHash = SHA256.HashData(payload);
var actual = "sha256:" + Convert.ToHexString(actualHash).ToLowerInvariant();
var actualHash = _hash.ComputeHashHex(payload, HashAlgorithms.Sha256);
var actual = "sha256:" + actualHash;
if (!string.Equals(actual, expected, StringComparison.OrdinalIgnoreCase))
{
throw new InvalidOperationException($"Digest mismatch for '{path}'. Expected {expected}, computed {actual}.");
}
}
/// <summary>Computes the SHA-256 digest of <paramref name="payload"/> as lower-case hex.</summary>
private static string ComputeSha256(ReadOnlySpan<byte> payload)
    => Convert.ToHexString(SHA256.HashData(payload)).ToLowerInvariant();
/// <summary>
/// Computes the SHA-256 digest of <paramref name="payload"/> as a hex string via the
/// injected <c>ICryptoHash</c> abstraction (replaces the former direct SHA256 call).
/// </summary>
private string ComputeSha256(ReadOnlySpan<byte> payload)
    => _hash.ComputeHashHex(payload, HashAlgorithms.Sha256);
private static string NormalizeDigest(string digest)
{

View File

@@ -5,6 +5,7 @@ using StellaOps.Concelier.Models;
using StellaOps.Concelier.Connector.Common.Packages;
using StellaOps.Concelier.Storage.Mongo.Documents;
using StellaOps.Concelier.Storage.Mongo.Dtos;
using StellaOps.Concelier.Normalization.SemVer;
namespace StellaOps.Concelier.Connector.Vndr.Cisco.Internal;
@@ -142,8 +143,9 @@ public static class CiscoMapper
continue;
}
var range = BuildVersionRange(product, recordedAt);
var ranges = BuildVersionRanges(product, recordedAt);
var statuses = BuildStatuses(product, recordedAt);
var normalizedVersions = BuildNormalizedVersions(product, ranges);
var provenance = new[]
{
new AdvisoryProvenance(
@@ -157,10 +159,10 @@ public static class CiscoMapper
type: AffectedPackageTypes.Vendor,
identifier: product.Name,
platform: null,
versionRanges: range is null ? Array.Empty<AffectedVersionRange>() : new[] { range },
versionRanges: ranges,
statuses: statuses,
provenance: provenance,
normalizedVersions: Array.Empty<NormalizedVersionRule>()));
normalizedVersions: normalizedVersions));
}
return packages.Count == 0
@@ -168,14 +170,46 @@ public static class CiscoMapper
: packages.OrderBy(static p => p.Identifier, StringComparer.OrdinalIgnoreCase).ToArray();
}
private static AffectedVersionRange? BuildVersionRange(CiscoAffectedProductDto product, DateTimeOffset recordedAt)
private static IReadOnlyList<AffectedVersionRange> BuildVersionRanges(CiscoAffectedProductDto product, DateTimeOffset recordedAt)
{
if (string.IsNullOrWhiteSpace(product.Version))
{
return null;
return Array.Empty<AffectedVersionRange>();
}
var version = product.Version.Trim();
var provenance = new AdvisoryProvenance(
VndrCiscoConnectorPlugin.SourceName,
"range",
product.ProductId ?? product.Name,
recordedAt);
var vendorExtensions = BuildVendorExtensions(product, includeVersion: true);
var semVerResults = SemVerRangeRuleBuilder.Build(version, patchedVersion: null, provenanceNote: BuildNormalizedVersionNote(product));
if (semVerResults.Count > 0)
{
var ranges = new List<AffectedVersionRange>(semVerResults.Count);
foreach (var result in semVerResults)
{
var semVerPrimitives = new RangePrimitives(
SemVer: result.Primitive,
Nevra: null,
Evr: null,
VendorExtensions: vendorExtensions);
ranges.Add(new AffectedVersionRange(
rangeKind: NormalizedVersionSchemes.SemVer,
introducedVersion: result.Primitive.Introduced,
fixedVersion: result.Primitive.Fixed,
lastAffectedVersion: result.Primitive.LastAffected,
rangeExpression: result.Expression ?? version,
provenance: provenance,
primitives: semVerPrimitives));
}
return ranges;
}
RangePrimitives? primitives = null;
string rangeKind = "vendor";
string? rangeExpression = version;
@@ -198,23 +232,20 @@ public static class CiscoMapper
}
else
{
primitives = new RangePrimitives(null, null, null, BuildVendorExtensions(product, includeVersion: true));
primitives = new RangePrimitives(null, null, null, vendorExtensions);
}
var provenance = new AdvisoryProvenance(
VndrCiscoConnectorPlugin.SourceName,
"range",
product.ProductId ?? product.Name,
recordedAt);
return new AffectedVersionRange(
return new[]
{
new AffectedVersionRange(
rangeKind: rangeKind,
introducedVersion: null,
fixedVersion: null,
lastAffectedVersion: null,
rangeExpression: rangeExpression,
provenance: provenance,
primitives: primitives);
primitives: primitives),
};
}
private static IReadOnlyDictionary<string, string>? BuildVendorExtensions(CiscoAffectedProductDto product, bool includeVersion = false)
@@ -233,6 +264,48 @@ public static class CiscoMapper
return dictionary.Count == 0 ? null : dictionary;
}
// Derives normalized version rules from the already-built affected version ranges,
// tagging each rule with the product's provenance note. Returns an empty list when
// no range yields a rule.
private static IReadOnlyList<NormalizedVersionRule> BuildNormalizedVersions(
    CiscoAffectedProductDto product,
    IReadOnlyList<AffectedVersionRange> ranges)
{
    if (ranges.Count == 0)
    {
        return Array.Empty<NormalizedVersionRule>();
    }

    var provenanceNote = BuildNormalizedVersionNote(product);
    var collected = new List<NormalizedVersionRule>(ranges.Count);
    for (var i = 0; i < ranges.Count; i++)
    {
        // ToNormalizedVersionRule yields null for ranges that cannot be normalized; skip those.
        if (ranges[i].ToNormalizedVersionRule(provenanceNote) is { } rule)
        {
            collected.Add(rule);
        }
    }

    return collected.Count == 0 ? Array.Empty<NormalizedVersionRule>() : collected.ToArray();
}
// Builds the provenance note for normalized versions: prefers the stable product
// identifier ("cisco:{productId}"); falls back to a slugified product name; returns
// null when neither field is usable.
private static string? BuildNormalizedVersionNote(CiscoAffectedProductDto product)
{
    var productId = product.ProductId;
    if (!string.IsNullOrWhiteSpace(productId))
    {
        return $"cisco:{productId.Trim().ToLowerInvariant()}";
    }

    var name = product.Name;
    if (string.IsNullOrWhiteSpace(name))
    {
        return null;
    }

    var slug = name.Trim().ToLowerInvariant().Replace(' ', '-');
    return $"cisco:{slug}";
}
private static IReadOnlyList<AffectedPackageStatus> BuildStatuses(CiscoAffectedProductDto product, DateTimeOffset recordedAt)
{
if (product.Statuses is null || product.Statuses.Count == 0)

View File

@@ -1,4 +1,4 @@
# TASKS
| Task | Owner(s) | Depends on | Notes |
|---|---|---|---|
|FEEDCONN-CISCO-02-009 SemVer range provenance|BE-Conn-Cisco|CONCELIER-LNM-21-001|**TODO (due 2025-10-21)** Emit Cisco SemVer ranges into `advisory_observations.affected.versions[]` with provenance identifiers (`cisco:{productId}`) and deterministic comparison keys. Update mapper/tests for the Link-Not-Merge schema and replace legacy merge counter checks with observation/linkset validation.|
|FEEDCONN-CISCO-02-009 SemVer range provenance|BE-Conn-Cisco|CONCELIER-LNM-21-001|**DOING (2025-11-08)** Emitting Cisco SemVer ranges into `advisory_observations.affected.versions[]` with provenance identifiers (`cisco:{productId}`) and deterministic comparison keys. Updating mapper/tests for the Link-Not-Merge schema and replacing legacy merge counter checks with observation/linkset validation.|

View File

@@ -1,35 +1,94 @@
using System.Text.Json;
using Microsoft.Extensions.Options;
using StellaOps.Aoc;
using StellaOps.Concelier.RawModels;
namespace StellaOps.Concelier.Core.Aoc;
/// <summary>
/// Aggregation-Only Contract guard applied to raw advisory documents prior to persistence.
/// </summary>
public sealed class AdvisoryRawWriteGuard : IAdvisoryRawWriteGuard
{
    private static readonly JsonSerializerOptions SerializerOptions = new(JsonSerializerDefaults.Web);

    private readonly IAocGuard _guard;
    private readonly AocGuardOptions _options;

    /// <summary>
    /// Creates the guard wrapper.
    /// </summary>
    /// <param name="guard">AOC validator applied to each document; required.</param>
    /// <param name="options">Optional guard options; defaults to <see cref="AocGuardOptions.Default"/> when omitted.</param>
    public AdvisoryRawWriteGuard(IAocGuard guard, IOptions<AocGuardOptions>? options = null)
    {
        _guard = guard ?? throw new ArgumentNullException(nameof(guard));
        _options = options?.Value ?? AocGuardOptions.Default;
    }

    /// <summary>
    /// Validates <paramref name="document"/> against the AOC guard.
    /// </summary>
    /// <exception cref="ArgumentNullException">Thrown when <paramref name="document"/> is null.</exception>
    /// <exception cref="ConcelierAocGuardException">Thrown when the guard reports violations.</exception>
    public void EnsureValid(AdvisoryRawDocument document)
    {
        ArgumentNullException.ThrowIfNull(document);

        // Serialize straight to a JsonDocument instead of round-tripping through an
        // intermediate string (Serialize + Parse) — same JSON, one fewer allocation pass.
        using var payload = JsonSerializer.SerializeToDocument(document, SerializerOptions);
        var result = _guard.Validate(payload.RootElement, _options);
        if (!result.IsValid)
        {
            throw new ConcelierAocGuardException(result);
        }
    }
}
using System.Collections.Immutable;
using System.Diagnostics;
using System.Text.Json;
using Microsoft.Extensions.Options;
using StellaOps.Aoc;
using StellaOps.Concelier.RawModels;
using StellaOps.Ingestion.Telemetry;
namespace StellaOps.Concelier.Core.Aoc;
/// <summary>
/// Aggregation-Only Contract guard applied to raw advisory documents prior to persistence.
/// </summary>
public sealed class AdvisoryRawWriteGuard : IAdvisoryRawWriteGuard
{
    private static readonly JsonSerializerOptions SerializerOptions = new(JsonSerializerDefaults.Web);

    private readonly IAocGuard _guard;
    private readonly AocGuardOptions _options;

    // options is optional; falls back to AocGuardOptions.Default when not supplied.
    public AdvisoryRawWriteGuard(IAocGuard guard, IOptions<AocGuardOptions>? options = null)
    {
        _guard = guard ?? throw new ArgumentNullException(nameof(guard));
        _options = options?.Value ?? AocGuardOptions.Default;
    }

    /// <summary>
    /// Validates <paramref name="document"/> against the AOC guard, wrapping the
    /// validation pass in a telemetry activity. Throws <see cref="ConcelierAocGuardException"/>
    /// when the guard reports violations.
    /// </summary>
    public void EnsureValid(AdvisoryRawDocument document)
    {
        ArgumentNullException.ThrowIfNull(document);
        // Replace default/uninitialized immutable collections before serialization so
        // the guard always sees a fully materialized payload.
        var normalized = NormalizeDocument(document);
        var serialized = JsonSerializer.Serialize(normalized, SerializerOptions);
        using var guardActivity = IngestionTelemetry.StartGuardActivity(
            normalized.Tenant,
            normalized.Source.Vendor,
            normalized.Upstream.UpstreamId,
            normalized.Upstream.ContentHash,
            normalized.Supersedes);
        using var payload = JsonDocument.Parse(serialized);
        var result = _guard.Validate(payload.RootElement, _options);
        if (!result.IsValid)
        {
            // Tag the activity with the violation count and the first error code,
            // mark it failed, then surface the violations to the caller.
            var violationCount = result.Violations.IsDefaultOrEmpty ? 0 : result.Violations.Length;
            var primaryCode = violationCount > 0 ? result.Violations[0].ErrorCode : string.Empty;
            guardActivity?.SetTag("violationCount", violationCount);
            if (!string.IsNullOrWhiteSpace(primaryCode))
            {
                guardActivity?.SetTag("code", primaryCode);
            }
            guardActivity?.SetStatus(ActivityStatusCode.Error, primaryCode);
            throw new ConcelierAocGuardException(result);
        }
        // Clean pass: record zero violations and a successful status.
        guardActivity?.SetTag("violationCount", 0);
        guardActivity?.SetStatus(ActivityStatusCode.Ok);
    }

    // Rebuilds the document with any default (unset) immutable collections replaced
    // by empty instances on the identifiers, linkset, and top-level links.
    private static AdvisoryRawDocument NormalizeDocument(AdvisoryRawDocument document)
    {
        var identifiers = document.Identifiers with
        {
            Aliases = Normalize(document.Identifiers.Aliases)
        };
        var linkset = document.Linkset with
        {
            Aliases = Normalize(document.Linkset.Aliases),
            PackageUrls = Normalize(document.Linkset.PackageUrls),
            Cpes = Normalize(document.Linkset.Cpes),
            References = Normalize(document.Linkset.References),
            ReconciledFrom = Normalize(document.Linkset.ReconciledFrom),
            Notes = Normalize(document.Linkset.Notes)
        };
        return document with
        {
            Identifiers = identifiers,
            Linkset = linkset,
            Links = Normalize(document.Links)
        };
    }

    // Maps a default (never-initialized) ImmutableArray to an empty array.
    private static ImmutableArray<T> Normalize<T>(ImmutableArray<T> value) =>
        value.IsDefault ? ImmutableArray<T>.Empty : value;

    // Maps a default (null) dictionary reference to an empty dictionary.
    private static ImmutableDictionary<TKey, TValue> Normalize<TKey, TValue>(ImmutableDictionary<TKey, TValue> value)
        where TKey : notnull =>
        value == default ? ImmutableDictionary<TKey, TValue>.Empty : value;
}

View File

@@ -1,12 +1,14 @@
using System.Collections.Generic;
using System.Collections.Immutable;
using System.Globalization;
using System.Text;
using System.Text.Json;
using System.Linq;
using Microsoft.Extensions.Logging;
using StellaOps.Aoc;
using StellaOps.Concelier.Core.Aoc;
using System.Collections.Generic;
using System.Collections.Immutable;
using System.Diagnostics;
using System.Globalization;
using System.Linq;
using System.Text;
using System.Text.Json;
using Microsoft.Extensions.Logging;
using StellaOps.Aoc;
using StellaOps.Ingestion.Telemetry;
using StellaOps.Concelier.Core.Aoc;
using StellaOps.Concelier.Core.Linksets;
using StellaOps.Concelier.RawModels;
using StellaOps.Concelier.Models;
@@ -40,55 +42,104 @@ internal sealed class AdvisoryRawService : IAdvisoryRawService
_logger = logger ?? throw new ArgumentNullException(nameof(logger));
}
// Ingests a raw advisory document: normalizes it, recomputes its linkset, enforces
// the AOC write guard, and upserts it into the repository. Duplicate content is a
// no-op (logged at debug); any client-supplied supersedes pointer is discarded.
public async Task<AdvisoryRawUpsertResult> IngestAsync(AdvisoryRawDocument document, CancellationToken cancellationToken)
{
    ArgumentNullException.ThrowIfNull(document);
    // Supersedes pointers are server-assigned; capture any client value only to warn about it.
    var clientSupersedes = string.IsNullOrWhiteSpace(document.Supersedes)
        ? null
        : document.Supersedes.Trim();
    var normalized = Normalize(document);
    // Recompute the linkset from the normalized document before guarding/persisting.
    var enriched = normalized with { Linkset = _linksetMapper.Map(normalized) };
    if (!string.IsNullOrEmpty(clientSupersedes))
    {
        _logger.LogWarning(
            "Ignoring client-supplied supersedes pointer for advisory_raw tenant={Tenant} source={Vendor} upstream={UpstreamId} pointer={Supersedes}",
            enriched.Tenant,
            enriched.Source.Vendor,
            enriched.Upstream.UpstreamId,
            clientSupersedes);
    }
    // Throws ConcelierAocGuardException on AOC violations; nothing is persisted in that case.
    _writeGuard.EnsureValid(enriched);
    var result = await _repository.UpsertAsync(enriched, cancellationToken).ConfigureAwait(false);
    if (result.Inserted)
    {
        _logger.LogInformation(
            "Ingested advisory_raw document id={DocumentId} tenant={Tenant} source={Vendor} upstream={UpstreamId} hash={Hash} supersedes={Supersedes}",
            result.Record.Id,
            result.Record.Document.Tenant,
            result.Record.Document.Source.Vendor,
            result.Record.Document.Upstream.UpstreamId,
            result.Record.Document.Upstream.ContentHash,
            string.IsNullOrWhiteSpace(result.Record.Document.Supersedes)
                ? "(none)"
                : result.Record.Document.Supersedes);
    }
    else
    {
        // Upsert found identical content; skip re-ingestion.
        _logger.LogDebug(
            "Skipped advisory_raw duplicate tenant={Tenant} source={Vendor} upstream={UpstreamId} hash={Hash}",
            result.Record.Document.Tenant,
            result.Record.Document.Source.Vendor,
            result.Record.Document.Upstream.UpstreamId,
            result.Record.Document.Upstream.ContentHash);
    }
    return result;
}
/// <summary>
/// Ingests a raw advisory document with ingestion telemetry: normalizes it, derives
/// the linkset, validates it with the write guard, and upserts it into the repository.
/// Transform latency, guard violations, and write attempts are recorded via
/// <see cref="IngestionTelemetry"/>.
/// </summary>
/// <param name="document">Raw advisory document supplied by a connector.</param>
/// <param name="cancellationToken">Token used to cancel the repository upsert.</param>
/// <returns>The upsert result, indicating whether a new record was inserted.</returns>
/// <exception cref="ConcelierAocGuardException">The document violates write-guard invariants.</exception>
public async Task<AdvisoryRawUpsertResult> IngestAsync(AdvisoryRawDocument document, CancellationToken cancellationToken)
{
    ArgumentNullException.ThrowIfNull(document);

    // Supersedes pointers are derived server-side; a client-supplied value is logged and ignored.
    var clientSupersedes = string.IsNullOrWhiteSpace(document.Supersedes)
        ? null
        : document.Supersedes.Trim();

    var transformWatch = Stopwatch.StartNew();
    // Payload size is estimated before normalization so the span reflects the input document.
    var initialPayloadBytes = EstimatePayloadBytes(document.Content.Raw);
    using var transformActivity = IngestionTelemetry.StartTransformActivity(
        document.Tenant,
        document.Source.Vendor,
        document.Upstream.UpstreamId,
        document.Upstream.ContentHash,
        document.Content.Format,
        initialPayloadBytes);

    var normalized = Normalize(document);
    // Linkset is always recomputed from the normalized document, never trusted from input.
    var enriched = normalized with { Linkset = _linksetMapper.Map(normalized) };
    transformWatch.Stop();

    // Canonical identifiers come from the enriched document, not the raw input.
    var tenant = enriched.Tenant;
    var source = enriched.Source.Vendor;
    var upstreamId = enriched.Upstream.UpstreamId;
    var contentHash = enriched.Upstream.ContentHash;

    if (!string.IsNullOrEmpty(clientSupersedes))
    {
        _logger.LogWarning(
            "Ignoring client-supplied supersedes pointer for advisory_raw tenant={Tenant} source={Vendor} upstream={UpstreamId} pointer={Supersedes}",
            tenant,
            source,
            upstreamId,
            clientSupersedes);
    }

    // Tags are set after normalization so they reflect the canonical identifiers.
    transformActivity?.SetTag("tenant", tenant);
    transformActivity?.SetTag("source", source);
    transformActivity?.SetTag("upstream.id", upstreamId);
    transformActivity?.SetTag("contentHash", contentHash);
    transformActivity?.SetTag("documentType", enriched.Content.Format);
    transformActivity?.SetTag("payloadBytes", initialPayloadBytes);
    IngestionTelemetry.RecordLatency(tenant, source, IngestionTelemetry.PhaseTransform, transformWatch.Elapsed);

    try
    {
        _writeGuard.EnsureValid(enriched);
    }
    catch (ConcelierAocGuardException guardException)
    {
        // Guard rejections are surfaced both as violations and as rejected write attempts.
        IngestionTelemetry.RecordViolation(tenant, source, guardException.PrimaryErrorCode);
        IngestionTelemetry.RecordWriteAttempt(tenant, source, IngestionTelemetry.ResultReject);
        throw;
    }

    var result = await _repository.UpsertAsync(enriched, cancellationToken).ConfigureAwait(false);
    IngestionTelemetry.RecordWriteAttempt(tenant, source, result.Inserted ? IngestionTelemetry.ResultOk : IngestionTelemetry.ResultNoop);

    if (result.Inserted)
    {
        _logger.LogInformation(
            "Ingested advisory_raw document id={DocumentId} tenant={Tenant} source={Vendor} upstream={UpstreamId} hash={Hash} supersedes={Supersedes}",
            result.Record.Id,
            tenant,
            source,
            upstreamId,
            contentHash,
            string.IsNullOrWhiteSpace(result.Record.Document.Supersedes)
                ? "(none)"
                : result.Record.Document.Supersedes);
    }
    else
    {
        // Duplicate content for the same tenant/vendor/upstream id — no new record written.
        _logger.LogDebug(
            "Skipped advisory_raw duplicate tenant={Tenant} source={Vendor} upstream={UpstreamId} hash={Hash}",
            tenant,
            source,
            upstreamId,
            contentHash);
    }

    return result;
}
/// <summary>
/// Best-effort payload size: the UTF-8 byte length of the element's raw JSON text.
/// Returns 0 when the raw text is unavailable.
/// </summary>
private static long EstimatePayloadBytes(JsonElement element)
{
    try
    {
        return Encoding.UTF8.GetByteCount(element.GetRawText());
    }
    catch (InvalidOperationException)
    {
        // GetRawText throws for an undefined element or a disposed parent document.
        return 0;
    }
}
public Task<AdvisoryRawRecord?> FindByIdAsync(string tenant, string id, CancellationToken cancellationToken)
{

View File

@@ -18,7 +18,8 @@
<ProjectReference Include="..\StellaOps.Concelier.Models\StellaOps.Concelier.Models.csproj" />
<ProjectReference Include="..\StellaOps.Concelier.RawModels\StellaOps.Concelier.RawModels.csproj" />
<ProjectReference Include="..\StellaOps.Concelier.Normalization\StellaOps.Concelier.Normalization.csproj" />
<ProjectReference Include="..\..\..\__Libraries\StellaOps.Ingestion.Telemetry\StellaOps.Ingestion.Telemetry.csproj" />
<ProjectReference Include="../../../__Libraries/StellaOps.Plugin/StellaOps.Plugin.csproj" />
<ProjectReference Include="../../../Aoc/__Libraries/StellaOps.Aoc/StellaOps.Aoc.csproj" />
</ItemGroup>
</Project>
</Project>

View File

@@ -78,7 +78,7 @@
## Observability & Forensics (Epic 15)
| ID | Status | Owner(s) | Depends on | Notes |
|----|--------|----------|------------|-------|
| CONCELIER-OBS-50-001 `Telemetry adoption` | TODO | Concelier Core Guild, Observability Guild | TELEMETRY-OBS-50-001, TELEMETRY-OBS-50-002 | Replace ad-hoc logging with telemetry core across ingestion/linking pipelines; ensure spans/logs include tenant, source vendor, upstream id, content hash, and trace IDs. |
| CONCELIER-OBS-50-001 `Telemetry adoption` | DONE (2025-11-07) | Concelier Core Guild, Observability Guild | TELEMETRY-OBS-50-001, TELEMETRY-OBS-50-002 | Replace ad-hoc logging with telemetry core across ingestion/linking pipelines; ensure spans/logs include tenant, source vendor, upstream id, content hash, and trace IDs. |
| CONCELIER-OBS-51-001 `Metrics & SLOs` | TODO | Concelier Core Guild, DevOps Guild | CONCELIER-OBS-50-001, TELEMETRY-OBS-51-001 | Emit metrics for ingest latency (cold/warm), queue depth, aoc violation rate, and publish SLO burn-rate alerts (ingest P95 <30s cold / <5s warm). Ship dashboards + alert configs. |
| CONCELIER-OBS-52-001 `Timeline events` | TODO | Concelier Core Guild | CONCELIER-OBS-50-001, TIMELINE-OBS-52-002 | Emit `timeline_event` records for advisory ingest/normalization/linkset creation with provenance, trace IDs, conflict summaries, and evidence placeholders. |
| CONCELIER-OBS-53-001 `Evidence snapshots` | TODO | Concelier Core Guild, Evidence Locker Guild | CONCELIER-OBS-52-001, EVID-OBS-53-002 | Produce advisory evaluation bundle payloads (raw doc, linkset, normalization diff) for evidence locker; ensure Merkle manifests seeded with content hashes. |

View File

@@ -1,28 +1,33 @@
using System.Collections.Generic;
using System.Globalization;
using System.IO;
using System.Runtime.CompilerServices;
using System.Security.Cryptography;
using System.Text;
using System.Threading.Tasks;
using StellaOps.Concelier.Models;
namespace StellaOps.Concelier.Exporter.Json;
using System.Runtime.CompilerServices;
using System.Text;
using System.Threading.Tasks;
using StellaOps.Concelier.Models;
using StellaOps.Cryptography;
namespace StellaOps.Concelier.Exporter.Json;
/// <summary>
/// Writes canonical advisory snapshots into a vuln-list style directory tree with deterministic ordering.
/// </summary>
public sealed class JsonExportSnapshotBuilder
{
private static readonly Encoding Utf8NoBom = new UTF8Encoding(encoderShouldEmitUTF8Identifier: false);
private readonly JsonExportOptions _options;
private readonly IJsonExportPathResolver _pathResolver;
public JsonExportSnapshotBuilder(JsonExportOptions options, IJsonExportPathResolver pathResolver)
{
_options = options ?? throw new ArgumentNullException(nameof(options));
_pathResolver = pathResolver ?? throw new ArgumentNullException(nameof(pathResolver));
}
private static readonly Encoding Utf8NoBom = new UTF8Encoding(encoderShouldEmitUTF8Identifier: false);
private readonly JsonExportOptions _options;
private readonly IJsonExportPathResolver _pathResolver;
private readonly ICryptoHash _hash;
/// <summary>
/// Creates a snapshot builder.
/// </summary>
/// <param name="options">Export options controlling layout and serialization.</param>
/// <param name="pathResolver">Resolves the relative path for each advisory snapshot.</param>
/// <param name="hash">Optional hash provider; when null, a default is created via
/// <see cref="CryptoHashFactory.CreateDefault"/>.</param>
/// <exception cref="ArgumentNullException"><paramref name="options"/> or <paramref name="pathResolver"/> is null.</exception>
public JsonExportSnapshotBuilder(
    JsonExportOptions options,
    IJsonExportPathResolver pathResolver,
    ICryptoHash? hash = null)
{
    _options = options ?? throw new ArgumentNullException(nameof(options));
    _pathResolver = pathResolver ?? throw new ArgumentNullException(nameof(pathResolver));
    _hash = hash ?? CryptoHashFactory.CreateDefault();
}
public Task<JsonExportResult> WriteAsync(
IReadOnlyCollection<Advisory> advisories,
@@ -97,7 +102,7 @@ public sealed class JsonExportSnapshotBuilder
await File.WriteAllBytesAsync(destination, bytes, cancellationToken).ConfigureAwait(false);
File.SetLastWriteTimeUtc(destination, exportedAt.UtcDateTime);
var digest = ComputeDigest(bytes);
var digest = ComputeDigest(bytes);
files.Add(new JsonExportFile(entry.RelativePath, bytes.LongLength, digest));
totalBytes += bytes.LongLength;
}
@@ -232,10 +237,9 @@ public sealed class JsonExportSnapshotBuilder
private sealed record PathResolution(Advisory Advisory, string RelativePath, IReadOnlyList<string> Segments);
// Canonical digest string for an export file: "sha256:" prefix plus the
// lowercase hex encoding of the payload's SHA-256 hash.
private static string ComputeDigest(ReadOnlySpan<byte> payload)
{
    var digest = SHA256.HashData(payload);
    return string.Concat("sha256:", Convert.ToHexString(digest).ToLowerInvariant());
}
}
// Digest string for an export file, delegating hashing to the injected ICryptoHash.
// NOTE(review): assumes ComputeHashHex emits lowercase hex like the previous
// SHA256.HashData/ToLowerInvariant path — confirm against the ICryptoHash contract.
private string ComputeDigest(ReadOnlySpan<byte> payload)
{
    var hex = _hash.ComputeHashHex(payload, HashAlgorithms.Sha256);
    return $"sha256:{hex}";
}
}

View File

@@ -4,12 +4,14 @@ using System.Collections.Generic;
using System.Collections.Immutable;
using System.IO;
using System.Linq;
using Microsoft.Extensions.DependencyInjection;
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Options;
using StellaOps.Concelier.Core.Events;
using StellaOps.Concelier.Models;
using StellaOps.Concelier.Storage.Mongo.Advisories;
using StellaOps.Concelier.Storage.Mongo.Exporting;
using StellaOps.Cryptography;
using StellaOps.Plugin;
namespace StellaOps.Concelier.Exporter.Json;
@@ -51,15 +53,16 @@ public sealed class JsonFeedExporter : IFeedExporter
public async Task ExportAsync(IServiceProvider services, CancellationToken cancellationToken)
{
var exportedAt = _timeProvider.GetUtcNow();
var exportId = exportedAt.ToString(_options.DirectoryNameFormat, CultureInfo.InvariantCulture);
var exportRoot = Path.GetFullPath(_options.OutputRoot);
var exportedAt = _timeProvider.GetUtcNow();
var exportId = exportedAt.ToString(_options.DirectoryNameFormat, CultureInfo.InvariantCulture);
var exportRoot = Path.GetFullPath(_options.OutputRoot);
_logger.LogInformation("Starting JSON export {ExportId}", exportId);
var existingState = await _stateManager.GetAsync(ExporterId, cancellationToken).ConfigureAwait(false);
var builder = new JsonExportSnapshotBuilder(_options, _pathResolver);
var existingState = await _stateManager.GetAsync(ExporterId, cancellationToken).ConfigureAwait(false);
var cryptoHash = services.GetRequiredService<ICryptoHash>();
var builder = new JsonExportSnapshotBuilder(_options, _pathResolver, cryptoHash);
var canonicalAdvisories = await MaterializeCanonicalAdvisoriesAsync(cancellationToken).ConfigureAwait(false);
var result = await builder.WriteAsync(canonicalAdvisories, exportedAt, exportId, cancellationToken).ConfigureAwait(false);
result = await JsonMirrorBundleWriter.WriteAsync(result, _options, services, _timeProvider, _logger, cancellationToken).ConfigureAwait(false);

View File

@@ -50,7 +50,8 @@ internal static class JsonMirrorBundleWriter
ArgumentNullException.ThrowIfNull(timeProvider);
ArgumentNullException.ThrowIfNull(logger);
var mirrorOptions = options.Mirror ?? new JsonExportOptions.JsonMirrorOptions();
var cryptoHash = services.GetRequiredService<ICryptoHash>();
var mirrorOptions = options.Mirror ?? new JsonExportOptions.JsonMirrorOptions();
if (!mirrorOptions.Enabled || mirrorOptions.Domains.Count == 0)
{
return result;
@@ -123,7 +124,7 @@ internal static class JsonMirrorBundleWriter
await WriteFileAsync(bundlePath, bundleBytes, exportedAtUtc, cancellationToken).ConfigureAwait(false);
var bundleRelativePath = ToRelativePath(result.ExportDirectory, bundlePath);
var bundleDigest = ComputeDigest(bundleBytes);
var bundleDigest = ComputeDigest(cryptoHash, bundleBytes);
var bundleLength = (long)bundleBytes.LongLength;
additionalFiles.Add(new JsonExportFile(bundleRelativePath, bundleLength, bundleDigest));
@@ -142,7 +143,7 @@ internal static class JsonMirrorBundleWriter
await WriteFileAsync(signaturePath, signatureBytes, exportedAtUtc, cancellationToken).ConfigureAwait(false);
var signatureRelativePath = ToRelativePath(result.ExportDirectory, signaturePath);
var signatureDigest = ComputeDigest(signatureBytes);
var signatureDigest = ComputeDigest(cryptoHash, signatureBytes);
var signatureLength = (long)signatureBytes.LongLength;
additionalFiles.Add(new JsonExportFile(signatureRelativePath, signatureLength, signatureDigest));
@@ -170,7 +171,7 @@ internal static class JsonMirrorBundleWriter
await WriteFileAsync(manifestPath, manifestBytes, exportedAtUtc, cancellationToken).ConfigureAwait(false);
var manifestRelativePath = ToRelativePath(result.ExportDirectory, manifestPath);
var manifestDigest = ComputeDigest(manifestBytes);
var manifestDigest = ComputeDigest(cryptoHash, manifestBytes);
var manifestLength = (long)manifestBytes.LongLength;
additionalFiles.Add(new JsonExportFile(manifestRelativePath, manifestLength, manifestDigest));
@@ -198,7 +199,7 @@ internal static class JsonMirrorBundleWriter
await WriteFileAsync(indexPath, indexBytes, exportedAtUtc, cancellationToken).ConfigureAwait(false);
var indexRelativePath = ToRelativePath(result.ExportDirectory, indexPath);
var indexDigest = ComputeDigest(indexBytes);
var indexDigest = ComputeDigest(cryptoHash, indexBytes);
var indexLength = (long)indexBytes.LongLength;
additionalFiles.Add(new JsonExportFile(indexRelativePath, indexLength, indexDigest));
@@ -490,11 +491,11 @@ internal static class JsonMirrorBundleWriter
return relative.Replace(Path.DirectorySeparatorChar, '/');
}
// "sha256:<lowercase hex>" digest of the payload, matching the mirror manifest convention.
private static string ComputeDigest(ReadOnlySpan<byte> payload)
{
    var digestBytes = SHA256.HashData(payload);
    var hex = Convert.ToHexString(digestBytes).ToLowerInvariant();
    return "sha256:" + hex;
}
// Digest string for a mirror bundle file, delegating hashing to the supplied ICryptoHash.
// NOTE(review): assumes ComputeHashHex emits lowercase hex like the previous
// SHA256.HashData/ToLowerInvariant path — confirm against the ICryptoHash contract.
private static string ComputeDigest(ICryptoHash hash, ReadOnlySpan<byte> payload)
{
    var hex = hash.ComputeHashHex(payload, HashAlgorithms.Sha256);
    return $"sha256:{hex}";
}
private static void TrySetDirectoryTimestamp(string directory, DateTime exportedAtUtc)
{

View File

@@ -436,14 +436,14 @@ public sealed class VulnListJsonExportPathResolver : IJsonExportPathResolver
var invalid = Path.GetInvalidFileNameChars();
Span<char> buffer = stackalloc char[name.Length];
var count = 0;
foreach (var ch in name)
{
if (ch == '/' || ch == '\\' || Array.IndexOf(invalid, ch) >= 0)
{
buffer[count++] = '_';
}
else
{
foreach (var ch in name)
{
if (ch == '/' || ch == '\\' || ch == ':' || Array.IndexOf(invalid, ch) >= 0)
{
buffer[count++] = '_';
}
else
{
buffer[count++] = ch;
}
}

View File

@@ -12,4 +12,4 @@
|MERGE-LNM-21-001 Migration plan authoring|BE-Merge, Architecture Guild|CONCELIER-LNM-21-101|**DONE (2025-11-03)** Authored `docs/migration/no-merge.md` with rollout phases, backfill/validation checklists, rollback guidance, and ownership matrix for the Link-Not-Merge cutover.|
|MERGE-LNM-21-002 Merge service deprecation|BE-Merge|MERGE-LNM-21-001|**DONE (2025-11-07)** Feature flag now defaults to Link-Not-Merge mode (`NoMergeEnabled=true`) across options/config, analyzers enforce deprecation, and WebService option tests cover the regression; dotnet CLI validation still queued for a workstation with preview SDK.<br>2025-11-05 14:42Z: Implemented `concelier:features:noMergeEnabled` gate, merge job allowlist checks, `[Obsolete]` markings, and analyzer scaffolding to steer consumers toward linkset APIs.<br>2025-11-06 16:10Z: Introduced Roslyn analyzer (`CONCELIER0002`) referenced by Concelier WebService + tests, documented suppression guidance, and updated migration playbook.<br>2025-11-07 03:25Z: Default-on toggle + job gating surfacing ingestion test brittleness; guard logs capture requests missing `upstream.contentHash`.<br>2025-11-07 19:45Z: Set `ConcelierOptions.Features.NoMergeEnabled` default to `true`, added regression coverage (`Features_NoMergeEnabled_DefaultsToTrue`), and rechecked ingest helpers to carry canonical links before closing the task.|
> 2025-11-03: Catalogued call sites (WebService Program `AddMergeModule`, built-in job registration `merge:reconcile`, `MergeReconcileJob`) and confirmed unit tests are the only direct `MergeAsync` callers; next step is to define analyzer + replacement observability coverage.
|MERGE-LNM-21-003 Determinism/test updates|QA Guild, BE-Merge|MERGE-LNM-21-002|**DOING (2025-11-07)** Replacing legacy merge determinism harness with observation/linkset regression plan; tracking scenarios in `docs/dev/lnm-determinism-tests.md` before porting fixtures.<br>2025-11-07 20:05Z: Ported merge determinism fixture into `AdvisoryObservationFactoryTests.Create_IsDeterministicAcrossRuns` and removed the redundant merge integration test.|
|MERGE-LNM-21-003 Determinism/test updates|QA Guild, BE-Merge|MERGE-LNM-21-002|**DONE (2025-11-07)** Legacy merge determinism suite replaced by observation/linkset/export regressions. Added coverage across `AdvisoryObservationFactoryTests` (raw references + conflict notes), `AdvisoryEventLogTests` (sorted statement IDs), and `JsonExportSnapshotBuilderTests` (order-independent digests). `docs/dev/lnm-determinism-tests.md` updated to reflect parity.|

View File

@@ -21,14 +21,14 @@ public static class RawDocumentFactory
return new AdvisoryRawDocument(tenant, source, upstream, clonedContent, identifiers, linkset, advisoryKey, normalizedLinks, supersedes);
}
public static VexRawDocument CreateVex(
string tenant,
RawSourceMetadata source,
RawUpstreamMetadata upstream,
RawContent content,
RawLinkset linkset,
ImmutableArray<VexStatementSummary> statements,
string? supersedes = null)
public static VexRawDocument CreateVex(
string tenant,
RawSourceMetadata source,
RawUpstreamMetadata upstream,
RawContent content,
RawLinkset linkset,
ImmutableArray<VexStatementSummary>? statements = null,
string? supersedes = null)
{
var clonedContent = content with { Raw = Clone(content.Raw) };
return new VexRawDocument(tenant, source, upstream, clonedContent, linkset, statements, supersedes);

View File

@@ -3,15 +3,17 @@ using System.Text.Json.Serialization;
namespace StellaOps.Concelier.RawModels;
public sealed record VexRawDocument(
[property: JsonPropertyName("tenant")] string Tenant,
[property: JsonPropertyName("source")] RawSourceMetadata Source,
[property: JsonPropertyName("upstream")] RawUpstreamMetadata Upstream,
[property: JsonPropertyName("content")] RawContent Content,
[property: JsonPropertyName("linkset")] RawLinkset Linkset,
[property: JsonPropertyName("statements")] ImmutableArray<VexStatementSummary> Statements,
[property: JsonPropertyName("supersedes")] string? Supersedes = null)
{
/// <summary>
/// Raw VEX document captured from an upstream source, carrying tenant scope,
/// source/upstream provenance, the raw content, the derived linkset, optional
/// statement summaries, and an optional supersedes pointer.
/// </summary>
public sealed record VexRawDocument(
    [property: JsonPropertyName("tenant")] string Tenant,
    [property: JsonPropertyName("source")] RawSourceMetadata Source,
    [property: JsonPropertyName("upstream")] RawUpstreamMetadata Upstream,
    [property: JsonPropertyName("content")] RawContent Content,
    [property: JsonPropertyName("linkset")] RawLinkset Linkset,
    // Optional statement summaries; omitted from serialized JSON when null.
    [property: JsonPropertyName("statements")]
    [property: JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
    ImmutableArray<VexStatementSummary>? Statements = null,
    [property: JsonPropertyName("supersedes")] string? Supersedes = null)
{
    /// <summary>Returns a copy of this document with the supersedes pointer set.</summary>
    public VexRawDocument WithSupersedes(string supersedes)
        => this with { Supersedes = supersedes };
}

View File

@@ -126,7 +126,7 @@ public sealed class EnsureAdvisoryCanonicalKeyBackfillMigration : IMongoMigratio
return string.Empty;
}
return value.IsString ? value.AsString : value.ToString();
return value.IsString ? value.AsString : value.ToString() ?? string.Empty;
}
private static string? GetOptionalString(BsonDocument document, string name)
@@ -150,7 +150,7 @@ public sealed class EnsureAdvisoryCanonicalKeyBackfillMigration : IMongoMigratio
BsonInt32 i => i.AsInt32.ToString(CultureInfo.InvariantCulture),
BsonInt64 l => l.AsInt64.ToString(CultureInfo.InvariantCulture),
BsonDouble d => d.AsDouble.ToString(CultureInfo.InvariantCulture),
_ => value.ToString()
_ => value?.ToString() ?? string.Empty
};
}

View File

@@ -157,7 +157,7 @@ public sealed class EnsureAdvisoryObservationsRawLinksetMigration : IMongoMigrat
content,
identifiers,
linkset,
supersedes.IsBsonNull ? null : supersedes.AsString);
Supersedes: supersedes.IsBsonNull ? null : supersedes.AsString);
}
private static RawSourceMetadata MapSource(BsonDocument source)

View File

@@ -90,12 +90,27 @@ public sealed class MongoBootstrapper
_logger.LogInformation("Mongo bootstrapper completed");
}
private async Task<HashSet<string>> ListCollectionsAsync(CancellationToken cancellationToken)
{
using var cursor = await _database.ListCollectionNamesAsync(cancellationToken: cancellationToken).ConfigureAwait(false);
var list = await cursor.ToListAsync(cancellationToken).ConfigureAwait(false);
return new HashSet<string>(list, StringComparer.Ordinal);
}
// Returns the set of collection names currently present in the database.
// Ordinal comparison keeps lookups case-sensitive.
private async Task<HashSet<string>> ListCollectionsAsync(CancellationToken cancellationToken)
{
    using var cursor = await _database.ListCollectionNamesAsync(cancellationToken: cancellationToken).ConfigureAwait(false);
    var list = await cursor.ToListAsync(cancellationToken).ConfigureAwait(false);
    return new HashSet<string>(list, StringComparer.Ordinal);
}
// Determines whether the named collection is actually a MongoDB view.
// Callers use this to skip index creation, which views do not support.
private async Task<bool> CollectionIsViewAsync(string collectionName, CancellationToken cancellationToken)
{
    var filter = Builders<BsonDocument>.Filter.Eq("name", collectionName);
    var options = new ListCollectionsOptions { Filter = filter };
    using var cursor = await _database.ListCollectionsAsync(options, cancellationToken).ConfigureAwait(false);
    var collections = await cursor.ToListAsync(cancellationToken).ConfigureAwait(false);
    if (collections.Count == 0)
    {
        // Collection does not exist (yet); treat it as a regular collection.
        return false;
    }

    // listCollections reports "type": "view" for views; anything else is a plain collection.
    var typeValue = collections[0].GetValue("type", BsonString.Empty).AsString;
    return string.Equals(typeValue, "view", StringComparison.OrdinalIgnoreCase);
}
private Task EnsureLocksIndexesAsync(CancellationToken cancellationToken)
{
@@ -129,9 +144,15 @@ public sealed class MongoBootstrapper
return collection.Indexes.CreateManyAsync(indexes, cancellationToken);
}
private Task EnsureAdvisoryIndexesAsync(CancellationToken cancellationToken)
{
var collection = _database.GetCollection<BsonDocument>(MongoStorageDefaults.Collections.Advisory);
private async Task EnsureAdvisoryIndexesAsync(CancellationToken cancellationToken)
{
if (await CollectionIsViewAsync(MongoStorageDefaults.Collections.Advisory, cancellationToken).ConfigureAwait(false))
{
_logger.LogDebug("Skipping advisory index creation because {Collection} is a view", MongoStorageDefaults.Collections.Advisory);
return;
}
var collection = _database.GetCollection<BsonDocument>(MongoStorageDefaults.Collections.Advisory);
var indexes = new List<CreateIndexModel<BsonDocument>>
{
new(
@@ -159,7 +180,7 @@ public sealed class MongoBootstrapper
new CreateIndexOptions { Name = "advisory_normalizedVersions_value", Sparse = true }));
}
return collection.Indexes.CreateManyAsync(indexes, cancellationToken);
await collection.Indexes.CreateManyAsync(indexes, cancellationToken).ConfigureAwait(false);
}
private Task EnsureDocumentsIndexesAsync(CancellationToken cancellationToken)

View File

@@ -1,16 +1,18 @@
using System;
using System.Collections.Generic;
using System.Collections.Immutable;
using System.Globalization;
using System.Text;
using System.Linq;
using System.Text.Json;
using MongoDB.Bson;
using MongoDB.Driver;
using MongoDB.Bson.IO;
using Microsoft.Extensions.Logging;
using StellaOps.Concelier.Core.Raw;
using StellaOps.Concelier.RawModels;
using System;
using System.Collections.Generic;
using System.Collections.Immutable;
using System.Diagnostics;
using System.Globalization;
using System.Linq;
using System.Text;
using System.Text.Json;
using Microsoft.Extensions.Logging;
using MongoDB.Bson;
using MongoDB.Bson.IO;
using MongoDB.Driver;
using StellaOps.Concelier.Core.Raw;
using StellaOps.Concelier.RawModels;
using StellaOps.Ingestion.Telemetry;
namespace StellaOps.Concelier.Storage.Mongo.Raw;
@@ -34,76 +36,115 @@ internal sealed class MongoAdvisoryRawRepository : IAdvisoryRawRepository
_collection = database.GetCollection<BsonDocument>(MongoStorageDefaults.Collections.AdvisoryRaw);
}
public async Task<AdvisoryRawUpsertResult> UpsertAsync(AdvisoryRawDocument document, CancellationToken cancellationToken)
{
ArgumentNullException.ThrowIfNull(document);
var tenant = document.Tenant;
var vendor = document.Source.Vendor;
var upstreamId = document.Upstream.UpstreamId;
var contentHash = document.Upstream.ContentHash;
var baseFilter = Builders<BsonDocument>.Filter.Eq("tenant", tenant) &
Builders<BsonDocument>.Filter.Eq("source.vendor", vendor) &
Builders<BsonDocument>.Filter.Eq("upstream.upstream_id", upstreamId);
public async Task<AdvisoryRawUpsertResult> UpsertAsync(AdvisoryRawDocument document, CancellationToken cancellationToken)
{
ArgumentNullException.ThrowIfNull(document);
var tenant = document.Tenant;
var vendor = document.Source.Vendor;
var upstreamId = document.Upstream.UpstreamId;
var contentHash = document.Upstream.ContentHash;
var sourceUri = ResolveProvenanceUri(document);
var baseFilter = Builders<BsonDocument>.Filter.Eq("tenant", tenant) &
Builders<BsonDocument>.Filter.Eq("source.vendor", vendor) &
Builders<BsonDocument>.Filter.Eq("upstream.upstream_id", upstreamId);
var duplicateFilter = baseFilter &
Builders<BsonDocument>.Filter.Eq("upstream.content_hash", contentHash);
var duplicate = await _collection
.Find(duplicateFilter)
.Limit(1)
.FirstOrDefaultAsync(cancellationToken)
.ConfigureAwait(false);
if (duplicate is not null)
{
var existing = MapToRecord(duplicate);
return new AdvisoryRawUpsertResult(false, existing);
}
var previous = await _collection
.Find(baseFilter)
.Sort(Builders<BsonDocument>.Sort.Descending("ingested_at").Descending("_id"))
.Limit(1)
.FirstOrDefaultAsync(cancellationToken)
.ConfigureAwait(false);
var supersedesId = previous?["_id"]?.AsString;
var recordDocument = CreateBsonDocument(document, supersedesId);
try
{
await _collection.InsertOneAsync(recordDocument, cancellationToken: cancellationToken).ConfigureAwait(false);
}
catch (MongoWriteException ex) when (ex.WriteError?.Category == ServerErrorCategory.DuplicateKey)
{
_logger.LogWarning(
ex,
"Duplicate key detected while inserting advisory_raw document tenant={Tenant} vendor={Vendor} upstream={Upstream} hash={Hash}",
tenant,
vendor,
upstreamId,
contentHash);
var existingDoc = await _collection
.Find(duplicateFilter)
.Limit(1)
.FirstOrDefaultAsync(cancellationToken)
.ConfigureAwait(false);
if (existingDoc is not null)
{
var existing = MapToRecord(existingDoc);
return new AdvisoryRawUpsertResult(false, existing);
}
throw;
}
var inserted = MapToRecord(recordDocument);
return new AdvisoryRawUpsertResult(true, inserted);
}
using var fetchActivity = IngestionTelemetry.StartFetchActivity(tenant, vendor, upstreamId, contentHash, sourceUri);
var fetchWatch = Stopwatch.StartNew();
var duplicate = await _collection
.Find(duplicateFilter)
.Limit(1)
.FirstOrDefaultAsync(cancellationToken)
.ConfigureAwait(false);
if (duplicate is not null)
{
fetchWatch.Stop();
fetchActivity?.SetTag("result", "duplicate");
fetchActivity?.SetStatus(ActivityStatusCode.Ok);
IngestionTelemetry.RecordLatency(tenant, vendor, IngestionTelemetry.PhaseFetch, fetchWatch.Elapsed);
var existing = MapToRecord(duplicate);
return new AdvisoryRawUpsertResult(false, existing);
}
var previous = await _collection
.Find(baseFilter)
.Sort(Builders<BsonDocument>.Sort.Descending("ingested_at").Descending("_id"))
.Limit(1)
.FirstOrDefaultAsync(cancellationToken)
.ConfigureAwait(false);
fetchWatch.Stop();
fetchActivity?.SetTag("result", previous is null ? "new" : "supersede");
fetchActivity?.SetStatus(ActivityStatusCode.Ok);
IngestionTelemetry.RecordLatency(tenant, vendor, IngestionTelemetry.PhaseFetch, fetchWatch.Elapsed);
var supersedesId = previous?["_id"]?.AsString;
var recordDocument = CreateBsonDocument(document, supersedesId);
var writeWatch = Stopwatch.StartNew();
using var writeActivity = IngestionTelemetry.StartWriteActivity(tenant, vendor, upstreamId, contentHash, MongoStorageDefaults.Collections.AdvisoryRaw);
try
{
await _collection.InsertOneAsync(recordDocument, cancellationToken: cancellationToken).ConfigureAwait(false);
writeActivity?.SetTag("result", IngestionTelemetry.ResultOk);
writeActivity?.SetStatus(ActivityStatusCode.Ok);
}
catch (MongoWriteException ex) when (ex.WriteError?.Category == ServerErrorCategory.DuplicateKey)
{
writeActivity?.SetTag("result", IngestionTelemetry.ResultNoop);
writeActivity?.SetStatus(ActivityStatusCode.Error, "duplicate_key");
_logger.LogWarning(
ex,
"Duplicate key detected while inserting advisory_raw document tenant={Tenant} vendor={Vendor} upstream={Upstream} hash={Hash}",
tenant,
vendor,
upstreamId,
contentHash);
var existingDoc = await _collection
.Find(duplicateFilter)
.Limit(1)
.FirstOrDefaultAsync(cancellationToken)
.ConfigureAwait(false);
if (existingDoc is not null)
{
var existing = MapToRecord(existingDoc);
return new AdvisoryRawUpsertResult(false, existing);
}
throw;
}
finally
{
writeWatch.Stop();
IngestionTelemetry.RecordLatency(tenant, vendor, IngestionTelemetry.PhaseWrite, writeWatch.Elapsed);
}
var inserted = MapToRecord(recordDocument);
return new AdvisoryRawUpsertResult(true, inserted);
}
// Extracts the upstream provenance "uri" value when present and non-blank; otherwise null.
private static string? ResolveProvenanceUri(AdvisoryRawDocument document)
{
    var provenance = document.Upstream?.Provenance;
    if (provenance is not null
        && provenance.TryGetValue("uri", out var uri)
        && !string.IsNullOrWhiteSpace(uri))
    {
        return uri;
    }

    return null;
}
public async Task<AdvisoryRawRecord?> FindByIdAsync(string tenant, string id, CancellationToken cancellationToken)
{

View File

@@ -11,8 +11,9 @@
<PackageReference Include="Microsoft.Extensions.Options" Version="10.0.0-rc.2.25502.107" />
<PackageReference Include="Microsoft.Extensions.Logging.Abstractions" Version="10.0.0-rc.2.25502.107" />
</ItemGroup>
<ItemGroup>
<ProjectReference Include="..\StellaOps.Concelier.Core\StellaOps.Concelier.Core.csproj" />
<ProjectReference Include="..\StellaOps.Concelier.Models\StellaOps.Concelier.Models.csproj" />
</ItemGroup>
</Project>
<ItemGroup>
<ProjectReference Include="..\StellaOps.Concelier.Core\StellaOps.Concelier.Core.csproj" />
<ProjectReference Include="..\StellaOps.Concelier.Models\StellaOps.Concelier.Models.csproj" />
<ProjectReference Include="..\..\..\__Libraries\StellaOps.Ingestion.Telemetry\StellaOps.Ingestion.Telemetry.csproj" />
</ItemGroup>
</Project>