up
Some checks failed
Docs CI / lint-and-preview (push) Has been cancelled
Findings Ledger CI / build-test (push) Has been cancelled
Findings Ledger CI / migration-validation (push) Has been cancelled
Scanner Analyzers / Discover Analyzers (push) Has been cancelled
Signals Reachability Scoring & Events / reachability-smoke (push) Has been cancelled
AOC Guard CI / aoc-guard (push) Has been cancelled
Concelier Attestation Tests / attestation-tests (push) Has been cancelled
cryptopro-linux-csp / build-and-test (push) Has been cancelled
Scanner Analyzers / Validate Test Fixtures (push) Has been cancelled
Signals CI & Image / signals-ci (push) Has been cancelled
sm-remote-ci / build-and-test (push) Has been cancelled
Findings Ledger CI / generate-manifest (push) Has been cancelled
AOC Guard CI / aoc-verify (push) Has been cancelled
Scanner Analyzers / Build Analyzers (push) Has been cancelled
Scanner Analyzers / Test Language Analyzers (push) Has been cancelled
Scanner Analyzers / Verify Deterministic Output (push) Has been cancelled
Signals Reachability Scoring & Events / sign-and-upload (push) Has been cancelled

This commit is contained in:
StellaOps Bot
2025-12-09 09:38:09 +02:00
parent bc0762e97d
commit 108d1c64b3
193 changed files with 7265 additions and 13029 deletions

View File

@@ -16,6 +16,7 @@ Connector responsible for ingesting Cisco CSAF VEX advisories and handing raw do
## In/Out of scope
In: data fetching, provider metadata, retry controls, raw document persistence.
Out: normalization/export, attestation, Mongo wiring (handled in other modules).
Out: normalization/export, attestation, Postgres/in-memory wiring (handled in other modules).
## Observability & security expectations
- Log fetch batches with document counts/durations; mask credentials.
- Emit metrics for rate-limit hits, retries, and quarantine events.

View File

@@ -11,68 +11,68 @@ using StellaOps.Excititor.Connectors.Abstractions;
using StellaOps.Excititor.Connectors.Cisco.CSAF.Configuration;
using StellaOps.Excititor.Connectors.Cisco.CSAF.Metadata;
using StellaOps.Excititor.Core;
using StellaOps.Excititor.Storage.Mongo;
namespace StellaOps.Excititor.Connectors.Cisco.CSAF;
public sealed class CiscoCsafConnector : VexConnectorBase
{
private static readonly VexConnectorDescriptor DescriptorInstance = new(
id: "excititor:cisco",
kind: VexProviderKind.Vendor,
displayName: "Cisco CSAF")
{
Tags = ImmutableArray.Create("cisco", "csaf"),
};
private readonly CiscoProviderMetadataLoader _metadataLoader;
private readonly IHttpClientFactory _httpClientFactory;
private readonly IVexConnectorStateRepository _stateRepository;
private readonly IEnumerable<IVexConnectorOptionsValidator<CiscoConnectorOptions>> _validators;
private readonly JsonSerializerOptions _serializerOptions = new(JsonSerializerDefaults.Web);
private CiscoConnectorOptions? _options;
private CiscoProviderMetadataResult? _providerMetadata;
/// <summary>
/// Creates the Cisco CSAF connector with its metadata loader, HTTP client factory,
/// and connector-state repository; <paramref name="validators"/> may be null
/// (treated as an empty set).
/// </summary>
public CiscoCsafConnector(
CiscoProviderMetadataLoader metadataLoader,
IHttpClientFactory httpClientFactory,
IVexConnectorStateRepository stateRepository,
IEnumerable<IVexConnectorOptionsValidator<CiscoConnectorOptions>>? validators,
ILogger<CiscoCsafConnector> logger,
TimeProvider timeProvider)
: base(DescriptorInstance, logger, timeProvider)
{
_metadataLoader = metadataLoader ?? throw new ArgumentNullException(nameof(metadataLoader));
_httpClientFactory = httpClientFactory ?? throw new ArgumentNullException(nameof(httpClientFactory));
_stateRepository = stateRepository ?? throw new ArgumentNullException(nameof(stateRepository));
// Null validator sequence is tolerated; option binding then runs without extra checks.
_validators = validators ?? Array.Empty<IVexConnectorOptionsValidator<CiscoConnectorOptions>>();
}
/// <summary>
/// Binds and validates connector settings, then loads Cisco provider metadata.
/// Must complete before <see cref="FetchAsync"/> is called.
/// </summary>
public override async ValueTask ValidateAsync(VexConnectorSettings settings, CancellationToken cancellationToken)
{
_options = VexConnectorOptionsBinder.Bind(
Descriptor,
settings,
validators: _validators);
_providerMetadata = await _metadataLoader.LoadAsync(cancellationToken).ConfigureAwait(false);
LogConnectorEvent(LogLevel.Information, "validate", "Cisco CSAF metadata loaded.", new Dictionary<string, object?>
{
["baseUriCount"] = _providerMetadata.Provider.BaseUris.Length,
["fromOffline"] = _providerMetadata.FromOfflineSnapshot,
});
}
public override async IAsyncEnumerable<VexRawDocument> FetchAsync(VexConnectorContext context, [EnumeratorCancellation] CancellationToken cancellationToken)
{
ArgumentNullException.ThrowIfNull(context);
if (_options is null)
{
throw new InvalidOperationException("Connector must be validated before fetch operations.");
}
using StellaOps.Excititor.Core.Storage;
namespace StellaOps.Excititor.Connectors.Cisco.CSAF;
public sealed class CiscoCsafConnector : VexConnectorBase
{
private static readonly VexConnectorDescriptor DescriptorInstance = new(
id: "excititor:cisco",
kind: VexProviderKind.Vendor,
displayName: "Cisco CSAF")
{
Tags = ImmutableArray.Create("cisco", "csaf"),
};
private readonly CiscoProviderMetadataLoader _metadataLoader;
private readonly IHttpClientFactory _httpClientFactory;
private readonly IVexConnectorStateRepository _stateRepository;
private readonly IEnumerable<IVexConnectorOptionsValidator<CiscoConnectorOptions>> _validators;
private readonly JsonSerializerOptions _serializerOptions = new(JsonSerializerDefaults.Web);
private CiscoConnectorOptions? _options;
private CiscoProviderMetadataResult? _providerMetadata;
/// <summary>
/// Creates the Cisco CSAF connector with its metadata loader, HTTP client factory,
/// and connector-state repository; <paramref name="validators"/> may be null
/// (treated as an empty set).
/// </summary>
public CiscoCsafConnector(
CiscoProviderMetadataLoader metadataLoader,
IHttpClientFactory httpClientFactory,
IVexConnectorStateRepository stateRepository,
IEnumerable<IVexConnectorOptionsValidator<CiscoConnectorOptions>>? validators,
ILogger<CiscoCsafConnector> logger,
TimeProvider timeProvider)
: base(DescriptorInstance, logger, timeProvider)
{
_metadataLoader = metadataLoader ?? throw new ArgumentNullException(nameof(metadataLoader));
_httpClientFactory = httpClientFactory ?? throw new ArgumentNullException(nameof(httpClientFactory));
_stateRepository = stateRepository ?? throw new ArgumentNullException(nameof(stateRepository));
// Null validator sequence is tolerated; option binding then runs without extra checks.
_validators = validators ?? Array.Empty<IVexConnectorOptionsValidator<CiscoConnectorOptions>>();
}
/// <summary>
/// Binds and validates connector settings, then loads Cisco provider metadata.
/// Must complete before <see cref="FetchAsync"/> is called.
/// </summary>
public override async ValueTask ValidateAsync(VexConnectorSettings settings, CancellationToken cancellationToken)
{
_options = VexConnectorOptionsBinder.Bind(
Descriptor,
settings,
validators: _validators);
_providerMetadata = await _metadataLoader.LoadAsync(cancellationToken).ConfigureAwait(false);
LogConnectorEvent(LogLevel.Information, "validate", "Cisco CSAF metadata loaded.", new Dictionary<string, object?>
{
["baseUriCount"] = _providerMetadata.Provider.BaseUris.Length,
["fromOffline"] = _providerMetadata.FromOfflineSnapshot,
});
}
public override async IAsyncEnumerable<VexRawDocument> FetchAsync(VexConnectorContext context, [EnumeratorCancellation] CancellationToken cancellationToken)
{
ArgumentNullException.ThrowIfNull(context);
if (_options is null)
{
throw new InvalidOperationException("Connector must be validated before fetch operations.");
}
if (_providerMetadata is null)
{
_providerMetadata = await _metadataLoader.LoadAsync(cancellationToken).ConfigureAwait(false);
@@ -81,28 +81,28 @@ public sealed class CiscoCsafConnector : VexConnectorBase
await UpsertProviderAsync(context.Services, _providerMetadata.Provider, cancellationToken).ConfigureAwait(false);
var state = await _stateRepository.GetAsync(Descriptor.Id, cancellationToken).ConfigureAwait(false);
var knownDigests = state?.DocumentDigests ?? ImmutableArray<string>.Empty;
var digestSet = new HashSet<string>(knownDigests, StringComparer.OrdinalIgnoreCase);
var digestList = new List<string>(knownDigests);
var since = context.Since ?? state?.LastUpdated ?? DateTimeOffset.MinValue;
var latestTimestamp = state?.LastUpdated ?? since;
var stateChanged = false;
var client = _httpClientFactory.CreateClient(CiscoConnectorOptions.HttpClientName);
foreach (var directory in _providerMetadata.Provider.BaseUris)
{
await foreach (var advisory in EnumerateCatalogAsync(client, directory, cancellationToken).ConfigureAwait(false))
{
var published = advisory.LastModified ?? advisory.Published ?? DateTimeOffset.MinValue;
if (published <= since)
{
continue;
}
using var contentResponse = await client.GetAsync(advisory.DocumentUri, HttpCompletionOption.ResponseHeadersRead, cancellationToken).ConfigureAwait(false);
contentResponse.EnsureSuccessStatusCode();
var payload = await contentResponse.Content.ReadAsByteArrayAsync(cancellationToken).ConfigureAwait(false);
var knownDigests = state?.DocumentDigests ?? ImmutableArray<string>.Empty;
var digestSet = new HashSet<string>(knownDigests, StringComparer.OrdinalIgnoreCase);
var digestList = new List<string>(knownDigests);
var since = context.Since ?? state?.LastUpdated ?? DateTimeOffset.MinValue;
var latestTimestamp = state?.LastUpdated ?? since;
var stateChanged = false;
var client = _httpClientFactory.CreateClient(CiscoConnectorOptions.HttpClientName);
foreach (var directory in _providerMetadata.Provider.BaseUris)
{
await foreach (var advisory in EnumerateCatalogAsync(client, directory, cancellationToken).ConfigureAwait(false))
{
var published = advisory.LastModified ?? advisory.Published ?? DateTimeOffset.MinValue;
if (published <= since)
{
continue;
}
using var contentResponse = await client.GetAsync(advisory.DocumentUri, HttpCompletionOption.ResponseHeadersRead, cancellationToken).ConfigureAwait(false);
contentResponse.EnsureSuccessStatusCode();
var payload = await contentResponse.Content.ReadAsByteArrayAsync(cancellationToken).ConfigureAwait(false);
var metadata = BuildMetadata(builder =>
{
builder
@@ -120,118 +120,118 @@ public sealed class CiscoCsafConnector : VexConnectorBase
advisory.DocumentUri,
payload,
metadata);
if (!digestSet.Add(rawDocument.Digest))
{
continue;
}
await context.RawSink.StoreAsync(rawDocument, cancellationToken).ConfigureAwait(false);
digestList.Add(rawDocument.Digest);
stateChanged = true;
if (published > latestTimestamp)
{
latestTimestamp = published;
}
yield return rawDocument;
}
}
if (stateChanged)
{
var baseState = state ?? new VexConnectorState(
Descriptor.Id,
null,
ImmutableArray<string>.Empty,
ImmutableDictionary<string, string>.Empty,
null,
0,
null,
null);
var newState = baseState with
{
LastUpdated = latestTimestamp == DateTimeOffset.MinValue ? state?.LastUpdated : latestTimestamp,
DocumentDigests = digestList.ToImmutableArray(),
};
await _stateRepository.SaveAsync(newState, cancellationToken).ConfigureAwait(false);
}
}
/// <summary>Normalization is intentionally unsupported here; CSAF normalizers own that step.</summary>
public override ValueTask<VexClaimBatch> NormalizeAsync(VexRawDocument document, CancellationToken cancellationToken)
=> throw new NotSupportedException("CiscoCsafConnector relies on CSAF normalizers for document processing.");
/// <summary>
/// Walks the paged advisory index under <paramref name="directory"/> (starting at
/// "index.json" and following "next" links) and yields one entry per advisory that
/// has a usable URL.
/// </summary>
private async IAsyncEnumerable<CiscoAdvisoryEntry> EnumerateCatalogAsync(HttpClient client, Uri directory, [EnumeratorCancellation] CancellationToken cancellationToken)
{
var nextUri = BuildIndexUri(directory, null);
while (nextUri is not null)
{
using var response = await client.GetAsync(nextUri, HttpCompletionOption.ResponseHeadersRead, cancellationToken).ConfigureAwait(false);
response.EnsureSuccessStatusCode();
var json = await response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false);
var page = JsonSerializer.Deserialize<CiscoAdvisoryIndex>(json, _serializerOptions);
// A page without an advisory list ends the enumeration entirely (no further paging).
if (page?.Advisories is null)
{
yield break;
}
foreach (var advisory in page.Advisories)
{
// Entries without a parseable URL cannot be fetched; drop them silently.
if (string.IsNullOrWhiteSpace(advisory.Url))
{
continue;
}
if (!Uri.TryCreate(advisory.Url, UriKind.RelativeOrAbsolute, out var documentUri))
{
continue;
}
// Relative document links are resolved against the catalogue directory.
if (!documentUri.IsAbsoluteUri)
{
documentUri = new Uri(directory, documentUri);
}
yield return new CiscoAdvisoryEntry(
// Fall back to the last path segment (or the full URI) when the index has no id.
advisory.Id ?? documentUri.Segments.LastOrDefault()?.Trim('/') ?? documentUri.ToString(),
documentUri,
advisory.Revision,
advisory.Published,
advisory.LastModified,
advisory.Sha256);
}
// ResolveNextUri returns null for a blank "next" value, which terminates the loop.
nextUri = ResolveNextUri(directory, page.Next);
}
}
/// <summary>
/// Computes the catalogue index URI for <paramref name="directory"/>. With no
/// <paramref name="relative"/> value, the directory's "index.json" is used; an
/// absolute value is returned verbatim; a relative value is resolved against the
/// directory. A trailing slash is enforced on the directory so its last path
/// segment is treated as a folder during resolution.
/// </summary>
private static Uri BuildIndexUri(Uri directory, string? relative)
{
    static Uri DirectoryBase(Uri dir)
    {
        var text = dir.ToString();
        return new Uri(text.EndsWith('/') ? text : text + "/", UriKind.Absolute);
    }

    if (string.IsNullOrWhiteSpace(relative))
    {
        return new Uri(DirectoryBase(directory), "index.json");
    }

    return Uri.TryCreate(relative, UriKind.Absolute, out var absolute)
        ? absolute
        : new Uri(DirectoryBase(directory), relative);
}
if (!digestSet.Add(rawDocument.Digest))
{
continue;
}
await context.RawSink.StoreAsync(rawDocument, cancellationToken).ConfigureAwait(false);
digestList.Add(rawDocument.Digest);
stateChanged = true;
if (published > latestTimestamp)
{
latestTimestamp = published;
}
yield return rawDocument;
}
}
if (stateChanged)
{
var baseState = state ?? new VexConnectorState(
Descriptor.Id,
null,
ImmutableArray<string>.Empty,
ImmutableDictionary<string, string>.Empty,
null,
0,
null,
null);
var newState = baseState with
{
LastUpdated = latestTimestamp == DateTimeOffset.MinValue ? state?.LastUpdated : latestTimestamp,
DocumentDigests = digestList.ToImmutableArray(),
};
await _stateRepository.SaveAsync(newState, cancellationToken).ConfigureAwait(false);
}
}
/// <summary>Normalization is intentionally unsupported here; CSAF normalizers own that step.</summary>
public override ValueTask<VexClaimBatch> NormalizeAsync(VexRawDocument document, CancellationToken cancellationToken)
=> throw new NotSupportedException("CiscoCsafConnector relies on CSAF normalizers for document processing.");
/// <summary>
/// Walks the paged advisory index under <paramref name="directory"/> (starting at
/// "index.json" and following "next" links) and yields one entry per advisory that
/// has a usable URL.
/// </summary>
private async IAsyncEnumerable<CiscoAdvisoryEntry> EnumerateCatalogAsync(HttpClient client, Uri directory, [EnumeratorCancellation] CancellationToken cancellationToken)
{
var nextUri = BuildIndexUri(directory, null);
while (nextUri is not null)
{
using var response = await client.GetAsync(nextUri, HttpCompletionOption.ResponseHeadersRead, cancellationToken).ConfigureAwait(false);
response.EnsureSuccessStatusCode();
var json = await response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false);
var page = JsonSerializer.Deserialize<CiscoAdvisoryIndex>(json, _serializerOptions);
// A page without an advisory list ends the enumeration entirely (no further paging).
if (page?.Advisories is null)
{
yield break;
}
foreach (var advisory in page.Advisories)
{
// Entries without a parseable URL cannot be fetched; drop them silently.
if (string.IsNullOrWhiteSpace(advisory.Url))
{
continue;
}
if (!Uri.TryCreate(advisory.Url, UriKind.RelativeOrAbsolute, out var documentUri))
{
continue;
}
// Relative document links are resolved against the catalogue directory.
if (!documentUri.IsAbsoluteUri)
{
documentUri = new Uri(directory, documentUri);
}
yield return new CiscoAdvisoryEntry(
// Fall back to the last path segment (or the full URI) when the index has no id.
advisory.Id ?? documentUri.Segments.LastOrDefault()?.Trim('/') ?? documentUri.ToString(),
documentUri,
advisory.Revision,
advisory.Published,
advisory.LastModified,
advisory.Sha256);
}
// ResolveNextUri returns null for a blank "next" value, which terminates the loop.
nextUri = ResolveNextUri(directory, page.Next);
}
}
/// <summary>
/// Computes the catalogue index URI for <paramref name="directory"/>. With no
/// <paramref name="relative"/> value, the directory's "index.json" is used; an
/// absolute value is returned verbatim; a relative value is resolved against the
/// directory with a trailing slash enforced.
/// </summary>
private static Uri BuildIndexUri(Uri directory, string? relative)
{
if (string.IsNullOrWhiteSpace(relative))
{
var baseText = directory.ToString();
// Ensure the last path segment is treated as a folder during resolution.
if (!baseText.EndsWith('/'))
{
baseText += "/";
}
return new Uri(new Uri(baseText, UriKind.Absolute), "index.json");
}
if (Uri.TryCreate(relative, UriKind.Absolute, out var absolute))
{
return absolute;
}
var baseTextRelative = directory.ToString();
if (!baseTextRelative.EndsWith('/'))
{
baseTextRelative += "/";
}
return new Uri(new Uri(baseTextRelative, UriKind.Absolute), relative);
}
private static Uri? ResolveNextUri(Uri directory, string? next)
{
if (string.IsNullOrWhiteSpace(next))
@@ -285,24 +285,24 @@ public sealed class CiscoCsafConnector : VexConnectorBase
/// <summary>One page of the Cisco advisory index: its entries plus an optional "next" page link.</summary>
private sealed record CiscoAdvisoryIndex
{
public List<CiscoAdvisory>? Advisories { get; init; }
// Paging cursor from the index payload; a null/blank value ends pagination.
public string? Next { get; init; }
}
/// <summary>Raw advisory row as deserialized from the index JSON; every field is optional.</summary>
private sealed record CiscoAdvisory
{
public string? Id { get; init; }
public string? Url { get; init; }
public string? Revision { get; init; }
public DateTimeOffset? Published { get; init; }
public DateTimeOffset? LastModified { get; init; }
public string? Sha256 { get; init; }
}
/// <summary>Normalized catalogue entry with a guaranteed id and an absolute document URI.</summary>
private sealed record CiscoAdvisoryEntry(
string Id,
Uri DocumentUri,
string? Revision,
DateTimeOffset? Published,
DateTimeOffset? LastModified,
string? Sha256);
}
public string? Next { get; init; }
}
/// <summary>Raw advisory row as deserialized from the index JSON; every field is optional.</summary>
private sealed record CiscoAdvisory
{
public string? Id { get; init; }
public string? Url { get; init; }
public string? Revision { get; init; }
public DateTimeOffset? Published { get; init; }
public DateTimeOffset? LastModified { get; init; }
public string? Sha256 { get; init; }
}
/// <summary>Normalized catalogue entry with a guaranteed id and an absolute document URI.</summary>
private sealed record CiscoAdvisoryEntry(
string Id,
Uri DocumentUri,
string? Revision,
DateTimeOffset? Published,
DateTimeOffset? LastModified,
string? Sha256);
}

View File

@@ -9,7 +9,7 @@
<ItemGroup>
<ProjectReference Include="..\StellaOps.Excititor.Connectors.Abstractions\StellaOps.Excititor.Connectors.Abstractions.csproj" />
<ProjectReference Include="..\StellaOps.Excititor.Core\StellaOps.Excititor.Core.csproj" />
<ProjectReference Include="..\StellaOps.Excititor.Storage.Mongo\StellaOps.Excititor.Storage.Mongo.csproj" />
<ProjectReference Include="..\StellaOps.Excititor.Storage.Postgres\StellaOps.Excititor.Storage.Postgres.csproj" />
</ItemGroup>
<ItemGroup>
<PackageReference Include="Microsoft.Extensions.Caching.Memory" Version="10.0.0-preview.7.25380.108" />

View File

@@ -16,7 +16,7 @@ using StellaOps.Excititor.Connectors.Abstractions.Trust;
using StellaOps.Excititor.Connectors.MSRC.CSAF.Authentication;
using StellaOps.Excititor.Connectors.MSRC.CSAF.Configuration;
using StellaOps.Excititor.Core;
using StellaOps.Excititor.Storage.Mongo;
using StellaOps.Excititor.Core.Storage;
namespace StellaOps.Excititor.Connectors.MSRC.CSAF;

View File

@@ -8,7 +8,7 @@
</PropertyGroup>
<ItemGroup>
<ProjectReference Include="..\StellaOps.Excititor.Connectors.Abstractions\StellaOps.Excititor.Connectors.Abstractions.csproj" />
<ProjectReference Include="..\StellaOps.Excititor.Storage.Mongo\StellaOps.Excititor.Storage.Mongo.csproj" />
<ProjectReference Include="..\StellaOps.Excititor.Storage.Postgres\StellaOps.Excititor.Storage.Postgres.csproj" />
</ItemGroup>
<ItemGroup>
<PackageReference Include="Microsoft.Extensions.Caching.Memory" Version="10.0.0-preview.7.25380.108" />

View File

@@ -1,266 +1,266 @@
using System.Collections.Generic;
using System.Collections.Immutable;
using System.Globalization;
using System.IO;
using System.Linq;
using System.Net;
using System.Net.Http;
using System.Runtime.CompilerServices;
using System.Threading;
using System.Threading.Tasks;
using System.Collections.Generic;
using System.Collections.Immutable;
using System.Globalization;
using System.IO;
using System.Linq;
using System.Net;
using System.Net.Http;
using System.Runtime.CompilerServices;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.Extensions.Logging;
using StellaOps.Excititor.Connectors.Abstractions;
using StellaOps.Excititor.Connectors.Abstractions.Trust;
using StellaOps.Excititor.Connectors.Oracle.CSAF.Configuration;
using StellaOps.Excititor.Connectors.Oracle.CSAF.Metadata;
using StellaOps.Excititor.Core;
using StellaOps.Excititor.Storage.Mongo;
namespace StellaOps.Excititor.Connectors.Oracle.CSAF;
public sealed class OracleCsafConnector : VexConnectorBase
{
private static readonly VexConnectorDescriptor DescriptorInstance = new(
id: "excititor:oracle",
kind: VexProviderKind.Vendor,
displayName: "Oracle CSAF")
{
Tags = ImmutableArray.Create("oracle", "csaf", "cpu"),
};
private readonly OracleCatalogLoader _catalogLoader;
private readonly IHttpClientFactory _httpClientFactory;
private readonly IVexConnectorStateRepository _stateRepository;
private readonly IEnumerable<IVexConnectorOptionsValidator<OracleConnectorOptions>> _validators;
private OracleConnectorOptions? _options;
private OracleCatalogResult? _catalog;
/// <summary>
/// Creates the Oracle CSAF connector with its catalogue loader, HTTP client factory,
/// and connector-state repository.
/// </summary>
/// <param name="validators">Optional settings validators; an empty set is used when null.</param>
public OracleCsafConnector(
    OracleCatalogLoader catalogLoader,
    IHttpClientFactory httpClientFactory,
    IVexConnectorStateRepository stateRepository,
    IEnumerable<IVexConnectorOptionsValidator<OracleConnectorOptions>>? validators,
    ILogger<OracleCsafConnector> logger,
    TimeProvider timeProvider)
    : base(DescriptorInstance, logger, timeProvider)
{
    _catalogLoader = catalogLoader ?? throw new ArgumentNullException(nameof(catalogLoader));
    _httpClientFactory = httpClientFactory ?? throw new ArgumentNullException(nameof(httpClientFactory));
    _stateRepository = stateRepository ?? throw new ArgumentNullException(nameof(stateRepository));
    // The parameter was annotated non-nullable yet null-coalesced; a nullable annotation
    // matches the actual behavior and the Cisco connector's equivalent constructor.
    _validators = validators ?? Array.Empty<IVexConnectorOptionsValidator<OracleConnectorOptions>>();
}
/// <summary>
/// Binds and validates connector settings, then loads the Oracle CSAF catalogue.
/// Must complete before <see cref="FetchAsync"/> is called.
/// </summary>
public override async ValueTask ValidateAsync(VexConnectorSettings settings, CancellationToken cancellationToken)
{
_options = VexConnectorOptionsBinder.Bind(
Descriptor,
settings,
validators: _validators);
_catalog = await _catalogLoader.LoadAsync(_options, cancellationToken).ConfigureAwait(false);
LogConnectorEvent(LogLevel.Information, "validate", "Oracle CSAF catalogue loaded.", new Dictionary<string, object?>
{
["catalogEntryCount"] = _catalog.Metadata.Entries.Length,
["scheduleCount"] = _catalog.Metadata.CpuSchedule.Length,
["fromOffline"] = _catalog.FromOfflineSnapshot,
});
}
/// <summary>
/// Streams new Oracle CSAF documents: iterates the catalogue oldest-first, skips
/// entries at or before the resolved watermark or with already-known digests,
/// verifies advertised checksums, stores each new document in the raw sink, and
/// persists updated state (watermark + digest list) when anything was ingested.
/// Requires a prior successful <see cref="ValidateAsync"/>.
/// </summary>
public override async IAsyncEnumerable<VexRawDocument> FetchAsync(VexConnectorContext context, [EnumeratorCancellation] CancellationToken cancellationToken)
{
ArgumentNullException.ThrowIfNull(context);
if (_options is null)
{
throw new InvalidOperationException("Connector must be validated before fetch operations.");
}
// Reload the catalogue lazily if it is not already cached from ValidateAsync.
_catalog ??= await _catalogLoader.LoadAsync(_options, cancellationToken).ConfigureAwait(false);
// Oldest first; entries without a publication timestamp sort before everything else.
var entries = _catalog.Metadata.Entries
.OrderBy(static entry => entry.PublishedAt == default ? DateTimeOffset.MinValue : entry.PublishedAt)
.ToImmutableArray();
var state = await _stateRepository.GetAsync(Descriptor.Id, cancellationToken).ConfigureAwait(false);
var since = ResolveSince(context.Since, state?.LastUpdated);
var knownDigests = state?.DocumentDigests ?? ImmutableArray<string>.Empty;
var digestSet = new HashSet<string>(knownDigests, StringComparer.OrdinalIgnoreCase);
var digestList = new List<string>(knownDigests);
var latestPublished = state?.LastUpdated ?? since ?? DateTimeOffset.MinValue;
var stateChanged = false;
var client = _httpClientFactory.CreateClient(OracleConnectorOptions.HttpClientName);
LogConnectorEvent(LogLevel.Information, "fetch.begin", "Starting Oracle CSAF catalogue iteration.", new Dictionary<string, object?>
{
["since"] = since?.ToString("O"),
["entryCount"] = entries.Length,
});
foreach (var entry in entries)
{
cancellationToken.ThrowIfCancellationRequested();
if (ShouldSkipEntry(entry, since))
{
continue;
}
// The catalogue's advertised SHA-256 lets us skip downloads of known documents.
var expectedDigest = NormalizeDigest(entry.Sha256);
if (expectedDigest is not null && digestSet.Contains(expectedDigest))
{
latestPublished = UpdateLatest(latestPublished, entry.PublishedAt);
LogConnectorEvent(LogLevel.Debug, "fetch.skip.cached", "Skipping Oracle CSAF entry because digest already processed.", new Dictionary<string, object?>
{
["entryId"] = entry.Id,
["digest"] = expectedDigest,
});
continue;
}
var rawDocument = await DownloadEntryAsync(client, entry, cancellationToken).ConfigureAwait(false);
if (rawDocument is null)
{
continue;
}
// Reject payloads whose computed digest contradicts the catalogue's checksum.
if (expectedDigest is not null && !string.Equals(rawDocument.Digest, expectedDigest, StringComparison.OrdinalIgnoreCase))
{
LogConnectorEvent(LogLevel.Warning, "fetch.checksum_mismatch", "Oracle CSAF document checksum mismatch; document skipped.", new Dictionary<string, object?>
{
["entryId"] = entry.Id,
["expected"] = expectedDigest,
["actual"] = rawDocument.Digest,
["documentUri"] = entry.DocumentUri.ToString(),
});
continue;
}
if (!digestSet.Add(rawDocument.Digest))
{
LogConnectorEvent(LogLevel.Debug, "fetch.skip.duplicate", "Oracle CSAF document digest already ingested.", new Dictionary<string, object?>
{
["entryId"] = entry.Id,
["digest"] = rawDocument.Digest,
});
continue;
}
await context.RawSink.StoreAsync(rawDocument, cancellationToken).ConfigureAwait(false);
digestList.Add(rawDocument.Digest);
stateChanged = true;
latestPublished = UpdateLatest(latestPublished, entry.PublishedAt);
LogConnectorEvent(LogLevel.Information, "fetch.document_ingested", "Oracle CSAF document stored.", new Dictionary<string, object?>
{
["entryId"] = entry.Id,
["digest"] = rawDocument.Digest,
["documentUri"] = entry.DocumentUri.ToString(),
["publishedAt"] = entry.PublishedAt.ToString("O"),
});
yield return rawDocument;
// Optional politeness delay between downloads.
if (_options.RequestDelay > TimeSpan.Zero)
{
await Task.Delay(_options.RequestDelay, cancellationToken).ConfigureAwait(false);
}
}
// Persist the advanced watermark and digest list only when something new was stored.
if (stateChanged)
{
var baseState = state ?? new VexConnectorState(
Descriptor.Id,
null,
ImmutableArray<string>.Empty,
ImmutableDictionary<string, string>.Empty,
null,
0,
null,
null);
var newState = baseState with
{
LastUpdated = latestPublished == DateTimeOffset.MinValue ? baseState.LastUpdated : latestPublished,
DocumentDigests = digestList.ToImmutableArray(),
};
await _stateRepository.SaveAsync(newState, cancellationToken).ConfigureAwait(false);
}
// Documents ingested this run = digests now known minus those known at the start.
var ingestedCount = digestList.Count - knownDigests.Length;
LogConnectorEvent(LogLevel.Information, "fetch.complete", "Oracle CSAF fetch completed.", new Dictionary<string, object?>
{
["stateChanged"] = stateChanged,
["documentsProcessed"] = ingestedCount,
["latestPublished"] = latestPublished == DateTimeOffset.MinValue ? null : latestPublished.ToString("O"),
});
}
/// <summary>Normalization is delegated to dedicated CSAF normalizers; not supported here.</summary>
public override ValueTask<VexClaimBatch> NormalizeAsync(VexRawDocument document, CancellationToken cancellationToken)
=> throw new NotSupportedException("OracleCsafConnector relies on dedicated CSAF normalizers.");
/// <summary>Returns the catalogue cached by Validate/Fetch, or null before the first load.</summary>
public OracleCatalogResult? GetCachedCatalog() => _catalog;
/// <summary>
/// Merges the caller-supplied watermark with the persisted one, preferring whichever
/// is later; either side may be null, in which case the other wins.
/// </summary>
private static DateTimeOffset? ResolveSince(DateTimeOffset? contextSince, DateTimeOffset? stateSince)
    => (contextSince, stateSince) switch
    {
        (null, _) => stateSince,
        (_, null) => contextSince,
        _ => stateSince > contextSince ? stateSince : contextSince,
    };
/// <summary>
/// An entry is skipped only when a watermark exists, the entry carries a real
/// (non-default) publication timestamp, and that timestamp is not newer than the
/// watermark.
/// </summary>
private static bool ShouldSkipEntry(OracleCatalogEntry entry, DateTimeOffset? since)
    => since is { } cutoff
        && entry.PublishedAt != default
        && entry.PublishedAt <= cutoff;
private async Task<VexRawDocument?> DownloadEntryAsync(HttpClient client, OracleCatalogEntry entry, CancellationToken cancellationToken)
{
if (entry.DocumentUri is null)
{
LogConnectorEvent(LogLevel.Warning, "fetch.skip.missing_uri", "Oracle CSAF entry missing document URI; skipping.", new Dictionary<string, object?>
{
["entryId"] = entry.Id,
});
return null;
}
var payload = await DownloadWithRetryAsync(client, entry.DocumentUri, cancellationToken).ConfigureAwait(false);
if (payload is null)
{
return null;
}
var metadata = BuildMetadata(builder =>
{
builder.Add("oracle.csaf.entryId", entry.Id);
builder.Add("oracle.csaf.title", entry.Title);
builder.Add("oracle.csaf.revision", entry.Revision);
if (entry.PublishedAt != default)
{
builder.Add("oracle.csaf.published", entry.PublishedAt.ToString("O"));
}
builder.Add("oracle.csaf.sha256", NormalizeDigest(entry.Sha256));
builder.Add("oracle.csaf.size", entry.Size?.ToString(CultureInfo.InvariantCulture));
using StellaOps.Excititor.Core;
using StellaOps.Excititor.Core.Storage;
namespace StellaOps.Excititor.Connectors.Oracle.CSAF;
public sealed class OracleCsafConnector : VexConnectorBase
{
private static readonly VexConnectorDescriptor DescriptorInstance = new(
id: "excititor:oracle",
kind: VexProviderKind.Vendor,
displayName: "Oracle CSAF")
{
Tags = ImmutableArray.Create("oracle", "csaf", "cpu"),
};
private readonly OracleCatalogLoader _catalogLoader;
private readonly IHttpClientFactory _httpClientFactory;
private readonly IVexConnectorStateRepository _stateRepository;
private readonly IEnumerable<IVexConnectorOptionsValidator<OracleConnectorOptions>> _validators;
private OracleConnectorOptions? _options;
private OracleCatalogResult? _catalog;
/// <summary>
/// Creates the Oracle CSAF connector with its catalogue loader, HTTP client factory,
/// and connector-state repository.
/// </summary>
public OracleCsafConnector(
OracleCatalogLoader catalogLoader,
IHttpClientFactory httpClientFactory,
IVexConnectorStateRepository stateRepository,
IEnumerable<IVexConnectorOptionsValidator<OracleConnectorOptions>> validators,
ILogger<OracleCsafConnector> logger,
TimeProvider timeProvider)
: base(DescriptorInstance, logger, timeProvider)
{
_catalogLoader = catalogLoader ?? throw new ArgumentNullException(nameof(catalogLoader));
_httpClientFactory = httpClientFactory ?? throw new ArgumentNullException(nameof(httpClientFactory));
_stateRepository = stateRepository ?? throw new ArgumentNullException(nameof(stateRepository));
// NOTE(review): parameter is declared non-nullable yet null-coalesced here; the
// annotation should likely be nullable, as in the Cisco connector — confirm callers.
_validators = validators ?? Array.Empty<IVexConnectorOptionsValidator<OracleConnectorOptions>>();
}
/// <summary>
/// Binds and validates connector settings, then loads the Oracle CSAF catalogue.
/// Must complete before <see cref="FetchAsync"/> is called.
/// </summary>
public override async ValueTask ValidateAsync(VexConnectorSettings settings, CancellationToken cancellationToken)
{
_options = VexConnectorOptionsBinder.Bind(
Descriptor,
settings,
validators: _validators);
_catalog = await _catalogLoader.LoadAsync(_options, cancellationToken).ConfigureAwait(false);
LogConnectorEvent(LogLevel.Information, "validate", "Oracle CSAF catalogue loaded.", new Dictionary<string, object?>
{
["catalogEntryCount"] = _catalog.Metadata.Entries.Length,
["scheduleCount"] = _catalog.Metadata.CpuSchedule.Length,
["fromOffline"] = _catalog.FromOfflineSnapshot,
});
}
/// <summary>
/// Streams new Oracle CSAF documents: iterates the catalogue oldest-first, skips
/// entries at or before the resolved watermark or with already-known digests,
/// verifies advertised checksums, stores each new document in the raw sink, and
/// persists updated state (watermark + digest list) when anything was ingested.
/// Requires a prior successful <see cref="ValidateAsync"/>.
/// </summary>
public override async IAsyncEnumerable<VexRawDocument> FetchAsync(VexConnectorContext context, [EnumeratorCancellation] CancellationToken cancellationToken)
{
ArgumentNullException.ThrowIfNull(context);
if (_options is null)
{
throw new InvalidOperationException("Connector must be validated before fetch operations.");
}
// Reload the catalogue lazily if it is not already cached from ValidateAsync.
_catalog ??= await _catalogLoader.LoadAsync(_options, cancellationToken).ConfigureAwait(false);
// Oldest first; entries without a publication timestamp sort before everything else.
var entries = _catalog.Metadata.Entries
.OrderBy(static entry => entry.PublishedAt == default ? DateTimeOffset.MinValue : entry.PublishedAt)
.ToImmutableArray();
var state = await _stateRepository.GetAsync(Descriptor.Id, cancellationToken).ConfigureAwait(false);
var since = ResolveSince(context.Since, state?.LastUpdated);
var knownDigests = state?.DocumentDigests ?? ImmutableArray<string>.Empty;
var digestSet = new HashSet<string>(knownDigests, StringComparer.OrdinalIgnoreCase);
var digestList = new List<string>(knownDigests);
var latestPublished = state?.LastUpdated ?? since ?? DateTimeOffset.MinValue;
var stateChanged = false;
var client = _httpClientFactory.CreateClient(OracleConnectorOptions.HttpClientName);
LogConnectorEvent(LogLevel.Information, "fetch.begin", "Starting Oracle CSAF catalogue iteration.", new Dictionary<string, object?>
{
["since"] = since?.ToString("O"),
["entryCount"] = entries.Length,
});
foreach (var entry in entries)
{
cancellationToken.ThrowIfCancellationRequested();
if (ShouldSkipEntry(entry, since))
{
continue;
}
// The catalogue's advertised SHA-256 lets us skip downloads of known documents.
var expectedDigest = NormalizeDigest(entry.Sha256);
if (expectedDigest is not null && digestSet.Contains(expectedDigest))
{
latestPublished = UpdateLatest(latestPublished, entry.PublishedAt);
LogConnectorEvent(LogLevel.Debug, "fetch.skip.cached", "Skipping Oracle CSAF entry because digest already processed.", new Dictionary<string, object?>
{
["entryId"] = entry.Id,
["digest"] = expectedDigest,
});
continue;
}
var rawDocument = await DownloadEntryAsync(client, entry, cancellationToken).ConfigureAwait(false);
if (rawDocument is null)
{
continue;
}
// Reject payloads whose computed digest contradicts the catalogue's checksum.
if (expectedDigest is not null && !string.Equals(rawDocument.Digest, expectedDigest, StringComparison.OrdinalIgnoreCase))
{
LogConnectorEvent(LogLevel.Warning, "fetch.checksum_mismatch", "Oracle CSAF document checksum mismatch; document skipped.", new Dictionary<string, object?>
{
["entryId"] = entry.Id,
["expected"] = expectedDigest,
["actual"] = rawDocument.Digest,
["documentUri"] = entry.DocumentUri.ToString(),
});
continue;
}
if (!digestSet.Add(rawDocument.Digest))
{
LogConnectorEvent(LogLevel.Debug, "fetch.skip.duplicate", "Oracle CSAF document digest already ingested.", new Dictionary<string, object?>
{
["entryId"] = entry.Id,
["digest"] = rawDocument.Digest,
});
continue;
}
await context.RawSink.StoreAsync(rawDocument, cancellationToken).ConfigureAwait(false);
digestList.Add(rawDocument.Digest);
stateChanged = true;
latestPublished = UpdateLatest(latestPublished, entry.PublishedAt);
LogConnectorEvent(LogLevel.Information, "fetch.document_ingested", "Oracle CSAF document stored.", new Dictionary<string, object?>
{
["entryId"] = entry.Id,
["digest"] = rawDocument.Digest,
["documentUri"] = entry.DocumentUri.ToString(),
["publishedAt"] = entry.PublishedAt.ToString("O"),
});
yield return rawDocument;
// Optional politeness delay between downloads.
if (_options.RequestDelay > TimeSpan.Zero)
{
await Task.Delay(_options.RequestDelay, cancellationToken).ConfigureAwait(false);
}
}
// Persist the advanced watermark and digest list only when something new was stored.
if (stateChanged)
{
var baseState = state ?? new VexConnectorState(
Descriptor.Id,
null,
ImmutableArray<string>.Empty,
ImmutableDictionary<string, string>.Empty,
null,
0,
null,
null);
var newState = baseState with
{
LastUpdated = latestPublished == DateTimeOffset.MinValue ? baseState.LastUpdated : latestPublished,
DocumentDigests = digestList.ToImmutableArray(),
};
await _stateRepository.SaveAsync(newState, cancellationToken).ConfigureAwait(false);
}
// Documents ingested this run = digests now known minus those known at the start.
var ingestedCount = digestList.Count - knownDigests.Length;
LogConnectorEvent(LogLevel.Information, "fetch.complete", "Oracle CSAF fetch completed.", new Dictionary<string, object?>
{
["stateChanged"] = stateChanged,
["documentsProcessed"] = ingestedCount,
["latestPublished"] = latestPublished == DateTimeOffset.MinValue ? null : latestPublished.ToString("O"),
});
}
/// <summary>
/// Inline normalization is intentionally unsupported; raw Oracle CSAF documents are
/// handed off to the dedicated CSAF normalizers instead.
/// </summary>
public override ValueTask<VexClaimBatch> NormalizeAsync(VexRawDocument document, CancellationToken cancellationToken)
{
    throw new NotSupportedException("OracleCsafConnector relies on dedicated CSAF normalizers.");
}
public OracleCatalogResult? GetCachedCatalog() => _catalog;
/// <summary>
/// Resolves the effective resume watermark: whichever of the context-supplied and
/// persisted "since" values is present, or the later one when both are set.
/// </summary>
private static DateTimeOffset? ResolveSince(DateTimeOffset? contextSince, DateTimeOffset? stateSince)
    => (contextSince, stateSince) switch
    {
        (null, _) => stateSince,
        (_, null) => contextSince,
        _ => stateSince > contextSince ? stateSince : contextSince,
    };
/// <summary>
/// True when the catalog entry was published at or before the resume watermark.
/// Entries without a publish timestamp are never skipped, nor is anything when no
/// watermark is set.
/// </summary>
private static bool ShouldSkipEntry(OracleCatalogEntry entry, DateTimeOffset? since)
    => since is { } watermark
        && entry.PublishedAt != default
        && entry.PublishedAt <= watermark;
private async Task<VexRawDocument?> DownloadEntryAsync(HttpClient client, OracleCatalogEntry entry, CancellationToken cancellationToken)
{
if (entry.DocumentUri is null)
{
LogConnectorEvent(LogLevel.Warning, "fetch.skip.missing_uri", "Oracle CSAF entry missing document URI; skipping.", new Dictionary<string, object?>
{
["entryId"] = entry.Id,
});
return null;
}
var payload = await DownloadWithRetryAsync(client, entry.DocumentUri, cancellationToken).ConfigureAwait(false);
if (payload is null)
{
return null;
}
var metadata = BuildMetadata(builder =>
{
builder.Add("oracle.csaf.entryId", entry.Id);
builder.Add("oracle.csaf.title", entry.Title);
builder.Add("oracle.csaf.revision", entry.Revision);
if (entry.PublishedAt != default)
{
builder.Add("oracle.csaf.published", entry.PublishedAt.ToString("O"));
}
builder.Add("oracle.csaf.sha256", NormalizeDigest(entry.Sha256));
builder.Add("oracle.csaf.size", entry.Size?.ToString(CultureInfo.InvariantCulture));
if (!entry.Products.IsDefaultOrEmpty)
{
builder.Add("oracle.csaf.products", string.Join(",", entry.Products));
@@ -268,96 +268,96 @@ public sealed class OracleCsafConnector : VexConnectorBase
ConnectorSignerMetadataEnricher.Enrich(builder, Descriptor.Id, _logger);
});
return CreateRawDocument(VexDocumentFormat.Csaf, entry.DocumentUri, payload.AsMemory(), metadata);
}
/// <summary>
/// Downloads <paramref name="uri"/> with up to three attempts and doubling backoff
/// (1s, 2s, ...). Returns the response body, or null after all attempts fail.
/// </summary>
private async Task<byte[]?> DownloadWithRetryAsync(HttpClient client, Uri uri, CancellationToken cancellationToken)
{
    const int maxAttempts = 3;
    var delay = TimeSpan.FromSeconds(1);
    for (var attempt = 1; attempt <= maxAttempts; attempt++)
    {
        cancellationToken.ThrowIfCancellationRequested();
        try
        {
            // ResponseHeadersRead avoids buffering the body before the status check.
            using var response = await client.GetAsync(uri, HttpCompletionOption.ResponseHeadersRead, cancellationToken).ConfigureAwait(false);
            if (!response.IsSuccessStatusCode)
            {
                if (IsTransient(response.StatusCode) && attempt < maxAttempts)
                {
                    LogConnectorEvent(LogLevel.Warning, "fetch.retry.status", "Oracle CSAF document request returned transient status; retrying.", new Dictionary<string, object?>
                    {
                        ["status"] = (int)response.StatusCode,
                        ["attempt"] = attempt,
                        ["uri"] = uri.ToString(),
                    });
                    await Task.Delay(delay, cancellationToken).ConfigureAwait(false);
                    delay = delay + delay; // exponential backoff: 1s, 2s, 4s
                    continue;
                }
                // NOTE(review): the HttpRequestException thrown here is itself matched by the
                // IsTransient(Exception) catch filter below, so non-transient statuses (e.g. 404)
                // are also retried until attempts run out — confirm this is intended.
                response.EnsureSuccessStatusCode();
            }
            var bytes = await response.Content.ReadAsByteArrayAsync(cancellationToken).ConfigureAwait(false);
            return bytes;
        }
        catch (Exception ex) when (IsTransient(ex) && attempt < maxAttempts)
        {
            LogConnectorEvent(LogLevel.Warning, "fetch.retry.exception", "Oracle CSAF document request failed; retrying.", new Dictionary<string, object?>
            {
                ["attempt"] = attempt,
                ["uri"] = uri.ToString(),
                ["exception"] = ex.GetType().Name,
            });
            await Task.Delay(delay, cancellationToken).ConfigureAwait(false);
            delay = delay + delay; // same doubling backoff as the status-retry path
        }
    }
    // All attempts exhausted; caller treats null as "skip this entry".
    LogConnectorEvent(LogLevel.Error, "fetch.failed", "Oracle CSAF document could not be retrieved after retries.", new Dictionary<string, object?>
    {
        ["uri"] = uri.ToString(),
    });
    return null;
}
/// <summary>Network, IO, and timeout/cancellation faults are considered retryable.</summary>
private static bool IsTransient(Exception exception) => exception switch
{
    HttpRequestException => true,
    IOException => true,
    TaskCanceledException => true,
    _ => false,
};
/// <summary>Server errors (5xx), request timeout (408), and rate limiting (429) are retryable.</summary>
private static bool IsTransient(HttpStatusCode statusCode)
{
    var code = (int)statusCode;
    return code >= 500 || code == 408 || code == 429;
}
/// <summary>
/// Canonicalizes a digest to lower-case "sha256:&lt;hex&gt;" form, adding the prefix when
/// absent. Returns null for null/blank input.
/// </summary>
private static string? NormalizeDigest(string? digest)
{
    if (string.IsNullOrWhiteSpace(digest))
    {
        return null;
    }

    var value = digest.Trim();
    return value.StartsWith("sha256:", StringComparison.OrdinalIgnoreCase)
        ? value.ToLowerInvariant()
        : ("sha256:" + value).ToLowerInvariant();
}
/// <summary>
/// Returns the later of the two timestamps; an unset (default) published value never
/// advances the running maximum.
/// </summary>
private static DateTimeOffset UpdateLatest(DateTimeOffset current, DateTimeOffset published)
    => published == default || published <= current ? current : published;
}
return CreateRawDocument(VexDocumentFormat.Csaf, entry.DocumentUri, payload.AsMemory(), metadata);
}
private async Task<byte[]?> DownloadWithRetryAsync(HttpClient client, Uri uri, CancellationToken cancellationToken)
{
const int maxAttempts = 3;
var delay = TimeSpan.FromSeconds(1);
for (var attempt = 1; attempt <= maxAttempts; attempt++)
{
cancellationToken.ThrowIfCancellationRequested();
try
{
using var response = await client.GetAsync(uri, HttpCompletionOption.ResponseHeadersRead, cancellationToken).ConfigureAwait(false);
if (!response.IsSuccessStatusCode)
{
if (IsTransient(response.StatusCode) && attempt < maxAttempts)
{
LogConnectorEvent(LogLevel.Warning, "fetch.retry.status", "Oracle CSAF document request returned transient status; retrying.", new Dictionary<string, object?>
{
["status"] = (int)response.StatusCode,
["attempt"] = attempt,
["uri"] = uri.ToString(),
});
await Task.Delay(delay, cancellationToken).ConfigureAwait(false);
delay = delay + delay;
continue;
}
response.EnsureSuccessStatusCode();
}
var bytes = await response.Content.ReadAsByteArrayAsync(cancellationToken).ConfigureAwait(false);
return bytes;
}
catch (Exception ex) when (IsTransient(ex) && attempt < maxAttempts)
{
LogConnectorEvent(LogLevel.Warning, "fetch.retry.exception", "Oracle CSAF document request failed; retrying.", new Dictionary<string, object?>
{
["attempt"] = attempt,
["uri"] = uri.ToString(),
["exception"] = ex.GetType().Name,
});
await Task.Delay(delay, cancellationToken).ConfigureAwait(false);
delay = delay + delay;
}
}
LogConnectorEvent(LogLevel.Error, "fetch.failed", "Oracle CSAF document could not be retrieved after retries.", new Dictionary<string, object?>
{
["uri"] = uri.ToString(),
});
return null;
}
private static bool IsTransient(Exception exception)
=> exception is HttpRequestException or IOException or TaskCanceledException;
private static bool IsTransient(HttpStatusCode statusCode)
{
var status = (int)statusCode;
return status is >= 500 or 408 or 429;
}
private static string? NormalizeDigest(string? digest)
{
if (string.IsNullOrWhiteSpace(digest))
{
return null;
}
var trimmed = digest.Trim();
if (!trimmed.StartsWith("sha256:", StringComparison.OrdinalIgnoreCase))
{
trimmed = "sha256:" + trimmed;
}
return trimmed.ToLowerInvariant();
}
private static DateTimeOffset UpdateLatest(DateTimeOffset current, DateTimeOffset published)
{
if (published == default)
{
return current;
}
return published > current ? published : current;
}
}

View File

@@ -9,7 +9,7 @@
<ItemGroup>
<ProjectReference Include="..\StellaOps.Excititor.Connectors.Abstractions\StellaOps.Excititor.Connectors.Abstractions.csproj" />
<ProjectReference Include="..\StellaOps.Excititor.Core\StellaOps.Excititor.Core.csproj" />
<ProjectReference Include="..\StellaOps.Excititor.Storage.Mongo\StellaOps.Excititor.Storage.Mongo.csproj" />
<ProjectReference Include="..\StellaOps.Excititor.Storage.Postgres\StellaOps.Excititor.Storage.Postgres.csproj" />
</ItemGroup>
<ItemGroup>
<PackageReference Include="Microsoft.Extensions.Caching.Memory" Version="10.0.0-preview.7.25380.108" />

View File

@@ -5,7 +5,7 @@ using Microsoft.Extensions.DependencyInjection.Extensions;
using StellaOps.Excititor.Connectors.RedHat.CSAF.Configuration;
using StellaOps.Excititor.Connectors.RedHat.CSAF.Metadata;
using StellaOps.Excititor.Core;
using StellaOps.Excititor.Storage.Mongo;
using StellaOps.Excititor.Core.Storage;
using System.IO.Abstractions;
namespace StellaOps.Excititor.Connectors.RedHat.CSAF.DependencyInjection;

View File

@@ -11,7 +11,7 @@ using StellaOps.Excititor.Connectors.Abstractions;
using StellaOps.Excititor.Connectors.RedHat.CSAF.Configuration;
using StellaOps.Excititor.Connectors.RedHat.CSAF.Metadata;
using StellaOps.Excititor.Core;
using StellaOps.Excititor.Storage.Mongo;
using StellaOps.Excititor.Core.Storage;
namespace StellaOps.Excititor.Connectors.RedHat.CSAF;

View File

@@ -8,7 +8,7 @@
</PropertyGroup>
<ItemGroup>
<ProjectReference Include="..\StellaOps.Excititor.Connectors.Abstractions\StellaOps.Excititor.Connectors.Abstractions.csproj" />
<ProjectReference Include="..\StellaOps.Excititor.Storage.Mongo\StellaOps.Excititor.Storage.Mongo.csproj" />
<ProjectReference Include="..\StellaOps.Excititor.Storage.Postgres\StellaOps.Excititor.Storage.Postgres.csproj" />
</ItemGroup>
<ItemGroup>
<PackageReference Include="Microsoft.Extensions.Caching.Memory" Version="10.0.0-preview.7.25380.108" />

View File

@@ -20,7 +20,7 @@ using StellaOps.Excititor.Connectors.SUSE.RancherVEXHub.Metadata;
using StellaOps.Excititor.Connectors.SUSE.RancherVEXHub.State;
using StellaOps.Excititor.Connectors.Abstractions.Trust;
using StellaOps.Excititor.Core;
using StellaOps.Excititor.Storage.Mongo;
using StellaOps.Excititor.Core.Storage;
namespace StellaOps.Excititor.Connectors.SUSE.RancherVEXHub;

View File

@@ -1,11 +1,11 @@
using System;
using System.Collections.Immutable;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using StellaOps.Excititor.Core;
using StellaOps.Excititor.Storage.Mongo;
using System;
using System.Collections.Immutable;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using StellaOps.Excititor.Core;
using StellaOps.Excititor.Core.Storage;
namespace StellaOps.Excititor.Connectors.SUSE.RancherVEXHub.State;
public sealed record RancherHubCheckpointState(
@@ -15,84 +15,84 @@ public sealed record RancherHubCheckpointState(
ImmutableArray<string> Digests);
public sealed class RancherHubCheckpointManager
{
    private const string CheckpointPrefix = "checkpoint:";

    private readonly IVexConnectorStateRepository _repository;

    public RancherHubCheckpointManager(IVexConnectorStateRepository repository)
        => _repository = repository ?? throw new ArgumentNullException(nameof(repository));

    /// <summary>
    /// Loads the persisted checkpoint for <paramref name="connectorId"/> and reconciles it
    /// with any "checkpoint" settings override and the context's "since" hint.
    /// </summary>
    public async ValueTask<RancherHubCheckpointState> LoadAsync(string connectorId, VexConnectorContext context, CancellationToken cancellationToken)
    {
        ArgumentNullException.ThrowIfNull(context);

        var persisted = await _repository.GetAsync(connectorId, cancellationToken).ConfigureAwait(false);
        var storedEntries = persisted?.DocumentDigests ?? ImmutableArray<string>.Empty;

        var cursor = ExtractCursor(storedEntries);
        var digests = ExtractDigests(storedEntries);
        var lastPublishedAt = persisted?.LastUpdated;
        var effectiveSince = context.Since;

        // An explicit checkpoint override restarts digest tracking from scratch.
        if (context.Settings.Values.TryGetValue("checkpoint", out var overrideCursor) && !string.IsNullOrWhiteSpace(overrideCursor))
        {
            cursor = overrideCursor;
            digests = ImmutableArray<string>.Empty;
        }

        // Fall back to the persisted watermark when the caller supplied no "since".
        effectiveSince ??= lastPublishedAt;

        // Rewinding before the last published point invalidates the cached digest set.
        if (effectiveSince is not null && lastPublishedAt is not null && effectiveSince < lastPublishedAt)
        {
            digests = ImmutableArray<string>.Empty;
        }

        return new RancherHubCheckpointState(cursor, lastPublishedAt, effectiveSince, digests);
    }

    /// <summary>
    /// Persists the cursor (encoded as a "checkpoint:"-prefixed entry) alongside the plain
    /// document digests; blank and already-prefixed digests are dropped.
    /// </summary>
    public ValueTask SaveAsync(string connectorId, string? cursor, DateTimeOffset? lastPublishedAt, ImmutableArray<string> digests, CancellationToken cancellationToken)
    {
        var entries = ImmutableArray.CreateBuilder<string>();
        if (!string.IsNullOrWhiteSpace(cursor))
        {
            entries.Add(CheckpointPrefix + cursor);
        }

        foreach (var digest in digests)
        {
            var skip = string.IsNullOrWhiteSpace(digest)
                || digest.StartsWith(CheckpointPrefix, StringComparison.Ordinal);
            if (skip)
            {
                continue;
            }

            entries.Add(digest);
        }

        var snapshot = new VexConnectorState(connectorId, lastPublishedAt, entries.ToImmutable());
        return _repository.SaveAsync(snapshot, cancellationToken);
    }

    /// <summary>Finds the first prefixed entry and strips the prefix; null when absent.</summary>
    private static string? ExtractCursor(ImmutableArray<string> digests)
        => digests.FirstOrDefault(entry => entry.StartsWith(CheckpointPrefix, StringComparison.Ordinal)) is { } match
            ? match[CheckpointPrefix.Length..]
            : null;

    /// <summary>Returns only the plain digest entries (prefixed checkpoint entries removed).</summary>
    private static ImmutableArray<string> ExtractDigests(ImmutableArray<string> digests)
    {
        return digests
            .Where(entry => !entry.StartsWith(CheckpointPrefix, StringComparison.Ordinal))
            .ToImmutableArray();
    }
}
{
private const string CheckpointPrefix = "checkpoint:";
private readonly IVexConnectorStateRepository _repository;
public RancherHubCheckpointManager(IVexConnectorStateRepository repository)
{
_repository = repository ?? throw new ArgumentNullException(nameof(repository));
}
public async ValueTask<RancherHubCheckpointState> LoadAsync(string connectorId, VexConnectorContext context, CancellationToken cancellationToken)
{
ArgumentNullException.ThrowIfNull(context);
var state = await _repository.GetAsync(connectorId, cancellationToken).ConfigureAwait(false);
var cursor = ExtractCursor(state?.DocumentDigests ?? ImmutableArray<string>.Empty);
var digests = ExtractDigests(state?.DocumentDigests ?? ImmutableArray<string>.Empty);
var lastPublishedAt = state?.LastUpdated;
var effectiveSince = context.Since;
if (context.Settings.Values.TryGetValue("checkpoint", out var checkpointOverride) && !string.IsNullOrWhiteSpace(checkpointOverride))
{
cursor = checkpointOverride;
digests = ImmutableArray<string>.Empty;
}
if (effectiveSince is null && lastPublishedAt is not null)
{
effectiveSince = lastPublishedAt;
}
if (effectiveSince is not null && lastPublishedAt is not null && effectiveSince < lastPublishedAt)
{
digests = ImmutableArray<string>.Empty;
}
return new RancherHubCheckpointState(cursor, lastPublishedAt, effectiveSince, digests);
}
public ValueTask SaveAsync(string connectorId, string? cursor, DateTimeOffset? lastPublishedAt, ImmutableArray<string> digests, CancellationToken cancellationToken)
{
var entries = ImmutableArray.CreateBuilder<string>();
if (!string.IsNullOrWhiteSpace(cursor))
{
entries.Add($"{CheckpointPrefix}{cursor}");
}
foreach (var digest in digests)
{
if (string.IsNullOrWhiteSpace(digest))
{
continue;
}
if (digest.StartsWith(CheckpointPrefix, StringComparison.Ordinal))
{
continue;
}
entries.Add(digest);
}
var state = new VexConnectorState(connectorId, lastPublishedAt, entries.ToImmutable());
return _repository.SaveAsync(state, cancellationToken);
}
private static string? ExtractCursor(ImmutableArray<string> digests)
{
foreach (var entry in digests)
{
if (entry.StartsWith(CheckpointPrefix, StringComparison.Ordinal))
{
return entry[CheckpointPrefix.Length..];
}
}
return null;
}
private static ImmutableArray<string> ExtractDigests(ImmutableArray<string> digests)
=> digests.Where(d => !d.StartsWith(CheckpointPrefix, StringComparison.Ordinal)).ToImmutableArray();
}

View File

@@ -8,7 +8,7 @@
</PropertyGroup>
<ItemGroup>
<ProjectReference Include="..\StellaOps.Excititor.Connectors.Abstractions\StellaOps.Excititor.Connectors.Abstractions.csproj" />
<ProjectReference Include="..\StellaOps.Excititor.Storage.Mongo\StellaOps.Excititor.Storage.Mongo.csproj" />
<ProjectReference Include="..\StellaOps.Excititor.Storage.Postgres\StellaOps.Excititor.Storage.Postgres.csproj" />
</ItemGroup>
<ItemGroup>
<PackageReference Include="Microsoft.Extensions.Caching.Memory" Version="10.0.0-preview.7.25380.108" />

View File

@@ -9,7 +9,7 @@
<ItemGroup>
<ProjectReference Include="..\StellaOps.Excititor.Connectors.Abstractions\StellaOps.Excititor.Connectors.Abstractions.csproj" />
<ProjectReference Include="..\StellaOps.Excititor.Core\StellaOps.Excititor.Core.csproj" />
<ProjectReference Include="..\StellaOps.Excititor.Storage.Mongo\StellaOps.Excititor.Storage.Mongo.csproj" />
<ProjectReference Include="..\StellaOps.Excititor.Storage.Postgres\StellaOps.Excititor.Storage.Postgres.csproj" />
</ItemGroup>
<ItemGroup>
<PackageReference Include="Microsoft.Extensions.Caching.Memory" Version="10.0.0-preview.7.25380.108" />

View File

@@ -14,7 +14,7 @@ using StellaOps.Excititor.Connectors.Abstractions.Trust;
using StellaOps.Excititor.Connectors.Ubuntu.CSAF.Configuration;
using StellaOps.Excititor.Connectors.Ubuntu.CSAF.Metadata;
using StellaOps.Excititor.Core;
using StellaOps.Excititor.Storage.Mongo;
using StellaOps.Excititor.Core.Storage;
namespace StellaOps.Excititor.Connectors.Ubuntu.CSAF;

View File

@@ -16,7 +16,7 @@ Provide ingestion/domain logic for VEX observations and linksets under the Aggre
## Roles
- Backend library engineer (.NET 10 / C# preview).
- QA automation (unit + integration against Mongo fixtures).
- QA automation (unit + integration against Postgres or in-memory fixtures).
## Working Agreements
1. Update sprint status on task transitions; log notable decisions in sprint Execution Log.
@@ -28,7 +28,7 @@ Provide ingestion/domain logic for VEX observations and linksets under the Aggre
## Testing & Determinism
- Write deterministic tests: seeded clocks/GUIDs, stable ordering of collections, ISO-8601 UTC timestamps.
- Cover linkset extraction ordering, supersede chain construction, and duplicate prevention.
- Use Mongo in-memory/test harness fixtures; do not rely on live services.
- Use Postgres test fixtures or in-memory harnesses; do not rely on live services.
## Boundaries
- Do not embed Policy Engine rules or Cartographer schemas here; expose contracts for consumers instead.

View File

@@ -0,0 +1,60 @@
using System.Collections.Immutable;
using System.Threading;
using System.Threading.Tasks;
namespace StellaOps.Excititor.Core.Storage;
/// <summary>
/// Persistent state snapshot for a connector run (resume tokens, failure counts, checkpoints).
/// </summary>
/// <summary>
/// Persistent state snapshot for a connector run (resume tokens, failure counts, checkpoints).
/// </summary>
/// <param name="ConnectorId">Stable identifier of the connector this state belongs to.</param>
/// <param name="LastUpdated">Timestamp of the most recent state write, when known.</param>
/// <param name="DocumentDigests">Digests of documents already ingested (dedupe set).</param>
/// <param name="ResumeTokens">Provider-specific resume tokens; normalized to empty, never null.</param>
/// <param name="LastSuccessAt">Last fully successful run, if any.</param>
/// <param name="FailureCount">Consecutive failure counter (e.g. for backoff decisions).</param>
/// <param name="NextEligibleRun">Earliest time the connector may run again, if throttled.</param>
/// <param name="LastFailureReason">Human-readable reason for the most recent failure.</param>
/// <param name="LastCheckpoint">Connector-defined checkpoint marker.</param>
public sealed record VexConnectorState(
    string ConnectorId,
    DateTimeOffset? LastUpdated,
    ImmutableArray<string> DocumentDigests,
    ImmutableDictionary<string, string>? ResumeTokens = null,
    DateTimeOffset? LastSuccessAt = null,
    int FailureCount = 0,
    DateTimeOffset? NextEligibleRun = null,
    string? LastFailureReason = null,
    DateTimeOffset? LastCheckpoint = null)
{
    // ImmutableDictionary<,> is a reference type, so the "unset" sentinel is null.
    // (The original checked ImmutableArray-style .IsDefault, which does not exist on
    // ImmutableDictionary and therefore did not compile.)
    public ImmutableDictionary<string, string> ResumeTokens { get; init; } = ResumeTokens ?? ImmutableDictionary<string, string>.Empty;
};
/// <summary>
/// Repository abstraction for connector state persistence.
/// </summary>
public interface IVexConnectorStateRepository
{
    /// <summary>Returns the persisted state for <paramref name="connectorId"/>, or null when none exists.</summary>
    ValueTask<VexConnectorState?> GetAsync(string connectorId, CancellationToken cancellationToken);

    /// <summary>Inserts or replaces the state snapshot, keyed by its connector id.</summary>
    ValueTask SaveAsync(VexConnectorState state, CancellationToken cancellationToken);

    /// <summary>Lists the state snapshots of all known connectors.</summary>
    ValueTask<IReadOnlyCollection<VexConnectorState>> ListAsync(CancellationToken cancellationToken);
}
/// <summary>
/// Provider registry persistence abstraction.
/// </summary>
public interface IVexProviderStore
{
    /// <summary>Looks up a provider by id; null when no provider with that id is registered.</summary>
    ValueTask<VexProvider?> FindAsync(string id, CancellationToken cancellationToken);

    /// <summary>Inserts or replaces the provider record.</summary>
    ValueTask SaveAsync(VexProvider provider, CancellationToken cancellationToken);

    /// <summary>Lists all registered providers.</summary>
    ValueTask<IReadOnlyCollection<VexProvider>> ListAsync(CancellationToken cancellationToken);
}
/// <summary>
/// Claim store abstraction for VEX statements.
/// </summary>
public interface IVexClaimStore
{
    /// <summary>Appends claims observed at <paramref name="observedAt"/> to the store.</summary>
    ValueTask AppendAsync(IEnumerable<VexClaim> claims, DateTimeOffset observedAt, CancellationToken cancellationToken);

    /// <summary>
    /// Finds claims matching the vulnerability/product pair, optionally restricted to claims
    /// seen at or after <paramref name="since"/> (per the in-memory implementation's LastSeen filter).
    /// </summary>
    ValueTask<IReadOnlyCollection<VexClaim>> FindAsync(string vulnerabilityId, string productKey, DateTimeOffset? since, CancellationToken cancellationToken);

    /// <summary>Returns up to <paramref name="limit"/> claims for the given vulnerability.</summary>
    ValueTask<IReadOnlyCollection<VexClaim>> FindByVulnerabilityAsync(string vulnerabilityId, int limit, CancellationToken cancellationToken);
}

View File

@@ -0,0 +1,710 @@
using System;
using System.Buffers;
using System.Collections.Concurrent;
using System.Collections.Generic;
using System.Collections.Immutable;
using System.Linq;
using System.Security.Cryptography;
using System.Text;
using System.Text.Json;
using StellaOps.Excititor.Core.Observations;
namespace StellaOps.Excititor.Core.Storage;
/// <summary>
/// In-memory provider store used while Postgres implementations are brought online.
/// </summary>
public sealed class InMemoryVexProviderStore : IVexProviderStore
{
    private readonly ConcurrentDictionary<string, VexProvider> _providers = new(StringComparer.OrdinalIgnoreCase);

    /// <summary>Case-insensitive lookup by provider id; null when absent.</summary>
    public ValueTask<VexProvider?> FindAsync(string id, CancellationToken cancellationToken)
        => ValueTask.FromResult<VexProvider?>(_providers.TryGetValue(id, out var provider) ? provider : null);

    /// <summary>Inserts or replaces the provider entry keyed by its id.</summary>
    public ValueTask SaveAsync(VexProvider provider, CancellationToken cancellationToken)
    {
        ArgumentNullException.ThrowIfNull(provider);
        _providers[provider.Id] = provider;
        return ValueTask.CompletedTask;
    }

    /// <summary>Returns a point-in-time snapshot of all stored providers.</summary>
    public ValueTask<IReadOnlyCollection<VexProvider>> ListAsync(CancellationToken cancellationToken)
        => ValueTask.FromResult<IReadOnlyCollection<VexProvider>>(_providers.Values.ToList());
}
/// <summary>
/// In-memory connector state repository for deterministic tests and temporary storage.
/// </summary>
public sealed class InMemoryVexConnectorStateRepository : IVexConnectorStateRepository
{
    private readonly ConcurrentDictionary<string, VexConnectorState> _states = new(StringComparer.OrdinalIgnoreCase);
    private readonly TimeProvider _timeProvider;

    /// <summary>
    /// Creates the repository. Inject a deterministic <see cref="TimeProvider"/> in tests
    /// (mirrors <see cref="InMemoryVexRawStore"/>) instead of relying on the wall clock;
    /// the parameterless form keeps existing callers working.
    /// </summary>
    public InMemoryVexConnectorStateRepository(TimeProvider? timeProvider = null)
    {
        _timeProvider = timeProvider ?? TimeProvider.System;
    }

    /// <summary>Returns the stored state for the connector, or null when none exists.</summary>
    public ValueTask<VexConnectorState?> GetAsync(string connectorId, CancellationToken cancellationToken)
    {
        _states.TryGetValue(connectorId, out var state);
        return ValueTask.FromResult<VexConnectorState?>(state);
    }

    /// <summary>
    /// Inserts or replaces the state; a missing <see cref="VexConnectorState.LastUpdated"/>
    /// is stamped from the injected clock so persisted snapshots always carry a timestamp.
    /// </summary>
    public ValueTask SaveAsync(VexConnectorState state, CancellationToken cancellationToken)
    {
        ArgumentNullException.ThrowIfNull(state);
        _states[state.ConnectorId] = state with { LastUpdated = state.LastUpdated ?? _timeProvider.GetUtcNow() };
        return ValueTask.CompletedTask;
    }

    /// <summary>Returns a snapshot of all connector states.</summary>
    public ValueTask<IReadOnlyCollection<VexConnectorState>> ListAsync(CancellationToken cancellationToken)
        => ValueTask.FromResult<IReadOnlyCollection<VexConnectorState>>(_states.Values.ToList());
}
/// <summary>
/// In-memory claim store used while the Mongo dependencies are being removed.
/// </summary>
public sealed class InMemoryVexClaimStore : IVexClaimStore
{
    // List + lock (instead of ConcurrentBag) keeps claims in stable append order, so
    // queries that apply Take(limit) return deterministic results across runs — the
    // module's docs require stable ordering, which ConcurrentBag cannot guarantee.
    private readonly List<VexClaim> _claims = new();
    private readonly object _gate = new();

    /// <summary>Appends the claims in enumeration order; duplicates are not filtered.</summary>
    public ValueTask AppendAsync(IEnumerable<VexClaim> claims, DateTimeOffset observedAt, CancellationToken cancellationToken)
    {
        ArgumentNullException.ThrowIfNull(claims);
        lock (_gate)
        {
            _claims.AddRange(claims);
        }
        return ValueTask.CompletedTask;
    }

    /// <summary>
    /// Finds claims matching the vulnerability/product pair (case-insensitive), optionally
    /// restricted to claims whose LastSeen is at or after <paramref name="since"/>.
    /// </summary>
    public ValueTask<IReadOnlyCollection<VexClaim>> FindAsync(string vulnerabilityId, string productKey, DateTimeOffset? since, CancellationToken cancellationToken)
    {
        List<VexClaim> results;
        lock (_gate)
        {
            results = _claims.Where(c =>
                    string.Equals(c.VulnerabilityId, vulnerabilityId, StringComparison.OrdinalIgnoreCase) &&
                    string.Equals(c.Product.Key, productKey, StringComparison.OrdinalIgnoreCase) &&
                    (!since.HasValue || c.LastSeen >= since.Value))
                .ToList();
        }
        return ValueTask.FromResult<IReadOnlyCollection<VexClaim>>(results);
    }

    /// <summary>Returns up to <paramref name="limit"/> claims for the vulnerability, in append order.</summary>
    public ValueTask<IReadOnlyCollection<VexClaim>> FindByVulnerabilityAsync(string vulnerabilityId, int limit, CancellationToken cancellationToken)
    {
        List<VexClaim> results;
        lock (_gate)
        {
            results = _claims
                .Where(c => string.Equals(c.VulnerabilityId, vulnerabilityId, StringComparison.OrdinalIgnoreCase))
                .Take(limit)
                .ToList();
        }
        return ValueTask.FromResult<IReadOnlyCollection<VexClaim>>(results);
    }
}
/// <summary>
/// In-memory raw document store used for tests and sealed-mode fixtures while the Mongo backend is being phased out.
/// Implements the same semantics as the Postgres raw store: canonical JSON, deterministic digests,
/// tenant scoping, and stable ordering.
/// </summary>
public sealed class InMemoryVexRawStore : IVexRawStore
{
    private readonly ConcurrentDictionary<string, VexRawRecord> _records = new(StringComparer.OrdinalIgnoreCase);
    private readonly int _inlineThreshold;
    private readonly TimeProvider _timeProvider;

    /// <summary>
    /// Creates the store. Documents whose canonical payload is at most
    /// <paramref name="inlineThresholdBytes"/> bytes are flagged as inline; inject a
    /// deterministic <paramref name="timeProvider"/> in tests.
    /// </summary>
    public InMemoryVexRawStore(int inlineThresholdBytes = 256 * 1024, TimeProvider? timeProvider = null)
    {
        _inlineThreshold = Math.Max(1, inlineThresholdBytes);
        _timeProvider = timeProvider ?? TimeProvider.System;
    }

    /// <summary>
    /// Canonicalizes the document's JSON payload, derives or validates its sha256 digest,
    /// and stores it keyed by digest. First write wins: re-storing an existing digest
    /// leaves the original record untouched.
    /// </summary>
    public ValueTask StoreAsync(VexRawDocument document, CancellationToken cancellationToken)
    {
        ArgumentNullException.ThrowIfNull(document);
        cancellationToken.ThrowIfCancellationRequested();
        var canonicalContent = CanonicalizeJson(document.Content);
        var digest = EnsureDigest(document.Digest, canonicalContent);
        var metadata = document.Metadata ?? ImmutableDictionary<string, string>.Empty;
        var tenant = ResolveTenant(metadata);
        var format = document.Format;
        var retrievedAt = document.RetrievedAt;
        var inline = canonicalContent.Length <= _inlineThreshold;
        var recordedAt = _timeProvider.GetUtcNow();
        var record = new VexRawRecord(
            digest,
            tenant,
            document.ProviderId,
            format,
            document.SourceUri,
            retrievedAt,
            metadata,
            inline ? canonicalContent : canonicalContent.ToArray(),
            inline,
            metadata.TryGetValue("supersedes", out var supersedes) ? supersedes : null,
            metadata.TryGetValue("etag", out var etag) ? etag : null,
            recordedAt);
        // AddOrUpdate with an identity update keeps the first stored record for a digest.
        _records.AddOrUpdate(digest, record, (_, existing) => existing);
        return ValueTask.CompletedTask;
    }

    /// <summary>Case-insensitive lookup by digest; null when absent.</summary>
    public ValueTask<VexRawRecord?> FindByDigestAsync(string digest, CancellationToken cancellationToken)
    {
        cancellationToken.ThrowIfCancellationRequested();
        _records.TryGetValue(digest, out var record);
        return ValueTask.FromResult<VexRawRecord?>(record);
    }

    /// <summary>
    /// Tenant-scoped query with optional provider/digest/format/time filters. Results are
    /// ordered by RetrievedAt descending, then digest descending (ordinal) for stability,
    /// and paged via a (RetrievedAt, Digest) keyset cursor.
    /// </summary>
    public ValueTask<VexRawDocumentPage> QueryAsync(VexRawQuery query, CancellationToken cancellationToken)
    {
        ArgumentNullException.ThrowIfNull(query);
        cancellationToken.ThrowIfCancellationRequested();
        var filtered = _records.Values
            .Where(r => string.Equals(r.Tenant, query.Tenant, StringComparison.OrdinalIgnoreCase))
            .Where(r => query.ProviderIds.Count == 0 || query.ProviderIds.Contains(r.ProviderId, StringComparer.OrdinalIgnoreCase))
            .Where(r => query.Digests.Count == 0 || query.Digests.Contains(r.Digest, StringComparer.OrdinalIgnoreCase))
            .Where(r => query.Formats.Count == 0 || query.Formats.Contains(r.Format))
            .Where(r => query.Since is null || r.RetrievedAt >= query.Since.Value)
            .Where(r => query.Until is null || r.RetrievedAt <= query.Until.Value)
            .OrderByDescending(r => r.RetrievedAt)
            .ThenByDescending(r => r.Digest, StringComparer.Ordinal)
            .ToList();
        if (query.Cursor is not null)
        {
            // Keyset pagination: keep only rows strictly "after" the cursor in sort order.
            filtered = filtered
                .Where(r =>
                    r.RetrievedAt < query.Cursor.RetrievedAt ||
                    (r.RetrievedAt == query.Cursor.RetrievedAt && string.CompareOrdinal(r.Digest, query.Cursor.Digest) < 0))
                .ToList();
        }
        var page = filtered.Take(query.Limit).ToList();
        var hasMore = filtered.Count > page.Count;
        var nextCursor = hasMore && page.Count > 0
            ? new VexRawCursor(page[^1].RetrievedAt, page[^1].Digest)
            : null;
        var summaries = page
            .Select(r => new VexRawDocumentSummary(
                r.Digest,
                r.ProviderId,
                r.Format,
                r.SourceUri,
                r.RetrievedAt,
                r.InlineContent,
                r.Metadata))
            .ToList();
        return ValueTask.FromResult(new VexRawDocumentPage(summaries, nextCursor, hasMore));
    }

    /// <summary>Reads the "tenant" metadata entry, falling back to "default".</summary>
    private static string ResolveTenant(IReadOnlyDictionary<string, string> metadata)
    {
        if (metadata.TryGetValue("tenant", out var tenant) && !string.IsNullOrWhiteSpace(tenant))
        {
            return tenant.Trim();
        }
        return "default";
    }

    /// <summary>
    /// Re-serializes JSON with object properties sorted ordinally so equal documents
    /// always produce identical bytes (and therefore identical digests).
    /// </summary>
    private static byte[] CanonicalizeJson(ReadOnlyMemory<byte> content)
    {
        using var jsonDocument = JsonDocument.Parse(content);
        // ArrayBufferWriter<byte> does not implement IDisposable, so no 'using' here
        // (the original declared one, which fails to compile with CS1674).
        var buffer = new ArrayBufferWriter<byte>();
        using (var writer = new Utf8JsonWriter(buffer, new JsonWriterOptions { Indented = false }))
        {
            WriteCanonical(writer, jsonDocument.RootElement);
        }
        return buffer.WrittenMemory.ToArray();
    }

    /// <summary>Recursively writes <paramref name="element"/> with ordinally sorted object keys.</summary>
    private static void WriteCanonical(Utf8JsonWriter writer, JsonElement element)
    {
        switch (element.ValueKind)
        {
            case JsonValueKind.Object:
                writer.WriteStartObject();
                foreach (var property in element.EnumerateObject().OrderBy(p => p.Name, StringComparer.Ordinal))
                {
                    writer.WritePropertyName(property.Name);
                    WriteCanonical(writer, property.Value);
                }
                writer.WriteEndObject();
                break;
            case JsonValueKind.Array:
                writer.WriteStartArray();
                foreach (var item in element.EnumerateArray())
                {
                    WriteCanonical(writer, item);
                }
                writer.WriteEndArray();
                break;
            case JsonValueKind.String:
                writer.WriteStringValue(element.GetString());
                break;
            case JsonValueKind.Number:
                // Preserve numeric fidelity: integers as int64, others as double, and
                // anything that fits neither (e.g. huge decimals) verbatim.
                if (element.TryGetInt64(out var l))
                {
                    writer.WriteNumberValue(l);
                }
                else if (element.TryGetDouble(out var d))
                {
                    writer.WriteNumberValue(d);
                }
                else
                {
                    writer.WriteRawValue(element.GetRawText());
                }
                break;
            case JsonValueKind.True:
                writer.WriteBooleanValue(true);
                break;
            case JsonValueKind.False:
                writer.WriteBooleanValue(false);
                break;
            case JsonValueKind.Null:
            case JsonValueKind.Undefined:
                writer.WriteNullValue();
                break;
            default:
                writer.WriteRawValue(element.GetRawText());
                break;
        }
    }

    /// <summary>
    /// Returns the supplied digest when it already carries the sha256 prefix; otherwise
    /// computes sha256 over the canonical bytes and formats it as "sha256:&lt;hex&gt;".
    /// </summary>
    private static string EnsureDigest(string digest, ReadOnlyMemory<byte> canonicalContent)
    {
        if (!string.IsNullOrWhiteSpace(digest) && digest.StartsWith("sha256:", StringComparison.OrdinalIgnoreCase))
        {
            return digest;
        }
        Span<byte> hash = stackalloc byte[32];
        // TryHashData cannot fail with a 32-byte destination for SHA-256; the fallback is defensive.
        if (!System.Security.Cryptography.SHA256.TryHashData(canonicalContent.Span, hash, out _))
        {
            hash = System.Security.Cryptography.SHA256.HashData(canonicalContent.ToArray());
        }
        return "sha256:" + Convert.ToHexString(hash).ToLowerInvariant();
    }
}
/// <summary>
/// In-memory append-only linkset store implementing both append semantics and read models.
/// </summary>
public sealed class InMemoryAppendOnlyLinksetStore : IAppendOnlyLinksetStore, IVexLinksetStore
{
private readonly Dictionary<string, VexLinkset> _linksets = new(StringComparer.OrdinalIgnoreCase);
private readonly Dictionary<string, List<LinksetMutationEvent>> _mutations = new(StringComparer.OrdinalIgnoreCase);
private long _sequenceNumber;
private readonly object _lock = new();
public ValueTask<AppendLinksetResult> AppendObservationAsync(
string tenant,
string vulnerabilityId,
string productKey,
VexLinksetObservationRefModel observation,
VexProductScope scope,
CancellationToken cancellationToken)
{
return AppendObservationsBatchAsync(tenant, vulnerabilityId, productKey, new[] { observation }, scope, cancellationToken);
}
/// <summary>
/// Appends a batch of observation references to the linkset identified by
/// (tenant, vulnerabilityId, productKey), creating the linkset if absent.
/// Observations whose id already exists on the linkset — or that appear more than
/// once within the batch — are skipped.
/// </summary>
/// <returns>
/// Created when the linkset was newly materialized, Updated when observations were
/// added, otherwise NoChange; each carries the current mutation sequence number.
/// </returns>
public ValueTask<AppendLinksetResult> AppendObservationsBatchAsync(
    string tenant,
    string vulnerabilityId,
    string productKey,
    IEnumerable<VexLinksetObservationRefModel> observations,
    VexProductScope scope,
    CancellationToken cancellationToken)
{
    ArgumentNullException.ThrowIfNull(observations);
    cancellationToken.ThrowIfCancellationRequested();
    lock (_lock)
    {
        var linksetId = VexLinkset.CreateLinksetId(tenant, vulnerabilityId, productKey);
        var key = CreateKey(tenant, linksetId);
        var wasCreated = false;
        if (!_linksets.TryGetValue(key, out var linkset))
        {
            wasCreated = true;
            linkset = new VexLinkset(
                linksetId,
                tenant,
                vulnerabilityId,
                productKey,
                scope,
                Enumerable.Empty<VexLinksetObservationRefModel>(),
                null,
                DateTimeOffset.UtcNow,
                DateTimeOffset.UtcNow);
            _linksets[key] = linkset;
            AddMutation(key, LinksetMutationEvent.MutationTypes.LinksetCreated, null, null, null, null);
        }
        // Track ids already present so duplicates are skipped both against the stored
        // linkset AND within the incoming batch itself (the previous implementation
        // appended the same observation twice when the batch contained duplicates).
        var seenObsIds = new HashSet<string>(linkset.Observations.Select(o => o.ObservationId), StringComparer.Ordinal);
        var newObservations = new List<VexLinksetObservationRefModel>();
        foreach (var observation in observations)
        {
            if (observation is not null && seenObsIds.Add(observation.ObservationId))
            {
                newObservations.Add(observation);
            }
        }
        var observationsAdded = 0;
        if (newObservations.Count > 0)
        {
            observationsAdded = newObservations.Count;
            var merged = linkset.Observations.Concat(newObservations);
            linkset = linkset.WithObservations(merged, linkset.Disagreements);
            _linksets[key] = linkset;
            foreach (var obs in newObservations)
            {
                AddMutation(key, LinksetMutationEvent.MutationTypes.ObservationAdded, obs.ObservationId, obs.ProviderId, obs.Status, obs.Confidence);
            }
        }
        // Sequence is read after all mutations so callers observe the latest event number.
        var sequence = _sequenceNumber;
        return ValueTask.FromResult(wasCreated
            ? AppendLinksetResult.Created(linkset, observationsAdded, sequence)
            : observationsAdded > 0
                ? AppendLinksetResult.Updated(linkset, observationsAdded, 0, sequence)
                : AppendLinksetResult.NoChange(linkset, sequence));
    }
}
/// <summary>
/// Records a provider disagreement on the linkset identified by
/// (tenant, vulnerabilityId, productKey), creating a minimal linkset if absent.
/// A disagreement is deduplicated on (ProviderId, Status, Justification), case-insensitively.
/// </summary>
/// <returns>
/// Updated when a disagreement was added or the linkset was created, otherwise NoChange.
/// NOTE(review): unlike the observation path this returns Updated (not Created) for a
/// newly created linkset — preserved as-is; confirm whether callers rely on it.
/// </returns>
public ValueTask<AppendLinksetResult> AppendDisagreementAsync(
    string tenant,
    string vulnerabilityId,
    string productKey,
    VexObservationDisagreement disagreement,
    CancellationToken cancellationToken)
{
    ArgumentNullException.ThrowIfNull(disagreement);
    cancellationToken.ThrowIfCancellationRequested();
    lock (_lock)
    {
        var linksetId = VexLinkset.CreateLinksetId(tenant, vulnerabilityId, productKey);
        var key = CreateKey(tenant, linksetId);
        var wasCreated = false;
        if (!_linksets.TryGetValue(key, out var linkset))
        {
            wasCreated = true;
            // Minimal scope derived from the product key alone.
            linkset = new VexLinkset(
                linksetId,
                tenant,
                vulnerabilityId,
                productKey,
                new VexProductScope(productKey, null, null, productKey, null, Array.Empty<string>()),
                Enumerable.Empty<VexLinksetObservationRefModel>(),
                Enumerable.Empty<VexObservationDisagreement>(),
                DateTimeOffset.UtcNow,
                DateTimeOffset.UtcNow);
        }
        var disagreements = linkset.Disagreements.ToList();
        var existing = disagreements.Any(d =>
            string.Equals(d.ProviderId, disagreement.ProviderId, StringComparison.OrdinalIgnoreCase) &&
            string.Equals(d.Status, disagreement.Status, StringComparison.OrdinalIgnoreCase) &&
            string.Equals(d.Justification, disagreement.Justification, StringComparison.OrdinalIgnoreCase));
        var disagreementsAdded = 0;
        if (!existing)
        {
            disagreements.Add(disagreement);
            disagreementsAdded = 1;
        }
        var updated = linkset.WithObservations(linkset.Observations, disagreements);
        _linksets[key] = updated;
        if (wasCreated)
        {
            AddMutation(key, LinksetMutationEvent.MutationTypes.LinksetCreated, null, null, null, null);
        }
        if (disagreementsAdded > 0)
        {
            AddMutation(key, LinksetMutationEvent.MutationTypes.DisagreementAdded, null, disagreement.ProviderId, disagreement.Status, disagreement.Confidence);
        }
        var sequence = _sequenceNumber;
        return ValueTask.FromResult(disagreementsAdded > 0 || wasCreated
            ? AppendLinksetResult.Updated(updated, 0, disagreementsAdded, sequence)
            : AppendLinksetResult.NoChange(updated, sequence));
    }
}
/// <summary>Looks up a linkset by its identifier; null when absent.</summary>
public ValueTask<VexLinkset?> GetByIdAsync(string tenant, string linksetId, CancellationToken cancellationToken)
{
    return ValueTask.FromResult(GetByKeyInternal(tenant, linksetId));
}
/// <summary>
/// Looks up a linkset by its natural key (vulnerability + product); null when absent.
/// </summary>
public ValueTask<VexLinkset?> GetByKeyAsync(string tenant, string vulnerabilityId, string productKey, CancellationToken cancellationToken)
    => ValueTask.FromResult(GetByKeyInternal(tenant, VexLinkset.CreateLinksetId(tenant, vulnerabilityId, productKey)));
/// <summary>
/// Returns up to <paramref name="limit"/> linksets for a vulnerability, newest first.
/// </summary>
public ValueTask<IReadOnlyList<VexLinkset>> FindByVulnerabilityAsync(string tenant, string vulnerabilityId, int limit, CancellationToken cancellationToken)
{
    cancellationToken.ThrowIfCancellationRequested();
    // Writers mutate _linksets under _lock and Dictionary is not safe for concurrent
    // read/write, so take the same lock while enumerating.
    lock (_lock)
    {
        var results = _linksets.Values
            .Where(ls => string.Equals(ls.Tenant, tenant, StringComparison.OrdinalIgnoreCase))
            .Where(ls => string.Equals(ls.VulnerabilityId, vulnerabilityId, StringComparison.OrdinalIgnoreCase))
            .OrderByDescending(ls => ls.UpdatedAt)
            .Take(limit)
            .ToList();
        return ValueTask.FromResult<IReadOnlyList<VexLinkset>>(results);
    }
}
/// <summary>
/// Returns up to <paramref name="limit"/> linksets for a product key, newest first.
/// </summary>
public ValueTask<IReadOnlyList<VexLinkset>> FindByProductKeyAsync(string tenant, string productKey, int limit, CancellationToken cancellationToken)
{
    cancellationToken.ThrowIfCancellationRequested();
    // Synchronize with writers: _linksets is a plain Dictionary mutated under _lock.
    lock (_lock)
    {
        var results = _linksets.Values
            .Where(ls => string.Equals(ls.Tenant, tenant, StringComparison.OrdinalIgnoreCase))
            .Where(ls => string.Equals(ls.ProductKey, productKey, StringComparison.OrdinalIgnoreCase))
            .OrderByDescending(ls => ls.UpdatedAt)
            .Take(limit)
            .ToList();
        return ValueTask.FromResult<IReadOnlyList<VexLinkset>>(results);
    }
}
/// <summary>
/// Returns up to <paramref name="limit"/> linksets whose observations conflict, newest first.
/// </summary>
public ValueTask<IReadOnlyList<VexLinkset>> FindWithConflictsAsync(string tenant, int limit, CancellationToken cancellationToken)
{
    cancellationToken.ThrowIfCancellationRequested();
    // Synchronize with writers: _linksets is a plain Dictionary mutated under _lock.
    lock (_lock)
    {
        var results = _linksets.Values
            .Where(ls => string.Equals(ls.Tenant, tenant, StringComparison.OrdinalIgnoreCase))
            .Where(ls => ls.HasConflicts)
            .OrderByDescending(ls => ls.UpdatedAt)
            .Take(limit)
            .ToList();
        return ValueTask.FromResult<IReadOnlyList<VexLinkset>>(results);
    }
}
/// <summary>Counts linksets stored for a tenant.</summary>
public ValueTask<long> CountAsync(string tenant, CancellationToken cancellationToken)
{
    cancellationToken.ThrowIfCancellationRequested();
    // Synchronize with writers: _linksets is a plain Dictionary mutated under _lock.
    lock (_lock)
    {
        var count = _linksets.Values.Count(ls => string.Equals(ls.Tenant, tenant, StringComparison.OrdinalIgnoreCase));
        return ValueTask.FromResult((long)count);
    }
}
/// <summary>Counts a tenant's linksets that currently have conflicting observations.</summary>
public ValueTask<long> CountWithConflictsAsync(string tenant, CancellationToken cancellationToken)
{
    cancellationToken.ThrowIfCancellationRequested();
    // Synchronize with writers: _linksets is a plain Dictionary mutated under _lock.
    lock (_lock)
    {
        var count = _linksets.Values.Count(ls =>
            string.Equals(ls.Tenant, tenant, StringComparison.OrdinalIgnoreCase) && ls.HasConflicts);
        return ValueTask.FromResult((long)count);
    }
}
/// <summary>
/// Returns a snapshot of the mutation log for a linkset (empty when none exists).
/// </summary>
public ValueTask<IReadOnlyList<LinksetMutationEvent>> GetMutationLogAsync(string tenant, string linksetId, CancellationToken cancellationToken)
{
    cancellationToken.ThrowIfCancellationRequested();
    var key = CreateKey(tenant, linksetId);
    // AddMutation appends to these lists under _lock; take the same lock so the
    // ToList snapshot never races a concurrent append.
    lock (_lock)
    {
        if (_mutations.TryGetValue(key, out var log))
        {
            return ValueTask.FromResult<IReadOnlyList<LinksetMutationEvent>>(log.ToList());
        }
        return ValueTask.FromResult<IReadOnlyList<LinksetMutationEvent>>(Array.Empty<LinksetMutationEvent>());
    }
}
/// <summary>
/// Inserts a linkset; returns false without modifying anything when the key already exists.
/// </summary>
public ValueTask<bool> InsertAsync(VexLinkset linkset, CancellationToken cancellationToken)
{
    ArgumentNullException.ThrowIfNull(linkset);
    cancellationToken.ThrowIfCancellationRequested();
    lock (_lock)
    {
        var key = CreateKey(linkset.Tenant, linkset.LinksetId);
        if (_linksets.ContainsKey(key))
        {
            return ValueTask.FromResult(false);
        }
        _linksets[key] = linkset;
        AddMutation(key, LinksetMutationEvent.MutationTypes.LinksetCreated, null, null, null, null);
        return ValueTask.FromResult(true);
    }
}
/// <summary>
/// Inserts or replaces a linkset; returns true when the linkset was newly created.
/// Only creation is recorded in the mutation log.
/// </summary>
public ValueTask<bool> UpsertAsync(VexLinkset linkset, CancellationToken cancellationToken)
{
    ArgumentNullException.ThrowIfNull(linkset);
    cancellationToken.ThrowIfCancellationRequested();
    lock (_lock)
    {
        var key = CreateKey(linkset.Tenant, linkset.LinksetId);
        var created = !_linksets.ContainsKey(key);
        _linksets[key] = linkset;
        if (created)
        {
            AddMutation(key, LinksetMutationEvent.MutationTypes.LinksetCreated, null, null, null, null);
        }
        return ValueTask.FromResult(created);
    }
}
/// <summary>
/// Returns the linkset for (tenant, vulnerabilityId, productKey), creating an empty one
/// with a minimal product-key-derived scope when it does not yet exist.
/// </summary>
public ValueTask<VexLinkset> GetOrCreateAsync(string tenant, string vulnerabilityId, string productKey, CancellationToken cancellationToken)
{
    cancellationToken.ThrowIfCancellationRequested();
    lock (_lock)
    {
        var linksetId = VexLinkset.CreateLinksetId(tenant, vulnerabilityId, productKey);
        var key = CreateKey(tenant, linksetId);
        if (!_linksets.TryGetValue(key, out var linkset))
        {
            // Materialize with no observations and a scope derived from the product key alone.
            var defaultScope = new VexProductScope(productKey, null, null, productKey, null, Array.Empty<string>());
            linkset = new VexLinkset(linksetId, tenant, vulnerabilityId, productKey, defaultScope, Enumerable.Empty<VexLinksetObservationRefModel>());
            _linksets[key] = linkset;
            AddMutation(key, LinksetMutationEvent.MutationTypes.LinksetCreated, null, null, null, null);
        }
        return ValueTask.FromResult(linkset);
    }
}
/// <summary>
/// Returns up to <paramref name="limit"/> linksets containing at least one observation
/// from the given provider, newest first.
/// </summary>
public ValueTask<IReadOnlyList<VexLinkset>> FindByProviderAsync(string tenant, string providerId, int limit, CancellationToken cancellationToken)
{
    cancellationToken.ThrowIfCancellationRequested();
    // Synchronize with writers: _linksets is a plain Dictionary mutated under _lock.
    lock (_lock)
    {
        var results = _linksets.Values
            .Where(ls => string.Equals(ls.Tenant, tenant, StringComparison.OrdinalIgnoreCase))
            .Where(ls => ls.Observations.Any(o => string.Equals(o.ProviderId, providerId, StringComparison.OrdinalIgnoreCase)))
            .OrderByDescending(ls => ls.UpdatedAt)
            .Take(limit)
            .ToList();
        return ValueTask.FromResult<IReadOnlyList<VexLinkset>>(results);
    }
}
/// <summary>
/// Deletes a linkset and its mutation log; returns true when a linkset was removed.
/// </summary>
public ValueTask<bool> DeleteAsync(string tenant, string linksetId, CancellationToken cancellationToken)
{
    cancellationToken.ThrowIfCancellationRequested();
    var key = CreateKey(tenant, linksetId);
    lock (_lock)
    {
        // Drop the history alongside the linkset so no orphaned log entries remain.
        var removed = _linksets.Remove(key);
        _mutations.Remove(key);
        return ValueTask.FromResult(removed);
    }
}
// Shared lookup used by GetByIdAsync/GetByKeyAsync; returns null when absent.
// Takes _lock because _linksets is a plain Dictionary mutated by writers under the
// same lock (unsynchronized reads race concurrent inserts/updates).
private VexLinkset? GetByKeyInternal(string tenant, string linksetId)
{
    var key = CreateKey(tenant, linksetId);
    lock (_lock)
    {
        _linksets.TryGetValue(key, out var linkset);
        return linkset;
    }
}
// Appends a mutation event for the given linkset key and advances the shared,
// monotonically increasing sequence number. Performs no synchronization itself:
// every caller in this class invokes it while holding _lock — keep it that way.
private void AddMutation(string key, string mutationType, string? observationId, string? providerId, string? status, double? confidence)
{
    var sequence = ++_sequenceNumber;
    if (!_mutations.TryGetValue(key, out var log))
    {
        // First event for this linkset: start its log lazily.
        log = new List<LinksetMutationEvent>();
        _mutations[key] = log;
    }
    log.Add(new LinksetMutationEvent(sequence, mutationType, DateTimeOffset.UtcNow, observationId, providerId, status, confidence, null));
}
// Builds the dictionary key for a (tenant, linkset) pair. The tenant segment is
// trimmed and lower-cased so lookups are tenant-case-insensitive; the linkset id
// is kept verbatim.
private static string CreateKey(string tenant, string linksetId)
{
    var normalizedTenant = tenant.Trim().ToLowerInvariant();
    return string.Concat(normalizedTenant, "|", linksetId);
}
}
/// <summary>
/// In-memory observation store to unblock APIs while the Postgres backing store is
/// implemented. Thread-safe via nested <see cref="ConcurrentDictionary{TKey,TValue}"/>
/// buckets (tenant → observationId → observation), both compared case-insensitively.
/// </summary>
public sealed class InMemoryVexObservationStore : IVexObservationStore
{
    private readonly ConcurrentDictionary<string, ConcurrentDictionary<string, VexObservation>> _tenants = new(StringComparer.OrdinalIgnoreCase);

    /// <summary>Inserts an observation; returns false when its id already exists for the tenant.</summary>
    public ValueTask<bool> InsertAsync(VexObservation observation, CancellationToken cancellationToken)
    {
        ArgumentNullException.ThrowIfNull(observation);
        cancellationToken.ThrowIfCancellationRequested();
        var inserted = GetTenantStore(observation.Tenant).TryAdd(observation.ObservationId, observation);
        return ValueTask.FromResult(inserted);
    }

    /// <summary>
    /// Inserts or replaces an observation; always returns true (preserving the previous
    /// return contract). The indexer assignment fixes the earlier implementation, which
    /// only attempted an insert and silently dropped updates to existing observations.
    /// </summary>
    public ValueTask<bool> UpsertAsync(VexObservation observation, CancellationToken cancellationToken)
    {
        ArgumentNullException.ThrowIfNull(observation);
        cancellationToken.ThrowIfCancellationRequested();
        GetTenantStore(observation.Tenant)[observation.ObservationId] = observation;
        return ValueTask.FromResult(true);
    }

    /// <summary>
    /// Inserts the observations that belong to <paramref name="tenant"/> (case-insensitive
    /// match); nulls, mismatched tenants, and already-present ids are skipped. Returns the
    /// number of newly inserted observations.
    /// </summary>
    public ValueTask<int> InsertManyAsync(string tenant, IEnumerable<VexObservation> observations, CancellationToken cancellationToken)
    {
        if (observations is null)
        {
            return ValueTask.FromResult(0);
        }
        var count = 0;
        foreach (var obs in observations)
        {
            cancellationToken.ThrowIfCancellationRequested();
            if (obs is null || !string.Equals(obs.Tenant, tenant, StringComparison.OrdinalIgnoreCase))
            {
                continue;
            }
            // TryAdd directly instead of blocking on InsertAsync(...).Result.
            if (GetTenantStore(obs.Tenant).TryAdd(obs.ObservationId, obs))
            {
                count++;
            }
        }
        return ValueTask.FromResult(count);
    }

    /// <summary>Returns the observation with the given id, or null when absent.</summary>
    public ValueTask<VexObservation?> GetByIdAsync(string tenant, string observationId, CancellationToken cancellationToken)
    {
        if (_tenants.TryGetValue(tenant, out var store) && store.TryGetValue(observationId, out var observation))
        {
            return ValueTask.FromResult<VexObservation?>(observation);
        }
        return ValueTask.FromResult<VexObservation?>(null);
    }

    /// <summary>
    /// Finds observations containing at least one statement matching the given
    /// vulnerability/product pair (case-insensitive), newest first.
    /// </summary>
    public ValueTask<IReadOnlyList<VexObservation>> FindByVulnerabilityAndProductAsync(string tenant, string vulnerabilityId, string productKey, CancellationToken cancellationToken)
    {
        cancellationToken.ThrowIfCancellationRequested();
        if (!_tenants.TryGetValue(tenant, out var store))
        {
            return ValueTask.FromResult<IReadOnlyList<VexObservation>>(Array.Empty<VexObservation>());
        }
        var results = store.Values
            .Where(o => o.Statements.Any(s =>
                string.Equals(s.VulnerabilityId, vulnerabilityId, StringComparison.OrdinalIgnoreCase) &&
                string.Equals(s.ProductKey, productKey, StringComparison.OrdinalIgnoreCase)))
            .OrderByDescending(o => o.CreatedAt)
            .ToList();
        return ValueTask.FromResult<IReadOnlyList<VexObservation>>(results);
    }

    /// <summary>Finds up to <paramref name="limit"/> observations from a provider, newest first.</summary>
    public ValueTask<IReadOnlyList<VexObservation>> FindByProviderAsync(string tenant, string providerId, int limit, CancellationToken cancellationToken)
    {
        cancellationToken.ThrowIfCancellationRequested();
        if (!_tenants.TryGetValue(tenant, out var store))
        {
            return ValueTask.FromResult<IReadOnlyList<VexObservation>>(Array.Empty<VexObservation>());
        }
        var results = store.Values
            .Where(o => string.Equals(o.ProviderId, providerId, StringComparison.OrdinalIgnoreCase))
            .OrderByDescending(o => o.CreatedAt)
            .Take(limit)
            .ToList();
        return ValueTask.FromResult<IReadOnlyList<VexObservation>>(results);
    }

    /// <summary>Removes an observation; returns true when something was deleted.</summary>
    public ValueTask<bool> DeleteAsync(string tenant, string observationId, CancellationToken cancellationToken)
    {
        if (_tenants.TryGetValue(tenant, out var store))
        {
            return ValueTask.FromResult(store.TryRemove(observationId, out _));
        }
        return ValueTask.FromResult(false);
    }

    /// <summary>Counts observations stored for a tenant.</summary>
    public ValueTask<long> CountAsync(string tenant, CancellationToken cancellationToken)
    {
        var count = _tenants.TryGetValue(tenant, out var store)
            ? store.Count
            : 0;
        return ValueTask.FromResult((long)count);
    }

    // Fetch-or-create the per-tenant bucket; observation ids compare case-insensitively.
    private ConcurrentDictionary<string, VexObservation> GetTenantStore(string tenant)
        => _tenants.GetOrAdd(tenant, _ => new ConcurrentDictionary<string, VexObservation>(StringComparer.OrdinalIgnoreCase));
}

View File

@@ -1,7 +0,0 @@
// Temporary stubs to allow legacy interfaces to compile while MongoDB is removed.
// These types are intentionally minimal; they do not perform any database operations.
namespace MongoDB.Driver;
public interface IClientSessionHandle : IAsyncDisposable, IDisposable
{
}

View File

@@ -8,7 +8,7 @@ using Microsoft.Extensions.DependencyInjection;
using Microsoft.Extensions.Logging;
using StellaOps.Excititor.Core;
using StellaOps.Excititor.Policy;
using StellaOps.Excititor.Storage.Mongo;
using StellaOps.Excititor.Core.Storage;
namespace StellaOps.Excititor.Export;

View File

@@ -15,7 +15,7 @@
<ItemGroup>
<ProjectReference Include="..\StellaOps.Excititor.Core\StellaOps.Excititor.Core.csproj" />
<ProjectReference Include="..\StellaOps.Excititor.Policy\StellaOps.Excititor.Policy.csproj" />
<ProjectReference Include="..\StellaOps.Excititor.Storage.Mongo\StellaOps.Excititor.Storage.Mongo.csproj" />
<ProjectReference Include="..\StellaOps.Excititor.Storage.Postgres\StellaOps.Excititor.Storage.Postgres.csproj" />
<ProjectReference Include="../../../__Libraries/StellaOps.Cryptography/StellaOps.Cryptography.csproj" />
</ItemGroup>
</Project>
</Project>

View File

@@ -1,7 +1,7 @@
using Microsoft.Extensions.DependencyInjection;
using Microsoft.Extensions.Logging;
using StellaOps.Excititor.Core;
using StellaOps.Excititor.Storage.Mongo;
using StellaOps.Excititor.Core.Storage;
namespace StellaOps.Excititor.Export;