feat: Implement BerkeleyDB reader for RPM databases
Some checks failed
AOC Guard CI / aoc-guard (push) Has been cancelled
AOC Guard CI / aoc-verify (push) Has been cancelled
Concelier Attestation Tests / attestation-tests (push) Has been cancelled
Docs CI / lint-and-preview (push) Has been cancelled
Policy Lint & Smoke / policy-lint (push) Has been cancelled
Scanner Analyzers / Discover Analyzers (push) Has been cancelled
Scanner Analyzers / Build Analyzers (push) Has been cancelled
Scanner Analyzers / Test Language Analyzers (push) Has been cancelled
Scanner Analyzers / Validate Test Fixtures (push) Has been cancelled
Scanner Analyzers / Verify Deterministic Output (push) Has been cancelled
console-runner-image / build-runner-image (push) Has been cancelled
wine-csp-build / Build Wine CSP Image (push) Has been cancelled
wine-csp-build / Integration Tests (push) Has been cancelled
wine-csp-build / Security Scan (push) Has been cancelled
wine-csp-build / Generate SBOM (push) Has been cancelled
wine-csp-build / Publish Image (push) Has been cancelled
wine-csp-build / Air-Gap Bundle (push) Has been cancelled
wine-csp-build / Test Summary (push) Has been cancelled
- Added a BerkeleyDbReader class to read and extract RPM header blobs from BerkeleyDB hash databases.
- Implemented methods to detect the BerkeleyDB format and extract values, including handling of page sizes and magic numbers.
- Added tests for BerkeleyDbReader to ensure correct functionality and header extraction.

feat: Add Yarn PnP data tests

- Created YarnPnpDataTests to validate package resolution and data loading from the Yarn PnP cache.
- Implemented tests for resolved keys, package presence, and loading from the cache structure.

test: Add egg-info package fixtures for Python tests

- Created egg-info package fixtures for testing the Python analyzers.
- Included PKG-INFO, entry_points.txt, and installed-files.txt for comprehensive coverage.

test: Enhance RPM database reader tests

- Added tests for RpmDatabaseReader to validate fallback to legacy packages when SQLite is missing.
- Implemented helper methods to create legacy package files and RPM headers for testing.

test: Implement dual signing tests

- Added DualSignTests to validate secondary signature addition when configured.
- Created stub implementations for crypto providers and key resolvers to facilitate testing.

chore: Update CI script for Playwright Chromium installation

- Modified ci-console-exports.sh to ensure deterministic Chromium binary installation for the console exports tests.
- Added checks for Windows compatibility and environment variable setup for Playwright browsers.
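The format check described in the first bullet group boils down to probing the database's metadata page. Below is a minimal sketch of that detection step, assuming the conventional BerkeleyDB metadata-page layout (magic number at byte offset 12, page size at offset 20); the class, method, and constant names are illustrative only and are not the BerkeleyDbReader API added by this commit.

// Minimal sketch of BerkeleyDB format detection (illustrative; not the actual BerkeleyDbReader).
// Assumes the common BerkeleyDB metadata-page layout: magic at offset 12, page size at offset 20.
using System;
using System.Buffers.Binary;
using System.IO;

public static class BerkeleyDbProbeSketch
{
    private const uint HashMagic = 0x00061561;   // DB_HASHMAGIC (used by RPM's Packages file)
    private const uint BtreeMagic = 0x00053162;  // DB_BTREEMAGIC

    public static bool LooksLikeBerkeleyDb(string path, out uint pageSize)
    {
        pageSize = 0;
        Span<byte> meta = stackalloc byte[24];
        using var stream = File.OpenRead(path);
        if (stream.Read(meta) < meta.Length)
        {
            return false;
        }

        // The magic may be byte-swapped if the database was written on a host
        // with the opposite endianness, so probe both byte orders.
        var little = BinaryPrimitives.ReadUInt32LittleEndian(meta.Slice(12, 4));
        var big = BinaryPrimitives.ReadUInt32BigEndian(meta.Slice(12, 4));
        if (little is not (HashMagic or BtreeMagic) && big is not (HashMagic or BtreeMagic))
        {
            return false;
        }

        var bigEndian = little is not (HashMagic or BtreeMagic);
        pageSize = bigEndian
            ? BinaryPrimitives.ReadUInt32BigEndian(meta.Slice(20, 4))
            : BinaryPrimitives.ReadUInt32LittleEndian(meta.Slice(20, 4));

        // Plausible page sizes are powers of two between 512 bytes and 64 KiB.
        return pageSize is >= 512 and <= 65536 && (pageSize & (pageSize - 1)) == 0;
    }
}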
@@ -69,7 +69,7 @@ public sealed class MirrorBundleImportService : IMirrorBundleImportService
var bundleId = GenerateBundleId(manifest);
var manifestDigest = ComputeDigest(File.ReadAllBytes(manifestResult.ManifestPath));

-var catalogEntry = new BundleCatalogEntry(
+var catalogEntry = new ImportModels.BundleCatalogEntry(
request.TenantId ?? "default",
bundleId,
manifestDigest,
@@ -79,12 +79,12 @@ public sealed class MirrorBundleImportService : IMirrorBundleImportService
await _catalogRepository.UpsertAsync(catalogEntry, cancellationToken).ConfigureAwait(false);

// Register individual items
-var items = manifest.Exports?.Select(e => new BundleItem(
+var items = manifest.Exports?.Select(e => new ImportModels.BundleItem(
request.TenantId ?? "default",
bundleId,
e.Key,
e.ArtifactDigest,
-e.ArtifactSizeBytes ?? 0)) ?? Enumerable.Empty<BundleItem>();
+e.ArtifactSizeBytes ?? 0)) ?? Enumerable.Empty<ImportModels.BundleItem>();

await _itemRepository.UpsertManyAsync(items, cancellationToken).ConfigureAwait(false);

@@ -238,10 +238,10 @@ public sealed class MirrorBundleImportService : IMirrorBundleImportService
try
{
var envelopeJson = await File.ReadAllTextAsync(dsseFile, cancellationToken).ConfigureAwait(false);
-var envelope = ImportModels.DsseEnvelope.Parse(envelopeJson);
+var envelope = StellaOps.AirGap.Importer.Validation.DsseEnvelope.Parse(envelopeJson);

// Load trust roots if provided
-ImportModels.TrustRootConfig trustRoots;
+TrustRootConfig trustRoots;
if (!string.IsNullOrWhiteSpace(trustRootsPath) && File.Exists(trustRootsPath))
{
trustRoots = await LoadTrustRootsAsync(trustRootsPath, cancellationToken).ConfigureAwait(false);
@@ -287,7 +287,7 @@ public sealed class MirrorBundleImportService : IMirrorBundleImportService
}
}

-private static async Task<ImportModels.TrustRootConfig> LoadTrustRootsAsync(string path, CancellationToken cancellationToken)
+private static async Task<TrustRootConfig> LoadTrustRootsAsync(string path, CancellationToken cancellationToken)
{
var json = await File.ReadAllTextAsync(path, cancellationToken).ConfigureAwait(false);
var doc = JsonDocument.Parse(json);
@@ -324,7 +324,7 @@ public sealed class MirrorBundleImportService : IMirrorBundleImportService
}
}

-return new ImportModels.TrustRootConfig(path, fingerprints, algorithms, null, null, publicKeys);
+return new TrustRootConfig(path, fingerprints, algorithms, null, null, publicKeys);
}

private async Task<List<string>> CopyArtifactsAsync(string bundleDir, string dataStorePath, MirrorBundle manifest, CancellationToken cancellationToken)

@@ -1,5 +1,4 @@
using Microsoft.Extensions.Logging;
-using MongoDB.Driver;
using StellaOps.Concelier.Models;
using StellaOps.Concelier.Storage.Mongo.Advisories;
using StellaOps.Concelier.Storage.Postgres.Advisories;
@@ -7,66 +6,49 @@ using StellaOps.Concelier.Storage.Postgres.Advisories;
namespace StellaOps.Concelier.WebService.DualWrite;

/// <summary>
-/// Dual-write advisory store that writes to both MongoDB and PostgreSQL simultaneously.
-/// Used during migration to verify parity between backends.
+/// Postgres-backed advisory store that implements the legacy Mongo contracts.
/// </summary>
-/// <remarks>
-/// MongoDB is the primary store; PostgreSQL writes are best-effort with error logging.
-/// Read operations are always served from MongoDB.
-/// </remarks>
public sealed class DualWriteAdvisoryStore : IAdvisoryStore
{
-private readonly AdvisoryStore _mongoStore;
private readonly IPostgresAdvisoryStore _postgresStore;
private readonly ILogger<DualWriteAdvisoryStore> _logger;

-public DualWriteAdvisoryStore(
-AdvisoryStore mongoStore,
-IPostgresAdvisoryStore postgresStore,
-ILogger<DualWriteAdvisoryStore> logger)
+public DualWriteAdvisoryStore(IPostgresAdvisoryStore postgresStore, ILogger<DualWriteAdvisoryStore> logger)
{
-_mongoStore = mongoStore ?? throw new ArgumentNullException(nameof(mongoStore));
_postgresStore = postgresStore ?? throw new ArgumentNullException(nameof(postgresStore));
_logger = logger ?? throw new ArgumentNullException(nameof(logger));
}

/// <inheritdoc />
-public async Task UpsertAsync(Advisory advisory, CancellationToken cancellationToken, IClientSessionHandle? session = null)
+public async Task UpsertAsync(Advisory advisory, CancellationToken cancellationToken)
{
-// Write to MongoDB (primary)
-await _mongoStore.UpsertAsync(advisory, cancellationToken, session).ConfigureAwait(false);

-// Write to PostgreSQL (secondary, best-effort)
try
{
await _postgresStore.UpsertAsync(advisory, sourceId: null, cancellationToken).ConfigureAwait(false);
-_logger.LogDebug("Dual-write success for advisory {AdvisoryKey}", advisory.AdvisoryKey);
+_logger.LogDebug("Stored advisory {AdvisoryKey} in PostgreSQL", advisory.AdvisoryKey);
}
catch (Exception ex)
{
-// Log but don't fail - MongoDB is primary during migration
-_logger.LogWarning(ex, "Dual-write to PostgreSQL failed for advisory {AdvisoryKey}. MongoDB write succeeded.", advisory.AdvisoryKey);
+_logger.LogWarning(ex, "PostgreSQL advisory write failed for {AdvisoryKey}", advisory.AdvisoryKey);
+throw;
}
}

/// <inheritdoc />
-public Task<Advisory?> FindAsync(string advisoryKey, CancellationToken cancellationToken, IClientSessionHandle? session = null)
+public Task<Advisory?> FindAsync(string advisoryKey, CancellationToken cancellationToken)
{
-// Always read from MongoDB during dual-write mode
-return _mongoStore.FindAsync(advisoryKey, cancellationToken, session);
+return _postgresStore.FindAsync(advisoryKey, cancellationToken);
}

/// <inheritdoc />
-public Task<IReadOnlyList<Advisory>> GetRecentAsync(int limit, CancellationToken cancellationToken, IClientSessionHandle? session = null)
+public Task<IReadOnlyList<Advisory>> GetRecentAsync(int limit, CancellationToken cancellationToken)
{
-// Always read from MongoDB during dual-write mode
-return _mongoStore.GetRecentAsync(limit, cancellationToken, session);
+return _postgresStore.GetRecentAsync(limit, cancellationToken);
}

/// <inheritdoc />
-public IAsyncEnumerable<Advisory> StreamAsync(CancellationToken cancellationToken, IClientSessionHandle? session = null)
+public IAsyncEnumerable<Advisory> StreamAsync(CancellationToken cancellationToken)
{
-// Always read from MongoDB during dual-write mode
-return _mongoStore.StreamAsync(cancellationToken, session);
+return _postgresStore.StreamAsync(cancellationToken);
}
}

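For orientation on the store rework above: once the Mongo dependency is gone, the class only needs the Postgres-backed store and a logger, so wiring it into DI becomes a single registration. The sketch below is hypothetical and not part of this commit; the namespace owning IAdvisoryStore is inferred from the using directives in the diff, and the extension-method name is invented.

// Hypothetical wiring sketch, not code from this commit. IPostgresAdvisoryStore and
// logging are assumed to be registered elsewhere (e.g. by the Postgres storage module).
using Microsoft.Extensions.DependencyInjection;
using StellaOps.Concelier.Storage.Mongo.Advisories;   // assumed home of IAdvisoryStore
using StellaOps.Concelier.WebService.DualWrite;

public static class AdvisoryStoreWiringSketch
{
    // Existing IAdvisoryStore consumers keep their contract; reads and writes are
    // served by the Postgres-backed implementation injected into DualWriteAdvisoryStore.
    public static IServiceCollection AddPostgresBackedAdvisoryStore(this IServiceCollection services)
        => services.AddSingleton<IAdvisoryStore, DualWriteAdvisoryStore>();
}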
@@ -7,6 +7,7 @@ using StellaOps.Concelier.Core.AirGap.Models;
using StellaOps.Concelier.WebService.Diagnostics;
using StellaOps.Concelier.WebService.Options;
using StellaOps.Concelier.WebService.Results;
+using HttpResults = Microsoft.AspNetCore.Http.Results;

namespace StellaOps.Concelier.WebService.Extensions;

@@ -39,7 +40,7 @@ internal static class AirGapEndpointExtensions
var catalog = await catalogService.GetCatalogAsync(cursor, limit, cancellationToken)
.ConfigureAwait(false);

-return Results.Ok(catalog);
+return HttpResults.Ok(catalog);
});

// GET /api/v1/concelier/airgap/sources - List registered sources
@@ -55,7 +56,7 @@ internal static class AirGapEndpointExtensions
}

var sources = sourceRegistry.GetSources();
-return Results.Ok(new { sources, count = sources.Count });
+return HttpResults.Ok(new { sources, count = sources.Count });
});

// POST /api/v1/concelier/airgap/sources - Register new source
@@ -80,7 +81,7 @@ internal static class AirGapEndpointExtensions
var source = await sourceRegistry.RegisterAsync(registration, cancellationToken)
.ConfigureAwait(false);

-return Results.Created($"/api/v1/concelier/airgap/sources/{source.Id}", source);
+return HttpResults.Created($"/api/v1/concelier/airgap/sources/{source.Id}", source);
});

// GET /api/v1/concelier/airgap/sources/{sourceId} - Get specific source
@@ -102,7 +103,7 @@ internal static class AirGapEndpointExtensions
return ConcelierProblemResultFactory.BundleSourceNotFound(context, sourceId);
}

-return Results.Ok(source);
+return HttpResults.Ok(source);
});

// DELETE /api/v1/concelier/airgap/sources/{sourceId} - Unregister source
@@ -123,7 +124,7 @@ internal static class AirGapEndpointExtensions
.ConfigureAwait(false);

return removed
-? Results.NoContent()
+? HttpResults.NoContent()
: ConcelierProblemResultFactory.BundleSourceNotFound(context, sourceId);
});

@@ -144,7 +145,7 @@ internal static class AirGapEndpointExtensions
var result = await sourceRegistry.ValidateAsync(sourceId, cancellationToken)
.ConfigureAwait(false);

-return Results.Ok(result);
+return HttpResults.Ok(result);
});

// GET /api/v1/concelier/airgap/status - Sealed-mode status
@@ -160,7 +161,7 @@ internal static class AirGapEndpointExtensions
}

var status = sealedModeEnforcer.GetStatus();
-return Results.Ok(status);
+return HttpResults.Ok(status);
});

// POST /api/v1/concelier/airgap/bundles/{bundleId}/import - Import a bundle with timeline event
@@ -241,7 +242,7 @@ internal static class AirGapEndpointExtensions
var timelineEvent = await timelineEmitter.EmitImportAsync(importRequest, importResult, cancellationToken)
.ConfigureAwait(false);

-return Results.Ok(new BundleImportResponseDto
+return HttpResults.Ok(new BundleImportResponseDto
{
EventId = timelineEvent.EventId,
BundleId = bundleId,

@@ -4,8 +4,9 @@ using Microsoft.AspNetCore.Http;
|
||||
using Microsoft.Extensions.Options;
|
||||
using StellaOps.Concelier.WebService.Diagnostics;
|
||||
using StellaOps.Concelier.WebService.Options;
|
||||
using StellaOps.Concelier.WebService.Results;
|
||||
using StellaOps.Concelier.WebService.Services;
|
||||
using StellaOps.Concelier.WebService.Results;
|
||||
using HttpResults = Microsoft.AspNetCore.Http.Results;
|
||||
|
||||
namespace StellaOps.Concelier.WebService.Extensions;
|
||||
|
||||
@@ -116,7 +117,7 @@ internal static class MirrorEndpointExtensions
|
||||
|
||||
private static bool TryAuthorize(bool requireAuthentication, bool enforceAuthority, HttpContext context, bool authorityConfigured, out IResult result)
|
||||
{
|
||||
result = Results.Empty;
|
||||
result = HttpResults.Empty;
|
||||
if (!requireAuthentication)
|
||||
{
|
||||
return true;
|
||||
@@ -133,7 +134,7 @@ internal static class MirrorEndpointExtensions
|
||||
}
|
||||
|
||||
context.Response.Headers.WWWAuthenticate = "Bearer realm=\"StellaOps Concelier Mirror\"";
|
||||
result = Results.StatusCode(StatusCodes.Status401Unauthorized);
|
||||
result = HttpResults.StatusCode(StatusCodes.Status401Unauthorized);
|
||||
return false;
|
||||
}
|
||||
|
||||
@@ -154,7 +155,7 @@ internal static class MirrorEndpointExtensions
|
||||
context.Response.Headers.CacheControl = BuildCacheControlHeader(path);
|
||||
context.Response.Headers.LastModified = fileInfo.LastWriteTimeUtc.ToString("R", CultureInfo.InvariantCulture);
|
||||
context.Response.ContentLength = fileInfo.Length;
|
||||
return Task.FromResult(Results.Stream(stream, contentType));
|
||||
return Task.FromResult(HttpResults.Stream(stream, contentType));
|
||||
}
|
||||
|
||||
private static string ResolveContentType(string path)
|
||||
|
||||
@@ -22,8 +22,6 @@ using System.Diagnostics;
|
||||
using Microsoft.Extensions.Logging;
|
||||
using Microsoft.Extensions.Options;
|
||||
using Microsoft.Extensions.Primitives;
|
||||
using MongoDB.Bson;
|
||||
using MongoDB.Driver;
|
||||
using StellaOps.Concelier.Core.Events;
|
||||
using StellaOps.Concelier.Core.Jobs;
|
||||
using StellaOps.Concelier.Core.Observations;
|
||||
@@ -40,6 +38,7 @@ using StellaOps.Concelier.WebService.Options;
|
||||
using StellaOps.Concelier.WebService.Filters;
|
||||
using StellaOps.Concelier.WebService.Services;
|
||||
using StellaOps.Concelier.WebService.Telemetry;
|
||||
using StellaOps.Concelier.WebService.Results;
|
||||
using Serilog.Events;
|
||||
using StellaOps.Plugin.DependencyInjection;
|
||||
using StellaOps.Plugin.Hosting;
|
||||
@@ -50,23 +49,23 @@ using StellaOps.Auth.ServerIntegration;
|
||||
using StellaOps.Aoc;
|
||||
using StellaOps.Concelier.WebService.Deprecation;
|
||||
using StellaOps.Aoc.AspNetCore.Routing;
|
||||
using StellaOps.Aoc.AspNetCore.Results;
|
||||
using StellaOps.Concelier.WebService.Contracts;
|
||||
using StellaOps.Concelier.WebService.Results;
|
||||
using StellaOps.Concelier.Core.Aoc;
|
||||
using StellaOps.Concelier.Core.Raw;
|
||||
using StellaOps.Concelier.RawModels;
|
||||
using StellaOps.Concelier.Storage.Postgres;
|
||||
using StellaOps.Concelier.Storage.Mongo;
|
||||
using StellaOps.Concelier.Storage.Mongo.Advisories;
|
||||
using StellaOps.Concelier.Storage.Mongo.Aliases;
|
||||
using StellaOps.Concelier.Storage.Postgres;
|
||||
using StellaOps.Provenance.Mongo;
|
||||
using StellaOps.Concelier.Core.Attestation;
|
||||
using StellaOps.Concelier.Core.Signals;
|
||||
using AttestationClaims = StellaOps.Concelier.Core.Attestation.AttestationClaims;
|
||||
using StellaOps.Concelier.Core.Orchestration;
|
||||
using System.Diagnostics.Metrics;
|
||||
using StellaOps.Concelier.Models.Observations;
|
||||
using StellaOps.Aoc.AspNetCore.Results;
|
||||
using StellaOps.Provenance.Mongo;
|
||||
using HttpResults = Microsoft.AspNetCore.Http.Results;
|
||||
|
||||
namespace StellaOps.Concelier.WebService
|
||||
{
|
||||
@@ -179,26 +178,6 @@ builder.Services.AddSingleton<MirrorRateLimiter>();
|
||||
builder.Services.AddSingleton<MirrorFileLocator>();
|
||||
|
||||
var isTesting = builder.Environment.IsEnvironment("Testing");
|
||||
var mongoBypass = isTesting || string.Equals(
|
||||
Environment.GetEnvironmentVariable("CONCELIER_BYPASS_MONGO"),
|
||||
"1",
|
||||
StringComparison.OrdinalIgnoreCase);
|
||||
|
||||
if (!isTesting)
|
||||
{
|
||||
builder.Services.AddMongoStorage(storageOptions =>
|
||||
{
|
||||
storageOptions.ConnectionString = concelierOptions.Storage.Dsn;
|
||||
storageOptions.DatabaseName = concelierOptions.Storage.Database;
|
||||
storageOptions.CommandTimeout = TimeSpan.FromSeconds(concelierOptions.Storage.CommandTimeoutSeconds);
|
||||
});
|
||||
}
|
||||
else
|
||||
{
|
||||
// In test host we entirely bypass Mongo validation/bootstrapping; tests inject fakes.
|
||||
builder.Services.RemoveAll<IMongoClient>();
|
||||
builder.Services.RemoveAll<IMongoDatabase>();
|
||||
}
|
||||
|
||||
// Add PostgreSQL storage for LNM linkset cache if configured.
|
||||
// This provides a PostgreSQL-backed implementation of IAdvisoryLinksetStore for the read-through cache.
|
||||
@@ -511,14 +490,14 @@ app.MapGet("/.well-known/openapi", ([FromServices] OpenApiDiscoveryDocumentProvi
|
||||
{
|
||||
context.Response.Headers.ETag = etag;
|
||||
context.Response.Headers.CacheControl = "public, max-age=300, immutable";
|
||||
return Results.StatusCode(StatusCodes.Status304NotModified);
|
||||
return HttpResults.StatusCode(StatusCodes.Status304NotModified);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
context.Response.Headers.ETag = etag;
|
||||
context.Response.Headers.CacheControl = "public, max-age=300, immutable";
|
||||
return Results.Text(payload, "application/vnd.oai.openapi+json;version=3.1");
|
||||
return HttpResults.Text(payload, "application/vnd.oai.openapi+json;version=3.1");
|
||||
|
||||
static bool Matches(string? candidate, string expected)
|
||||
{
|
||||
@@ -587,7 +566,7 @@ orchestratorGroup.MapPost("/registry", async (
|
||||
|
||||
await store.UpsertAsync(record, cancellationToken).ConfigureAwait(false);
|
||||
|
||||
return Results.Accepted();
|
||||
return HttpResults.Accepted();
|
||||
}).WithName("UpsertOrchestratorRegistry");
|
||||
|
||||
orchestratorGroup.MapPost("/heartbeat", async (
|
||||
@@ -628,7 +607,7 @@ orchestratorGroup.MapPost("/heartbeat", async (
|
||||
timestamp);
|
||||
|
||||
await store.AppendHeartbeatAsync(heartbeat, cancellationToken).ConfigureAwait(false);
|
||||
return Results.Accepted();
|
||||
return HttpResults.Accepted();
|
||||
}).WithName("RecordOrchestratorHeartbeat");
|
||||
|
||||
orchestratorGroup.MapPost("/commands", async (
|
||||
@@ -672,7 +651,7 @@ orchestratorGroup.MapPost("/commands", async (
|
||||
request.ExpiresAt);
|
||||
|
||||
await store.EnqueueCommandAsync(command, cancellationToken).ConfigureAwait(false);
|
||||
return Results.Accepted();
|
||||
return HttpResults.Accepted();
|
||||
}).WithName("EnqueueOrchestratorCommand");
|
||||
|
||||
orchestratorGroup.MapGet("/commands", async (
|
||||
@@ -696,7 +675,7 @@ orchestratorGroup.MapGet("/commands", async (
|
||||
}
|
||||
|
||||
var commands = await store.GetPendingCommandsAsync(tenant, connectorId.Trim(), runId, afterSequence, cancellationToken).ConfigureAwait(false);
|
||||
return Results.Ok(commands);
|
||||
return HttpResults.Ok(commands);
|
||||
}).WithName("GetOrchestratorCommands");
|
||||
var observationsEndpoint = app.MapGet("/concelier/observations", async (
|
||||
HttpContext context,
|
||||
@@ -772,7 +751,7 @@ var observationsEndpoint = app.MapGet("/concelier/observations", async (
|
||||
result.NextCursor,
|
||||
result.HasMore);
|
||||
|
||||
return Results.Ok(response);
|
||||
return HttpResults.Ok(response);
|
||||
}).WithName("GetConcelierObservations");
|
||||
|
||||
const int DefaultLnmPageSize = 50;
|
||||
@@ -824,7 +803,7 @@ app.MapGet("/v1/lnm/linksets", async (
|
||||
items.Add(ToLnmResponse(linkset, includeConflicts.GetValueOrDefault(true), includeTimeline: false, includeObservations: false, summary));
|
||||
}
|
||||
|
||||
return Results.Ok(new LnmLinksetPage(items, resolvedPage, resolvedPageSize, result.Total));
|
||||
return HttpResults.Ok(new LnmLinksetPage(items, resolvedPage, resolvedPageSize, result.Total));
|
||||
}).WithName("ListLnmLinksets");
|
||||
|
||||
app.MapPost("/v1/lnm/linksets/search", async (
|
||||
@@ -874,7 +853,7 @@ app.MapPost("/v1/lnm/linksets/search", async (
|
||||
summary));
|
||||
}
|
||||
|
||||
return Results.Ok(new LnmLinksetPage(items, resolvedPage, resolvedPageSize, result.Total));
|
||||
return HttpResults.Ok(new LnmLinksetPage(items, resolvedPage, resolvedPageSize, result.Total));
|
||||
}).WithName("SearchLnmLinksets");
|
||||
|
||||
app.MapGet("/v1/lnm/linksets/{advisoryId}", async (
|
||||
@@ -960,7 +939,7 @@ app.MapGet("/v1/lnm/linksets/{advisoryId}", async (
|
||||
var summary = await BuildObservationSummaryAsync(observationQueryService, tenant!, linkset, cancellationToken).ConfigureAwait(false);
|
||||
var response = ToLnmResponse(linkset, includeConflicts, includeTimeline: false, includeObservations: includeObservations, summary, cached: fromCache);
|
||||
|
||||
return Results.Ok(response);
|
||||
return HttpResults.Ok(response);
|
||||
}).WithName("GetLnmLinkset");
|
||||
|
||||
app.MapGet("/linksets", async (
|
||||
@@ -999,7 +978,7 @@ app.MapGet("/linksets", async (
|
||||
nextCursor = result.NextCursor
|
||||
};
|
||||
|
||||
return Results.Ok(payload);
|
||||
return HttpResults.Ok(payload);
|
||||
}).WithName("ListLinksetsLegacy");
|
||||
|
||||
if (authorityConfigured)
|
||||
@@ -1334,20 +1313,20 @@ var advisoryObservationsEndpoint = app.MapGet("/advisories/observations", async
|
||||
var query = context.Request.Query;
|
||||
|
||||
// Parse query parameters
|
||||
var aliases = query.TryGetValue("alias", out var aliasValues)
|
||||
? AdvisoryRawRequestMapper.NormalizeStrings(aliasValues)
|
||||
string[]? aliases = query.TryGetValue("alias", out var aliasValues)
|
||||
? AdvisoryRawRequestMapper.NormalizeStrings(aliasValues).ToArray()
|
||||
: null;
|
||||
|
||||
var purls = query.TryGetValue("purl", out var purlValues)
|
||||
? AdvisoryRawRequestMapper.NormalizeStrings(purlValues)
|
||||
string[]? purls = query.TryGetValue("purl", out var purlValues)
|
||||
? AdvisoryRawRequestMapper.NormalizeStrings(purlValues).ToArray()
|
||||
: null;
|
||||
|
||||
var cpes = query.TryGetValue("cpe", out var cpeValues)
|
||||
? AdvisoryRawRequestMapper.NormalizeStrings(cpeValues)
|
||||
string[]? cpes = query.TryGetValue("cpe", out var cpeValues)
|
||||
? AdvisoryRawRequestMapper.NormalizeStrings(cpeValues).ToArray()
|
||||
: null;
|
||||
|
||||
var observationIds = query.TryGetValue("id", out var idValues)
|
||||
? AdvisoryRawRequestMapper.NormalizeStrings(idValues)
|
||||
string[]? observationIds = query.TryGetValue("id", out var idValues)
|
||||
? AdvisoryRawRequestMapper.NormalizeStrings(idValues).ToArray()
|
||||
: null;
|
||||
|
||||
int? limit = null;
|
||||
@@ -1428,14 +1407,14 @@ var advisoryLinksetsEndpoint = app.MapGet("/advisories/linksets", async (
|
||||
var query = context.Request.Query;
|
||||
|
||||
// Parse advisory IDs (alias values like CVE-*, GHSA-*)
|
||||
var advisoryIds = query.TryGetValue("advisoryId", out var advisoryIdValues)
|
||||
? AdvisoryRawRequestMapper.NormalizeStrings(advisoryIdValues)
|
||||
string[]? advisoryIds = query.TryGetValue("advisoryId", out var advisoryIdValues)
|
||||
? AdvisoryRawRequestMapper.NormalizeStrings(advisoryIdValues).ToArray()
|
||||
: (query.TryGetValue("alias", out var aliasValues)
|
||||
? AdvisoryRawRequestMapper.NormalizeStrings(aliasValues)
|
||||
? AdvisoryRawRequestMapper.NormalizeStrings(aliasValues).ToArray()
|
||||
: null);
|
||||
|
||||
var sources = query.TryGetValue("source", out var sourceValues)
|
||||
? AdvisoryRawRequestMapper.NormalizeStrings(sourceValues)
|
||||
string[]? sources = query.TryGetValue("source", out var sourceValues)
|
||||
? AdvisoryRawRequestMapper.NormalizeStrings(sourceValues).ToArray()
|
||||
: null;
|
||||
|
||||
int? limit = null;
|
||||
@@ -1496,7 +1475,8 @@ var advisoryLinksetsEndpoint = app.MapGet("/advisories/linksets", async (
|
||||
linkset.Normalized.Purls,
|
||||
linkset.Normalized.Cpes,
|
||||
linkset.Normalized.Versions,
|
||||
null) // Ranges serialized differently
|
||||
null, // Ranges serialized differently
|
||||
null) // Severities not yet populated
|
||||
: null,
|
||||
false, // Not from cache
|
||||
Array.Empty<string>(),
|
||||
@@ -1533,12 +1513,12 @@ var advisoryLinksetsExportEndpoint = app.MapGet("/advisories/linksets/export", a
|
||||
|
||||
var query = context.Request.Query;
|
||||
|
||||
var advisoryIds = query.TryGetValue("advisoryId", out var advisoryIdValues)
|
||||
? AdvisoryRawRequestMapper.NormalizeStrings(advisoryIdValues)
|
||||
string[]? advisoryIds = query.TryGetValue("advisoryId", out var advisoryIdValues)
|
||||
? AdvisoryRawRequestMapper.NormalizeStrings(advisoryIdValues).ToArray()
|
||||
: null;
|
||||
|
||||
var sources = query.TryGetValue("source", out var sourceValues)
|
||||
? AdvisoryRawRequestMapper.NormalizeStrings(sourceValues)
|
||||
string[]? sources = query.TryGetValue("source", out var sourceValues)
|
||||
? AdvisoryRawRequestMapper.NormalizeStrings(sourceValues).ToArray()
|
||||
: null;
|
||||
|
||||
var options = new AdvisoryLinksetQueryOptions(tenant, advisoryIds, sources, 1000, null);
|
||||
@@ -1634,7 +1614,7 @@ app.MapPost("/internal/events/observations/publish", async (
|
||||
published++;
|
||||
}
|
||||
|
||||
return Results.Ok(new { tenant, published, requestedCount = request.ObservationIds.Count, timestamp = timeProvider.GetUtcNow() });
|
||||
return HttpResults.Ok(new { tenant, published, requestedCount = request.ObservationIds.Count, timestamp = timeProvider.GetUtcNow() });
|
||||
}).WithName("PublishObservationEvents");
|
||||
|
||||
// Internal endpoint for publishing linkset events to NATS/Redis.
|
||||
@@ -1681,7 +1661,7 @@ app.MapPost("/internal/events/linksets/publish", async (
|
||||
published++;
|
||||
}
|
||||
|
||||
return Results.Ok(new { tenant, published, requestedCount = request.AdvisoryIds.Count, hasMore = result.HasMore, timestamp = timeProvider.GetUtcNow() });
|
||||
return HttpResults.Ok(new { tenant, published, requestedCount = request.AdvisoryIds.Count, hasMore = result.HasMore, timestamp = timeProvider.GetUtcNow() });
|
||||
}).WithName("PublishLinksetEvents");
|
||||
|
||||
var advisoryEvidenceEndpoint = app.MapGet("/vuln/evidence/advisories/{advisoryKey}", async (
|
||||
@@ -1782,7 +1762,7 @@ var attestationVerifyEndpoint = app.MapPost("/internal/attestations/verify", asy
|
||||
request.PipelineVersion ?? evidenceOptions.PipelineVersion ?? "git:unknown"),
|
||||
cancellationToken).ConfigureAwait(false);
|
||||
|
||||
return Results.Json(claims);
|
||||
return HttpResults.Json(claims);
|
||||
}
|
||||
catch (Exception ex)
|
||||
{
|
||||
@@ -1834,7 +1814,7 @@ var evidenceSnapshotEndpoint = app.MapGet("/obs/evidence/advisories/{advisoryKey
|
||||
TransparencyPath: File.Exists(transparencyPath) ? transparencyPath : null,
|
||||
PipelineVersion: options.PipelineVersion);
|
||||
|
||||
return Results.Json(response);
|
||||
return HttpResults.Json(response);
|
||||
});
|
||||
if (authorityConfigured)
|
||||
{
|
||||
@@ -1898,7 +1878,7 @@ var evidenceAttestationEndpoint = app.MapGet("/obs/attestations/advisories/{advi
|
||||
TransparencyPath: File.Exists(transparencyPath) ? transparencyPath : null,
|
||||
PipelineVersion: options.PipelineVersion);
|
||||
|
||||
return Results.Json(response);
|
||||
return HttpResults.Json(response);
|
||||
});
|
||||
if (authorityConfigured)
|
||||
{
|
||||
@@ -1927,7 +1907,7 @@ var incidentGetEndpoint = app.MapGet("/obs/incidents/advisories/{advisoryKey}",
|
||||
return Problem(context, "Incident not found", StatusCodes.Status404NotFound, ProblemTypes.NotFound, "No incident marker present.");
|
||||
}
|
||||
|
||||
return Results.Json(status);
|
||||
return HttpResults.Json(status);
|
||||
});
|
||||
if (authorityConfigured)
|
||||
{
|
||||
@@ -1967,7 +1947,7 @@ var incidentUpsertEndpoint = app.MapPost("/obs/incidents/advisories/{advisoryKey
|
||||
cancellationToken).ConfigureAwait(false);
|
||||
|
||||
var status = await IncidentFileStore.ReadAsync(evidenceOptions, tenant!, advisoryKey, timeProvider.GetUtcNow(), cancellationToken).ConfigureAwait(false);
|
||||
return Results.Json(status);
|
||||
return HttpResults.Json(status);
|
||||
});
|
||||
if (authorityConfigured)
|
||||
{
|
||||
@@ -1989,7 +1969,7 @@ var incidentDeleteEndpoint = app.MapDelete("/obs/incidents/advisories/{advisoryK
|
||||
|
||||
var evidenceOptions = concelierOptions.Value.Evidence ?? new ConcelierOptions.EvidenceBundleOptions();
|
||||
await IncidentFileStore.DeleteAsync(evidenceOptions, tenant!, advisoryKey, cancellationToken).ConfigureAwait(false);
|
||||
return Results.NoContent();
|
||||
return HttpResults.NoContent();
|
||||
});
|
||||
if (authorityConfigured)
|
||||
{
|
||||
@@ -2224,7 +2204,7 @@ var advisorySummaryEndpoint = app.MapGet("/advisories/summary", async (
|
||||
context.Response.Headers["X-Stella-Cache-Ttl"] = "0";
|
||||
|
||||
var response = AdvisorySummaryMapper.ToResponse(normalizedTenant, orderedItems, nextCursor, sortKey);
|
||||
return Results.Ok(response);
|
||||
return HttpResults.Ok(response);
|
||||
}).WithName("GetAdvisoriesSummary");
|
||||
|
||||
// Evidence batch (component-centric) endpoint for graph overlays / evidence exports.
|
||||
@@ -2292,7 +2272,7 @@ app.MapPost("/v1/evidence/batch", async (
|
||||
responses.Add(responseItem);
|
||||
}
|
||||
|
||||
return Results.Ok(new EvidenceBatchResponse(responses));
|
||||
return HttpResults.Ok(new EvidenceBatchResponse(responses));
|
||||
}).WithName("GetEvidenceBatch");
|
||||
|
||||
if (authorityConfigured)
|
||||
@@ -2384,6 +2364,7 @@ if (authorityConfigured)
|
||||
|
||||
app.MapGet("/concelier/advisories/{vulnerabilityKey}/replay", async (
|
||||
string vulnerabilityKey,
|
||||
HttpContext context,
|
||||
DateTimeOffset? asOf,
|
||||
[FromServices] IAdvisoryEventLog eventLog,
|
||||
CancellationToken cancellationToken) =>
|
||||
@@ -2468,7 +2449,7 @@ var statementProvenanceEndpoint = app.MapPost("/events/statements/{statementId:g
|
||||
return Problem(context, "Statement not found", StatusCodes.Status404NotFound, ProblemTypes.NotFound, ex.Message);
|
||||
}
|
||||
|
||||
return Results.Accepted($"/events/statements/{statementId}");
|
||||
return HttpResults.Accepted($"/events/statements/{statementId}");
|
||||
});
|
||||
|
||||
if (authorityConfigured)
|
||||
@@ -2509,7 +2490,7 @@ app.UseExceptionHandler(errorApp =>
|
||||
["traceId"] = Activity.Current?.TraceId.ToString() ?? context.TraceIdentifier,
|
||||
};
|
||||
|
||||
var problem = Results.Problem(
|
||||
var problem = HttpResults.Problem(
|
||||
detail: error?.Message,
|
||||
instance: context.Request.Path,
|
||||
statusCode: StatusCodes.Status500InternalServerError,
|
||||
@@ -2752,7 +2733,7 @@ IReadOnlyList<LnmLinksetTimeline> BuildTimeline(AdvisoryLinkset linkset, Linkset
|
||||
IResult JsonResult<T>(T value, int? statusCode = null)
|
||||
{
|
||||
var payload = JsonSerializer.Serialize(value, JsonOptions);
|
||||
return Results.Content(payload, "application/json", Encoding.UTF8, statusCode);
|
||||
return HttpResults.Content(payload, "application/json", Encoding.UTF8, statusCode);
|
||||
}
|
||||
|
||||
IResult Problem(HttpContext context, string title, int statusCode, string type, string? detail = null, IDictionary<string, object?>? extensions = null, string? errorCode = null)
|
||||
@@ -2789,7 +2770,7 @@ IResult Problem(HttpContext context, string title, int statusCode, string type,
|
||||
}
|
||||
|
||||
var payload = JsonSerializer.Serialize(problemDetails, JsonOptions);
|
||||
return Results.Content(payload, "application/problem+json", Encoding.UTF8, statusCode);
|
||||
return HttpResults.Content(payload, "application/problem+json", Encoding.UTF8, statusCode);
|
||||
}
|
||||
|
||||
bool TryResolveTenant(HttpContext context, bool requireHeader, out string tenant, out IResult? error)
|
||||
@@ -2833,14 +2814,14 @@ IResult? EnsureTenantAuthorized(HttpContext context, string tenant)
|
||||
|
||||
if (enforceTenantAllowlist && !requiredTenants.Contains(tenant))
|
||||
{
|
||||
return Results.Forbid();
|
||||
return HttpResults.Forbid();
|
||||
}
|
||||
|
||||
var principal = context.User;
|
||||
|
||||
if (enforceAuthority && (principal?.Identity?.IsAuthenticated != true))
|
||||
{
|
||||
return Results.Unauthorized();
|
||||
return HttpResults.Unauthorized();
|
||||
}
|
||||
|
||||
if (principal?.Identity?.IsAuthenticated == true)
|
||||
@@ -2848,18 +2829,18 @@ IResult? EnsureTenantAuthorized(HttpContext context, string tenant)
|
||||
var tenantClaim = principal.FindFirstValue(StellaOpsClaimTypes.Tenant);
|
||||
if (string.IsNullOrWhiteSpace(tenantClaim))
|
||||
{
|
||||
return Results.Forbid();
|
||||
return HttpResults.Forbid();
|
||||
}
|
||||
|
||||
var normalizedClaim = tenantClaim.Trim().ToLowerInvariant();
|
||||
if (!string.Equals(normalizedClaim, tenant, StringComparison.Ordinal))
|
||||
{
|
||||
return Results.Forbid();
|
||||
return HttpResults.Forbid();
|
||||
}
|
||||
|
||||
if (enforceTenantAllowlist && !requiredTenants.Contains(normalizedClaim))
|
||||
{
|
||||
return Results.Forbid();
|
||||
return HttpResults.Forbid();
|
||||
}
|
||||
}
|
||||
|
||||
@@ -3319,62 +3300,26 @@ app.MapGet("/health", ([FromServices] IOptions<ConcelierOptions> opts, [FromServ
|
||||
return JsonResult(response);
|
||||
});
|
||||
|
||||
app.MapGet("/ready", async ([FromServices] IMongoDatabase database, [FromServices] StellaOps.Concelier.WebService.Diagnostics.ServiceStatus status, HttpContext context, CancellationToken cancellationToken) =>
|
||||
app.MapGet("/ready", ([FromServices] StellaOps.Concelier.WebService.Diagnostics.ServiceStatus status, HttpContext context) =>
|
||||
{
|
||||
ApplyNoCache(context.Response);
|
||||
|
||||
var stopwatch = Stopwatch.StartNew();
|
||||
try
|
||||
{
|
||||
await database.RunCommandAsync((Command<BsonDocument>)"{ ping: 1 }", cancellationToken: cancellationToken).ConfigureAwait(false);
|
||||
stopwatch.Stop();
|
||||
status.RecordMongoCheck(success: true, latency: stopwatch.Elapsed, error: null);
|
||||
var snapshot = status.CreateSnapshot();
|
||||
var uptimeSeconds = Math.Max((snapshot.CapturedAt - snapshot.StartedAt).TotalSeconds, 0d);
|
||||
|
||||
var snapshot = status.CreateSnapshot();
|
||||
var uptimeSeconds = Math.Max((snapshot.CapturedAt - snapshot.StartedAt).TotalSeconds, 0d);
|
||||
var mongo = new MongoReadyHealth(
|
||||
Status: "bypassed",
|
||||
LatencyMs: null,
|
||||
CheckedAt: snapshot.LastReadyCheckAt,
|
||||
Error: "mongo disabled");
|
||||
|
||||
var mongo = new MongoReadyHealth(
|
||||
Status: "ready",
|
||||
LatencyMs: snapshot.LastMongoLatency?.TotalMilliseconds,
|
||||
CheckedAt: snapshot.LastReadyCheckAt,
|
||||
Error: null);
|
||||
var response = new ReadyDocument(
|
||||
Status: "ready",
|
||||
StartedAt: snapshot.StartedAt,
|
||||
UptimeSeconds: uptimeSeconds,
|
||||
Mongo: mongo);
|
||||
|
||||
var response = new ReadyDocument(
|
||||
Status: "ready",
|
||||
StartedAt: snapshot.StartedAt,
|
||||
UptimeSeconds: uptimeSeconds,
|
||||
Mongo: mongo);
|
||||
|
||||
return JsonResult(response);
|
||||
}
|
||||
catch (Exception ex)
|
||||
{
|
||||
stopwatch.Stop();
|
||||
status.RecordMongoCheck(success: false, latency: stopwatch.Elapsed, error: ex.Message);
|
||||
|
||||
var snapshot = status.CreateSnapshot();
|
||||
var uptimeSeconds = Math.Max((snapshot.CapturedAt - snapshot.StartedAt).TotalSeconds, 0d);
|
||||
|
||||
var mongo = new MongoReadyHealth(
|
||||
Status: "unready",
|
||||
LatencyMs: snapshot.LastMongoLatency?.TotalMilliseconds,
|
||||
CheckedAt: snapshot.LastReadyCheckAt,
|
||||
Error: snapshot.LastMongoError ?? ex.Message);
|
||||
|
||||
var response = new ReadyDocument(
|
||||
Status: "unready",
|
||||
StartedAt: snapshot.StartedAt,
|
||||
UptimeSeconds: uptimeSeconds,
|
||||
Mongo: mongo);
|
||||
|
||||
var extensions = new Dictionary<string, object?>(StringComparer.Ordinal)
|
||||
{
|
||||
["mongoLatencyMs"] = snapshot.LastMongoLatency?.TotalMilliseconds,
|
||||
["mongoError"] = snapshot.LastMongoError ?? ex.Message,
|
||||
};
|
||||
|
||||
return Problem(context, "Mongo unavailable", StatusCodes.Status503ServiceUnavailable, ProblemTypes.ServiceUnavailable, snapshot.LastMongoError ?? ex.Message, extensions);
|
||||
}
|
||||
return JsonResult(response);
|
||||
});
|
||||
|
||||
app.MapGet("/diagnostics/aliases/{seed}", async (string seed, [FromServices] AliasGraphResolver resolver, HttpContext context, CancellationToken cancellationToken) =>
|
||||
@@ -3553,7 +3498,7 @@ var triggerJobEndpoint = app.MapPost("/jobs/{*jobKind}", async (string jobKind,
|
||||
JobMetrics.TriggerCounter.Add(1, tags);
|
||||
if (result.Run is null)
|
||||
{
|
||||
return Results.StatusCode(StatusCodes.Status202Accepted);
|
||||
return HttpResults.StatusCode(StatusCodes.Status202Accepted);
|
||||
}
|
||||
|
||||
var acceptedRun = JobRunResponse.FromSnapshot(result.Run);
|
||||
@@ -3638,7 +3583,7 @@ var concelierHealthEndpoint = app.MapGet("/obs/concelier/health", (
|
||||
Window: "5m",
|
||||
UpdatedAt: now.ToString("O", CultureInfo.InvariantCulture));
|
||||
|
||||
return Results.Ok(payload);
|
||||
return HttpResults.Ok(payload);
|
||||
});
|
||||
|
||||
var concelierTimelineEndpoint = app.MapGet("/obs/concelier/timeline", async (
|
||||
@@ -3702,7 +3647,7 @@ var concelierTimelineEndpoint = app.MapGet("/obs/concelier/timeline", async (
|
||||
context.Response.Headers["X-Next-Cursor"] = nextCursor.ToString(CultureInfo.InvariantCulture);
|
||||
logger.LogInformation("obs timeline emitted {Count} events for tenant {Tenant} starting at {StartId} next {Next}", events.Count, tenant, startId, nextCursor);
|
||||
|
||||
return Results.Empty;
|
||||
return HttpResults.Empty;
|
||||
});
|
||||
|
||||
// ==========================================
|
||||
@@ -3774,7 +3719,7 @@ app.MapGet("/v1/signals/symbols", async (
|
||||
|
||||
var result = await symbolProvider.QueryAsync(options, cancellationToken);
|
||||
|
||||
return Results.Ok(new SignalsSymbolQueryResponse(
|
||||
return HttpResults.Ok(new SignalsSymbolQueryResponse(
|
||||
Symbols: result.Symbols.Select(s => ToSymbolResponse(s)).ToList(),
|
||||
TotalCount: result.TotalCount,
|
||||
HasMore: result.HasMore,
|
||||
@@ -3807,7 +3752,7 @@ app.MapGet("/v1/signals/symbols/advisory/{advisoryId}", async (
|
||||
|
||||
var symbolSet = await symbolProvider.GetByAdvisoryAsync(tenant!, advisoryId.Trim(), cancellationToken);
|
||||
|
||||
return Results.Ok(ToSymbolSetResponse(symbolSet));
|
||||
return HttpResults.Ok(ToSymbolSetResponse(symbolSet));
|
||||
}).WithName("GetAffectedSymbolsByAdvisory");
|
||||
|
||||
app.MapGet("/v1/signals/symbols/package/{*purl}", async (
|
||||
@@ -3831,7 +3776,7 @@ app.MapGet("/v1/signals/symbols/package/{*purl}", async (
|
||||
|
||||
if (string.IsNullOrWhiteSpace(purl))
|
||||
{
|
||||
return Problem(
|
||||
return HttpResults.Problem(
|
||||
statusCode: StatusCodes.Status400BadRequest,
|
||||
title: "Package URL required",
|
||||
detail: "The purl parameter is required.",
|
||||
@@ -3840,7 +3785,7 @@ app.MapGet("/v1/signals/symbols/package/{*purl}", async (
|
||||
|
||||
var symbolSet = await symbolProvider.GetByPackageAsync(tenant!, purl.Trim(), cancellationToken);
|
||||
|
||||
return Results.Ok(ToSymbolSetResponse(symbolSet));
|
||||
return HttpResults.Ok(ToSymbolSetResponse(symbolSet));
|
||||
}).WithName("GetAffectedSymbolsByPackage");
|
||||
|
||||
app.MapPost("/v1/signals/symbols/batch", async (
|
||||
@@ -3864,7 +3809,7 @@ app.MapPost("/v1/signals/symbols/batch", async (
|
||||
|
||||
if (request.AdvisoryIds is not { Count: > 0 })
|
||||
{
|
||||
return Problem(
|
||||
return HttpResults.Problem(
|
||||
statusCode: StatusCodes.Status400BadRequest,
|
||||
title: "Advisory IDs required",
|
||||
detail: "At least one advisoryId is required in the batch request.",
|
||||
@@ -3873,7 +3818,7 @@ app.MapPost("/v1/signals/symbols/batch", async (
|
||||
|
||||
if (request.AdvisoryIds.Count > 100)
|
||||
{
|
||||
return Problem(
|
||||
return HttpResults.Problem(
|
||||
statusCode: StatusCodes.Status400BadRequest,
|
||||
title: "Batch size exceeded",
|
||||
detail: "Maximum batch size is 100 advisory IDs.",
|
||||
@@ -3887,7 +3832,7 @@ app.MapPost("/v1/signals/symbols/batch", async (
|
||||
kvp => kvp.Key,
|
||||
kvp => ToSymbolSetResponse(kvp.Value)));
|
||||
|
||||
return Results.Ok(response);
|
||||
return HttpResults.Ok(response);
|
||||
}).WithName("GetAffectedSymbolsBatch");
|
||||
|
||||
app.MapGet("/v1/signals/symbols/exists/{advisoryId}", async (
|
||||
@@ -3916,7 +3861,7 @@ app.MapGet("/v1/signals/symbols/exists/{advisoryId}", async (
|
||||
|
||||
var exists = await symbolProvider.HasSymbolsAsync(tenant!, advisoryId.Trim(), cancellationToken);
|
||||
|
||||
return Results.Ok(new SignalsSymbolExistsResponse(Exists: exists, AdvisoryId: advisoryId.Trim()));
|
||||
return HttpResults.Ok(new SignalsSymbolExistsResponse(Exists: exists, AdvisoryId: advisoryId.Trim()));
|
||||
}).WithName("CheckAffectedSymbolsExist");
|
||||
|
||||
await app.RunAsync();
|
||||
@@ -4076,41 +4021,7 @@ static SignalsSymbolSetResponse ToSymbolSetResponse(AffectedSymbolSet symbolSet)
|
||||
|
||||
static async Task InitializeMongoAsync(WebApplication app)
|
||||
{
|
||||
// Skip Mongo initialization in testing/bypass mode.
|
||||
var isTesting = string.Equals(
|
||||
Environment.GetEnvironmentVariable("DOTNET_ENVIRONMENT"),
|
||||
"Testing",
|
||||
StringComparison.OrdinalIgnoreCase);
|
||||
var bypass = string.Equals(
|
||||
Environment.GetEnvironmentVariable("CONCELIER_BYPASS_MONGO"),
|
||||
"1",
|
||||
StringComparison.OrdinalIgnoreCase);
|
||||
if (isTesting || bypass)
|
||||
{
|
||||
return;
|
||||
}
|
||||
|
||||
await using var scope = app.Services.CreateAsyncScope();
|
||||
var bootstrapper = scope.ServiceProvider.GetRequiredService<MongoBootstrapper>();
|
||||
var logger = scope.ServiceProvider.GetRequiredService<ILoggerFactory>().CreateLogger("MongoBootstrapper");
|
||||
var status = scope.ServiceProvider.GetRequiredService<StellaOps.Concelier.WebService.Diagnostics.ServiceStatus>();
|
||||
|
||||
var stopwatch = Stopwatch.StartNew();
|
||||
|
||||
try
|
||||
{
|
||||
await bootstrapper.InitializeAsync(app.Lifetime.ApplicationStopping).ConfigureAwait(false);
|
||||
stopwatch.Stop();
|
||||
status.MarkBootstrapCompleted(stopwatch.Elapsed);
|
||||
logger.LogInformation("Mongo bootstrap completed in {ElapsedMs} ms", stopwatch.Elapsed.TotalMilliseconds);
|
||||
}
|
||||
catch (Exception ex)
|
||||
{
|
||||
stopwatch.Stop();
|
||||
status.RecordMongoCheck(success: false, latency: stopwatch.Elapsed, error: ex.Message);
|
||||
logger.LogCritical(ex, "Mongo bootstrap failed after {ElapsedMs} ms", stopwatch.Elapsed.TotalMilliseconds);
|
||||
throw;
|
||||
}
|
||||
await Task.CompletedTask;
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
@@ -14,6 +14,7 @@
|
||||
<ItemGroup>
|
||||
<PackageReference Include="Microsoft.CodeAnalysis.CSharp" Version="4.9.2" PrivateAssets="all" />
|
||||
<PackageReference Include="Microsoft.CodeAnalysis.Analyzers" Version="3.11.0" PrivateAssets="all" />
|
||||
<PackageReference Include="NETStandard.Library" Version="2.0.3" PrivateAssets="all" />
|
||||
</ItemGroup>
|
||||
|
||||
</Project>
|
||||
|
||||
@@ -18,8 +18,8 @@ using StellaOps.Concelier.Connector.Common.Fetch;
|
||||
using StellaOps.Concelier.Connector.Common.Html;
|
||||
using StellaOps.Concelier.Connector.Common;
|
||||
using StellaOps.Concelier.Storage.Mongo;
|
||||
using StellaOps.Concelier.Storage.Mongo.Documents;
|
||||
using StellaOps.Concelier.Storage.Mongo.Dtos;
|
||||
using StellaOps.Concelier.Storage.Mongo;
|
||||
using StellaOps.Concelier.Storage.Mongo;
|
||||
using StellaOps.Concelier.Storage.Mongo.Advisories;
|
||||
using StellaOps.Plugin;
|
||||
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
using StellaOps.Concelier.Storage.Mongo.Documents;
|
||||
using StellaOps.Concelier.Storage.Mongo;
|
||||
|
||||
namespace StellaOps.Concelier.Connector.Acsc.Internal;
|
||||
|
||||
|
||||
@@ -2,8 +2,8 @@ using System.Security.Cryptography;
|
||||
using System.Text;
|
||||
using System.Text.RegularExpressions;
|
||||
using StellaOps.Concelier.Models;
|
||||
using StellaOps.Concelier.Storage.Mongo.Documents;
|
||||
using StellaOps.Concelier.Storage.Mongo.Dtos;
|
||||
using StellaOps.Concelier.Storage.Mongo;
|
||||
using StellaOps.Concelier.Storage.Mongo;
|
||||
|
||||
namespace StellaOps.Concelier.Connector.Acsc.Internal;
|
||||
|
||||
|
||||
@@ -17,8 +17,8 @@ using StellaOps.Concelier.Connector.Common;
|
||||
using StellaOps.Concelier.Connector.Common.Fetch;
|
||||
using StellaOps.Concelier.Storage.Mongo;
|
||||
using StellaOps.Concelier.Storage.Mongo.Advisories;
|
||||
using StellaOps.Concelier.Storage.Mongo.Documents;
|
||||
using StellaOps.Concelier.Storage.Mongo.Dtos;
|
||||
using StellaOps.Concelier.Storage.Mongo;
|
||||
using StellaOps.Concelier.Storage.Mongo;
|
||||
using StellaOps.Plugin;
|
||||
|
||||
namespace StellaOps.Concelier.Connector.Cccs;
|
||||
@@ -145,13 +145,16 @@ public sealed class CccsConnector : IFeedConnector
|
||||
continue;
|
||||
}
|
||||
|
||||
var gridFsId = await _rawDocumentStorage.UploadAsync(
|
||||
var recordId = existing?.Id ?? Guid.NewGuid();
|
||||
|
||||
_ = await _rawDocumentStorage.UploadAsync(
|
||||
SourceName,
|
||||
documentUri,
|
||||
payload,
|
||||
"application/json",
|
||||
expiresAt: null,
|
||||
cancellationToken).ConfigureAwait(false);
|
||||
ExpiresAt: null,
|
||||
cancellationToken,
|
||||
recordId).ConfigureAwait(false);
|
||||
|
||||
var metadata = new Dictionary<string, string>(StringComparer.Ordinal)
|
||||
{
|
||||
@@ -169,7 +172,6 @@ public sealed class CccsConnector : IFeedConnector
|
||||
metadata["cccs.alertType"] = rawDocument.AlertType!;
|
||||
}
|
||||
|
||||
var recordId = existing?.Id ?? Guid.NewGuid();
|
||||
var record = new DocumentRecord(
|
||||
recordId,
|
||||
SourceName,
|
||||
@@ -182,8 +184,9 @@ public sealed class CccsConnector : IFeedConnector
|
||||
Metadata: metadata,
|
||||
Etag: null,
|
||||
LastModified: rawDocument.Modified ?? rawDocument.Published ?? result.LastModifiedUtc,
|
||||
PayloadId: gridFsId,
|
||||
ExpiresAt: null);
|
||||
PayloadId: recordId,
|
||||
ExpiresAt: null,
|
||||
Payload: payload);
|
||||
|
||||
var upserted = await _documentStore.UpsertAsync(record, cancellationToken).ConfigureAwait(false);
|
||||
pendingDocuments.Add(upserted.Id);
|
||||
|
||||
@@ -1,12 +1,12 @@
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.Linq;
|
||||
using System.Text.RegularExpressions;
|
||||
using StellaOps.Concelier.Models;
|
||||
using StellaOps.Concelier.Storage.Mongo.Documents;
|
||||
using StellaOps.Concelier.Normalization.SemVer;
|
||||
|
||||
namespace StellaOps.Concelier.Connector.Cccs.Internal;
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.Linq;
|
||||
using System.Text.RegularExpressions;
|
||||
using StellaOps.Concelier.Models;
|
||||
using StellaOps.Concelier.Storage.Mongo;
|
||||
using StellaOps.Concelier.Normalization.SemVer;
|
||||
|
||||
namespace StellaOps.Concelier.Connector.Cccs.Internal;
|
||||
|
||||
internal static class CccsMapper
|
||||
{
|
||||
@@ -110,149 +110,149 @@ internal static class CccsMapper
|
||||
.ToArray();
|
||||
}
|
||||
|
||||
private static IReadOnlyList<AffectedPackage> BuildPackages(CccsAdvisoryDto dto, DateTimeOffset recordedAt)
|
||||
{
|
||||
if (dto.Products.Count == 0)
|
||||
{
|
||||
return Array.Empty<AffectedPackage>();
|
||||
}
|
||||
|
||||
var packages = new List<AffectedPackage>(dto.Products.Count);
|
||||
for (var index = 0; index < dto.Products.Count; index++)
|
||||
{
|
||||
var product = dto.Products[index];
|
||||
if (string.IsNullOrWhiteSpace(product))
|
||||
{
|
||||
continue;
|
||||
}
|
||||
|
||||
var identifier = product.Trim();
|
||||
var provenance = new AdvisoryProvenance(
|
||||
CccsConnectorPlugin.SourceName,
|
||||
"package",
|
||||
identifier,
|
||||
recordedAt,
|
||||
new[] { ProvenanceFieldMasks.AffectedPackages });
|
||||
|
||||
var rangeAnchor = $"cccs:{dto.SerialNumber}:{index}";
|
||||
var versionRanges = BuildVersionRanges(product, rangeAnchor, recordedAt);
|
||||
var normalizedVersions = BuildNormalizedVersions(versionRanges, rangeAnchor);
|
||||
|
||||
packages.Add(new AffectedPackage(
|
||||
AffectedPackageTypes.Vendor,
|
||||
identifier,
|
||||
platform: null,
|
||||
versionRanges: versionRanges,
|
||||
statuses: Array.Empty<AffectedPackageStatus>(),
|
||||
provenance: new[] { provenance },
|
||||
normalizedVersions: normalizedVersions));
|
||||
}
|
||||
|
||||
return packages.Count == 0
|
||||
? Array.Empty<AffectedPackage>()
|
||||
: packages
|
||||
.DistinctBy(static package => package.Identifier, StringComparer.OrdinalIgnoreCase)
|
||||
.OrderBy(static package => package.Identifier, StringComparer.OrdinalIgnoreCase)
|
||||
.ToArray();
|
||||
}
|
||||
|
||||
private static IReadOnlyList<AffectedVersionRange> BuildVersionRanges(string productText, string rangeAnchor, DateTimeOffset recordedAt)
|
||||
{
|
||||
var versionText = ExtractFirstVersionToken(productText);
|
||||
if (string.IsNullOrWhiteSpace(versionText))
|
||||
{
|
||||
return Array.Empty<AffectedVersionRange>();
|
||||
}
|
||||
|
||||
var provenance = new AdvisoryProvenance(
|
||||
CccsConnectorPlugin.SourceName,
|
||||
"range",
|
||||
rangeAnchor,
|
||||
recordedAt,
|
||||
new[] { ProvenanceFieldMasks.VersionRanges });
|
||||
|
||||
var vendorExtensions = new Dictionary<string, string>
|
||||
{
|
||||
["cccs.version.raw"] = versionText!,
|
||||
["cccs.anchor"] = rangeAnchor,
|
||||
};
|
||||
|
||||
var semVerResults = SemVerRangeRuleBuilder.Build(versionText!, patchedVersion: null, provenanceNote: rangeAnchor);
|
||||
if (semVerResults.Count > 0)
|
||||
{
|
||||
return semVerResults.Select(result =>
|
||||
new AffectedVersionRange(
|
||||
rangeKind: NormalizedVersionSchemes.SemVer,
|
||||
introducedVersion: result.Primitive.Introduced,
|
||||
fixedVersion: result.Primitive.Fixed,
|
||||
lastAffectedVersion: result.Primitive.LastAffected,
|
||||
rangeExpression: result.Expression ?? versionText!,
|
||||
provenance: provenance,
|
||||
primitives: new RangePrimitives(
|
||||
result.Primitive,
|
||||
Nevra: null,
|
||||
Evr: null,
|
||||
VendorExtensions: vendorExtensions)))
|
||||
.ToArray();
|
||||
}
|
||||
|
||||
var primitives = new RangePrimitives(
|
||||
new SemVerPrimitive(
|
||||
Introduced: versionText,
|
||||
IntroducedInclusive: true,
|
||||
Fixed: null,
|
||||
FixedInclusive: false,
|
||||
LastAffected: null,
|
||||
LastAffectedInclusive: true,
|
||||
ConstraintExpression: null,
|
||||
ExactValue: versionText),
|
||||
Nevra: null,
|
||||
Evr: null,
|
||||
VendorExtensions: vendorExtensions);
|
||||
|
||||
return new[]
|
||||
{
|
||||
new AffectedVersionRange(
|
||||
rangeKind: NormalizedVersionSchemes.SemVer,
|
||||
introducedVersion: null,
|
||||
fixedVersion: null,
|
||||
lastAffectedVersion: null,
|
||||
rangeExpression: versionText,
|
||||
provenance: provenance,
|
||||
primitives: primitives),
|
||||
};
|
||||
}
|
||||
|
||||
private static IReadOnlyList<NormalizedVersionRule> BuildNormalizedVersions(
|
||||
IReadOnlyList<AffectedVersionRange> ranges,
|
||||
string rangeAnchor)
|
||||
{
|
||||
if (ranges.Count == 0)
|
||||
{
|
||||
return Array.Empty<NormalizedVersionRule>();
|
||||
}
|
||||
|
||||
var rules = new List<NormalizedVersionRule>(ranges.Count);
|
||||
foreach (var range in ranges)
|
||||
{
|
||||
var rule = range.ToNormalizedVersionRule(rangeAnchor);
|
||||
if (rule is not null)
|
||||
{
|
||||
rules.Add(rule);
|
||||
}
|
||||
}
|
||||
|
||||
return rules.Count == 0 ? Array.Empty<NormalizedVersionRule>() : rules.ToArray();
|
||||
}
|
||||
|
||||
private static string? ExtractFirstVersionToken(string value)
|
||||
{
|
||||
if (string.IsNullOrWhiteSpace(value))
|
||||
{
|
||||
return null;
|
||||
}
|
||||
|
||||
var match = Regex.Match(value, @"\d+(?:\.\d+){0,3}(?:[A-Za-z0-9\-_]*)?");
|
||||
return match.Success ? match.Value : null;
|
||||
}
|
||||
}
|
||||
private static IReadOnlyList<AffectedPackage> BuildPackages(CccsAdvisoryDto dto, DateTimeOffset recordedAt)
|
||||
{
|
||||
if (dto.Products.Count == 0)
|
||||
{
|
||||
return Array.Empty<AffectedPackage>();
|
||||
}
|
||||
|
||||
var packages = new List<AffectedPackage>(dto.Products.Count);
|
||||
for (var index = 0; index < dto.Products.Count; index++)
|
||||
{
|
||||
var product = dto.Products[index];
|
||||
if (string.IsNullOrWhiteSpace(product))
|
||||
{
|
||||
continue;
|
||||
}
|
||||
|
||||
var identifier = product.Trim();
|
||||
var provenance = new AdvisoryProvenance(
|
||||
CccsConnectorPlugin.SourceName,
|
||||
"package",
|
||||
identifier,
|
||||
recordedAt,
|
||||
new[] { ProvenanceFieldMasks.AffectedPackages });
|
||||
|
||||
var rangeAnchor = $"cccs:{dto.SerialNumber}:{index}";
|
||||
var versionRanges = BuildVersionRanges(product, rangeAnchor, recordedAt);
|
||||
var normalizedVersions = BuildNormalizedVersions(versionRanges, rangeAnchor);
|
||||
|
||||
packages.Add(new AffectedPackage(
|
||||
AffectedPackageTypes.Vendor,
|
||||
identifier,
|
||||
platform: null,
|
||||
versionRanges: versionRanges,
|
||||
statuses: Array.Empty<AffectedPackageStatus>(),
|
||||
provenance: new[] { provenance },
|
||||
normalizedVersions: normalizedVersions));
|
||||
}
|
||||
|
||||
return packages.Count == 0
|
||||
? Array.Empty<AffectedPackage>()
|
||||
: packages
|
||||
.DistinctBy(static package => package.Identifier, StringComparer.OrdinalIgnoreCase)
|
||||
.OrderBy(static package => package.Identifier, StringComparer.OrdinalIgnoreCase)
|
||||
.ToArray();
|
||||
}
|
||||
|
||||
private static IReadOnlyList<AffectedVersionRange> BuildVersionRanges(string productText, string rangeAnchor, DateTimeOffset recordedAt)
|
||||
{
|
||||
var versionText = ExtractFirstVersionToken(productText);
|
||||
if (string.IsNullOrWhiteSpace(versionText))
|
||||
{
|
||||
return Array.Empty<AffectedVersionRange>();
|
||||
}
|
||||
|
||||
var provenance = new AdvisoryProvenance(
|
||||
CccsConnectorPlugin.SourceName,
|
||||
"range",
|
||||
rangeAnchor,
|
||||
recordedAt,
|
||||
new[] { ProvenanceFieldMasks.VersionRanges });
|
||||
|
||||
var vendorExtensions = new Dictionary<string, string>
|
||||
{
|
||||
["cccs.version.raw"] = versionText!,
|
||||
["cccs.anchor"] = rangeAnchor,
|
||||
};
|
||||
|
||||
var semVerResults = SemVerRangeRuleBuilder.Build(versionText!, patchedVersion: null, provenanceNote: rangeAnchor);
|
||||
if (semVerResults.Count > 0)
|
||||
{
|
||||
return semVerResults.Select(result =>
|
||||
new AffectedVersionRange(
|
||||
rangeKind: NormalizedVersionSchemes.SemVer,
|
||||
introducedVersion: result.Primitive.Introduced,
|
||||
fixedVersion: result.Primitive.Fixed,
|
||||
lastAffectedVersion: result.Primitive.LastAffected,
|
||||
rangeExpression: result.Expression ?? versionText!,
|
||||
provenance: provenance,
|
||||
primitives: new RangePrimitives(
|
||||
result.Primitive,
|
||||
Nevra: null,
|
||||
Evr: null,
|
||||
VendorExtensions: vendorExtensions)))
|
||||
.ToArray();
|
||||
}
|
||||
|
||||
var primitives = new RangePrimitives(
|
||||
new SemVerPrimitive(
|
||||
Introduced: versionText,
|
||||
IntroducedInclusive: true,
|
||||
Fixed: null,
|
||||
FixedInclusive: false,
|
||||
LastAffected: null,
|
||||
LastAffectedInclusive: true,
|
||||
ConstraintExpression: null,
|
||||
ExactValue: versionText),
|
||||
Nevra: null,
|
||||
Evr: null,
|
||||
VendorExtensions: vendorExtensions);
|
||||
|
||||
return new[]
|
||||
{
|
||||
new AffectedVersionRange(
|
||||
rangeKind: NormalizedVersionSchemes.SemVer,
|
||||
introducedVersion: null,
|
||||
fixedVersion: null,
|
||||
lastAffectedVersion: null,
|
||||
rangeExpression: versionText,
|
||||
provenance: provenance,
|
||||
primitives: primitives),
|
||||
};
|
||||
}
|
||||
|
||||
private static IReadOnlyList<NormalizedVersionRule> BuildNormalizedVersions(
|
||||
IReadOnlyList<AffectedVersionRange> ranges,
|
||||
string rangeAnchor)
|
||||
{
|
||||
if (ranges.Count == 0)
|
||||
{
|
||||
return Array.Empty<NormalizedVersionRule>();
|
||||
}
|
||||
|
||||
var rules = new List<NormalizedVersionRule>(ranges.Count);
|
||||
foreach (var range in ranges)
|
||||
{
|
||||
var rule = range.ToNormalizedVersionRule(rangeAnchor);
|
||||
if (rule is not null)
|
||||
{
|
||||
rules.Add(rule);
|
||||
}
|
||||
}
|
||||
|
||||
return rules.Count == 0 ? Array.Empty<NormalizedVersionRule>() : rules.ToArray();
|
||||
}
|
||||
|
||||
private static string? ExtractFirstVersionToken(string value)
|
||||
{
|
||||
if (string.IsNullOrWhiteSpace(value))
|
||||
{
|
||||
return null;
|
||||
}
|
||||
|
||||
var match = Regex.Match(value, @"\d+(?:\.\d+){0,3}(?:[A-Za-z0-9\-_]*)?");
|
||||
return match.Success ? match.Value : null;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -14,8 +14,8 @@ using StellaOps.Concelier.Connector.Common.Fetch;
|
||||
using StellaOps.Concelier.Connector.Common.Html;
|
||||
using StellaOps.Concelier.Storage.Mongo;
|
||||
using StellaOps.Concelier.Storage.Mongo.Advisories;
|
||||
using StellaOps.Concelier.Storage.Mongo.Documents;
|
||||
using StellaOps.Concelier.Storage.Mongo.Dtos;
|
||||
using StellaOps.Concelier.Storage.Mongo;
|
||||
using StellaOps.Concelier.Storage.Mongo;
|
||||
using StellaOps.Plugin;
|
||||
|
||||
namespace StellaOps.Concelier.Connector.CertBund;
|
||||
|
||||
@@ -3,7 +3,7 @@ using System.Collections.Generic;
|
||||
using System.Linq;
|
||||
using System.Text.RegularExpressions;
|
||||
using StellaOps.Concelier.Models;
|
||||
using StellaOps.Concelier.Storage.Mongo.Documents;
|
||||
using StellaOps.Concelier.Storage.Mongo;
|
||||
using StellaOps.Concelier.Normalization.SemVer;
|
||||
|
||||
namespace StellaOps.Concelier.Connector.CertBund.Internal;
|
||||
|
||||
@@ -17,8 +17,8 @@ using StellaOps.Concelier.Connector.Common;
|
||||
using StellaOps.Concelier.Connector.Common.Fetch;
|
||||
using StellaOps.Concelier.Storage.Mongo;
|
||||
using StellaOps.Concelier.Storage.Mongo.Advisories;
|
||||
using StellaOps.Concelier.Storage.Mongo.Documents;
|
||||
using StellaOps.Concelier.Storage.Mongo.Dtos;
|
||||
using StellaOps.Concelier.Storage.Mongo;
|
||||
using StellaOps.Concelier.Storage.Mongo;
|
||||
using StellaOps.Plugin;
|
||||
|
||||
namespace StellaOps.Concelier.Connector.CertCc;
|
||||
|
||||
@@ -5,8 +5,8 @@ using System.Linq;
|
||||
using System.Net;
|
||||
using System.Text.RegularExpressions;
|
||||
using StellaOps.Concelier.Models;
|
||||
using StellaOps.Concelier.Storage.Mongo.Documents;
|
||||
using StellaOps.Concelier.Storage.Mongo.Dtos;
|
||||
using StellaOps.Concelier.Storage.Mongo;
|
||||
using StellaOps.Concelier.Storage.Mongo;
|
||||
|
||||
namespace StellaOps.Concelier.Connector.CertCc.Internal;
|
||||
|
||||
|
||||
@@ -11,8 +11,8 @@ using StellaOps.Concelier.Connector.Common;
|
||||
using StellaOps.Concelier.Connector.Common.Fetch;
|
||||
using StellaOps.Concelier.Storage.Mongo;
|
||||
using StellaOps.Concelier.Storage.Mongo.Advisories;
|
||||
using StellaOps.Concelier.Storage.Mongo.Documents;
|
||||
using StellaOps.Concelier.Storage.Mongo.Dtos;
|
||||
using StellaOps.Concelier.Storage.Mongo;
|
||||
using StellaOps.Concelier.Storage.Mongo;
|
||||
using StellaOps.Plugin;
|
||||
|
||||
namespace StellaOps.Concelier.Connector.CertFr;
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using StellaOps.Concelier.Storage.Mongo.Documents;
|
||||
using StellaOps.Concelier.Storage.Mongo;
|
||||
|
||||
namespace StellaOps.Concelier.Connector.CertFr.Internal;
|
||||
|
||||
|
||||
@@ -14,8 +14,8 @@ using StellaOps.Concelier.Connector.Common;
|
||||
using StellaOps.Concelier.Connector.Common.Fetch;
|
||||
using StellaOps.Concelier.Storage.Mongo;
|
||||
using StellaOps.Concelier.Storage.Mongo.Advisories;
|
||||
using StellaOps.Concelier.Storage.Mongo.Documents;
|
||||
using StellaOps.Concelier.Storage.Mongo.Dtos;
|
||||
using StellaOps.Concelier.Storage.Mongo;
|
||||
using StellaOps.Concelier.Storage.Mongo;
|
||||
using StellaOps.Plugin;
|
||||
|
||||
namespace StellaOps.Concelier.Connector.CertIn;
|
||||
|
||||
@@ -68,7 +68,7 @@ public sealed record TimeWindowCursorState(DateTimeOffset? LastWindowStart, Date
|
||||
{
|
||||
return value.BsonType switch
|
||||
{
|
||||
BsonType.DateTime => DateTime.SpecifyKind(value.ToUniversalTime(), DateTimeKind.Utc),
|
||||
BsonType.DateTime => new DateTimeOffset(value.ToUniversalTime(), TimeSpan.Zero),
|
||||
BsonType.String when DateTimeOffset.TryParse(value.AsString, out var parsed) => parsed.ToUniversalTime(),
|
||||
_ => null,
|
||||
};
|
||||
|
||||
@@ -1,5 +1,6 @@
|
||||
using System.Collections.Concurrent;
|
||||
using System.IO;
|
||||
using StellaOps.Concelier.Storage.Mongo;
|
||||
|
||||
namespace StellaOps.Concelier.Connector.Common.Fetch;
|
||||
|
||||
@@ -9,6 +10,12 @@ namespace StellaOps.Concelier.Connector.Common.Fetch;
|
||||
public sealed class RawDocumentStorage
|
||||
{
|
||||
private readonly ConcurrentDictionary<Guid, byte[]> _blobs = new();
|
||||
private readonly IDocumentStore? _documentStore;
|
||||
|
||||
public RawDocumentStorage(IDocumentStore? documentStore = null)
|
||||
{
|
||||
_documentStore = documentStore;
|
||||
}
|
||||
|
||||
public Task<Guid> UploadAsync(
|
||||
string sourceName,
|
||||
@@ -16,7 +23,7 @@ public sealed class RawDocumentStorage
|
||||
byte[] content,
|
||||
string? contentType,
|
||||
CancellationToken cancellationToken)
|
||||
=> UploadAsync(sourceName, uri, content, contentType, expiresAt: null, cancellationToken);
|
||||
=> UploadAsync(sourceName, uri, content, contentType, ExpiresAt: null, cancellationToken);
|
||||
|
||||
public async Task<Guid> UploadAsync(
|
||||
string sourceName,
|
||||
@@ -39,11 +46,21 @@ public sealed class RawDocumentStorage
|
||||
return id;
|
||||
}
|
||||
|
||||
public Task<byte[]> DownloadAsync(Guid id, CancellationToken cancellationToken)
|
||||
public async Task<byte[]> DownloadAsync(Guid id, CancellationToken cancellationToken)
|
||||
{
|
||||
if (_blobs.TryGetValue(id, out var bytes))
|
||||
{
|
||||
return Task.FromResult(bytes);
|
||||
return bytes;
|
||||
}
|
||||
|
||||
if (_documentStore is not null)
|
||||
{
|
||||
var record = await _documentStore.FindAsync(id, cancellationToken).ConfigureAwait(false);
|
||||
if (record?.Payload is { Length: > 0 })
|
||||
{
|
||||
_blobs[id] = record.Payload;
|
||||
return record.Payload;
|
||||
}
|
||||
}
|
||||
|
||||
throw new FileNotFoundException($"Blob {id} not found.");
|
||||
|
||||
File diff suppressed because it is too large
Load Diff
@@ -9,160 +9,160 @@ using StellaOps.Concelier.Connector.Common.Xml;
|
||||
using StellaOps.Concelier.Core.Aoc;
|
||||
using StellaOps.Concelier.Core.Linksets;
|
||||
using StellaOps.Concelier.Storage.Mongo;
|
||||
|
||||
namespace StellaOps.Concelier.Connector.Common.Http;
|
||||
|
||||
public static class ServiceCollectionExtensions
|
||||
{
|
||||
/// <summary>
|
||||
/// Registers a named HTTP client configured for a source connector with allowlisted hosts and sensible defaults.
|
||||
/// </summary>
|
||||
public static IHttpClientBuilder AddSourceHttpClient(this IServiceCollection services, string name, Action<SourceHttpClientOptions> configure)
|
||||
=> services.AddSourceHttpClient(name, (_, options) => configure(options));
|
||||
|
||||
public static IHttpClientBuilder AddSourceHttpClient(this IServiceCollection services, string name, Action<IServiceProvider, SourceHttpClientOptions> configure)
|
||||
{
|
||||
ArgumentNullException.ThrowIfNull(services);
|
||||
ArgumentException.ThrowIfNullOrEmpty(name);
|
||||
ArgumentNullException.ThrowIfNull(configure);
|
||||
|
||||
services.AddOptions<SourceHttpClientOptions>(name).Configure<IServiceProvider>((options, sp) =>
|
||||
{
|
||||
configure(sp, options);
|
||||
SourceHttpClientConfigurationBinder.Apply(sp, name, options);
|
||||
});
|
||||
|
||||
return services
|
||||
.AddHttpClient(name)
|
||||
.ConfigureHttpClient((sp, client) =>
|
||||
{
|
||||
var options = sp.GetRequiredService<IOptionsMonitor<SourceHttpClientOptions>>().Get(name);
|
||||
|
||||
if (options.BaseAddress is not null)
|
||||
{
|
||||
client.BaseAddress = options.BaseAddress;
|
||||
}
|
||||
|
||||
client.Timeout = options.Timeout;
|
||||
client.DefaultRequestHeaders.UserAgent.Clear();
|
||||
client.DefaultRequestHeaders.UserAgent.ParseAdd(options.UserAgent);
|
||||
client.DefaultRequestVersion = options.RequestVersion;
|
||||
client.DefaultVersionPolicy = options.VersionPolicy;
|
||||
|
||||
foreach (var header in options.DefaultRequestHeaders)
|
||||
{
|
||||
client.DefaultRequestHeaders.TryAddWithoutValidation(header.Key, header.Value);
|
||||
}
|
||||
})
|
||||
.ConfigurePrimaryHttpMessageHandler((sp) =>
|
||||
{
|
||||
var options = sp.GetRequiredService<IOptionsMonitor<SourceHttpClientOptions>>().Get(name).Clone();
|
||||
var handler = new SocketsHttpHandler
|
||||
{
|
||||
AllowAutoRedirect = options.AllowAutoRedirect,
|
||||
AutomaticDecompression = DecompressionMethods.All,
|
||||
EnableMultipleHttp2Connections = options.EnableMultipleHttp2Connections,
|
||||
};
|
||||
options.ConfigureHandler?.Invoke(handler);
|
||||
ApplyProxySettings(handler, options);
|
||||
|
||||
if (options.ServerCertificateCustomValidation is not null)
|
||||
{
|
||||
handler.SslOptions.RemoteCertificateValidationCallback = (_, certificate, chain, sslPolicyErrors) =>
|
||||
{
|
||||
X509Certificate2? certToValidate = certificate as X509Certificate2;
|
||||
X509Certificate2? disposable = null;
|
||||
if (certToValidate is null && certificate is not null)
|
||||
{
|
||||
disposable = X509CertificateLoader.LoadCertificate(certificate.Export(X509ContentType.Cert));
|
||||
certToValidate = disposable;
|
||||
}
|
||||
|
||||
try
|
||||
{
|
||||
return options.ServerCertificateCustomValidation(certToValidate, chain, sslPolicyErrors);
|
||||
}
|
||||
finally
|
||||
{
|
||||
disposable?.Dispose();
|
||||
}
|
||||
};
|
||||
}
|
||||
else if (options.TrustedRootCertificates.Count > 0 && handler.SslOptions.RemoteCertificateValidationCallback is null)
|
||||
{
|
||||
handler.SslOptions.RemoteCertificateValidationCallback = (_, certificate, chain, errors) =>
|
||||
{
|
||||
if (errors == SslPolicyErrors.None)
|
||||
{
|
||||
return true;
|
||||
}
|
||||
|
||||
if (certificate is null)
|
||||
{
|
||||
return false;
|
||||
}
|
||||
|
||||
X509Certificate2? certToValidate = certificate as X509Certificate2;
|
||||
X509Certificate2? disposable = null;
|
||||
var trustedRootCopies = new X509Certificate2Collection();
|
||||
try
|
||||
{
|
||||
if (certToValidate is null)
|
||||
{
|
||||
disposable = X509CertificateLoader.LoadCertificate(certificate.Export(X509ContentType.Cert));
|
||||
certToValidate = disposable;
|
||||
}
|
||||
|
||||
foreach (var root in options.TrustedRootCertificates)
|
||||
{
|
||||
trustedRootCopies.Add(new X509Certificate2(root.RawData));
|
||||
}
|
||||
|
||||
using var customChain = new X509Chain();
|
||||
customChain.ChainPolicy.TrustMode = X509ChainTrustMode.CustomRootTrust;
|
||||
customChain.ChainPolicy.CustomTrustStore.Clear();
|
||||
customChain.ChainPolicy.CustomTrustStore.AddRange(trustedRootCopies);
|
||||
customChain.ChainPolicy.RevocationMode = X509RevocationMode.NoCheck;
|
||||
customChain.ChainPolicy.VerificationFlags = X509VerificationFlags.NoFlag;
|
||||
|
||||
if (chain is not null)
|
||||
{
|
||||
foreach (var element in chain.ChainElements)
|
||||
{
|
||||
customChain.ChainPolicy.ExtraStore.Add(element.Certificate);
|
||||
}
|
||||
}
|
||||
|
||||
return certToValidate is not null && customChain.Build(certToValidate);
|
||||
}
|
||||
finally
|
||||
{
|
||||
foreach (X509Certificate2 root in trustedRootCopies)
|
||||
{
|
||||
root.Dispose();
|
||||
}
|
||||
|
||||
disposable?.Dispose();
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
return handler;
|
||||
})
|
||||
.AddHttpMessageHandler(sp =>
|
||||
{
|
||||
var options = sp.GetRequiredService<IOptionsMonitor<SourceHttpClientOptions>>().Get(name).Clone();
|
||||
return new AllowlistedHttpMessageHandler(options);
|
||||
});
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Registers shared helpers used by source connectors.
|
||||
/// </summary>
|
||||
public static IServiceCollection AddSourceCommon(this IServiceCollection services)
|
||||
{
|
||||
ArgumentNullException.ThrowIfNull(services);
|
||||
|
||||
|
||||
namespace StellaOps.Concelier.Connector.Common.Http;
|
||||
|
||||
public static class ServiceCollectionExtensions
|
||||
{
|
||||
/// <summary>
|
||||
/// Registers a named HTTP client configured for a source connector with allowlisted hosts and sensible defaults.
|
||||
/// </summary>
|
||||
public static IHttpClientBuilder AddSourceHttpClient(this IServiceCollection services, string name, Action<SourceHttpClientOptions> configure)
|
||||
=> services.AddSourceHttpClient(name, (_, options) => configure(options));
|
||||
|
||||
public static IHttpClientBuilder AddSourceHttpClient(this IServiceCollection services, string name, Action<IServiceProvider, SourceHttpClientOptions> configure)
|
||||
{
|
||||
ArgumentNullException.ThrowIfNull(services);
|
||||
ArgumentException.ThrowIfNullOrEmpty(name);
|
||||
ArgumentNullException.ThrowIfNull(configure);
|
||||
|
||||
services.AddOptions<SourceHttpClientOptions>(name).Configure<IServiceProvider>((options, sp) =>
|
||||
{
|
||||
configure(sp, options);
|
||||
SourceHttpClientConfigurationBinder.Apply(sp, name, options);
|
||||
});
|
||||
|
||||
return services
|
||||
.AddHttpClient(name)
|
||||
.ConfigureHttpClient((sp, client) =>
|
||||
{
|
||||
var options = sp.GetRequiredService<IOptionsMonitor<SourceHttpClientOptions>>().Get(name);
|
||||
|
||||
if (options.BaseAddress is not null)
|
||||
{
|
||||
client.BaseAddress = options.BaseAddress;
|
||||
}
|
||||
|
||||
client.Timeout = options.Timeout;
|
||||
client.DefaultRequestHeaders.UserAgent.Clear();
|
||||
client.DefaultRequestHeaders.UserAgent.ParseAdd(options.UserAgent);
|
||||
client.DefaultRequestVersion = options.RequestVersion;
|
||||
client.DefaultVersionPolicy = options.VersionPolicy;
|
||||
|
||||
foreach (var header in options.DefaultRequestHeaders)
|
||||
{
|
||||
client.DefaultRequestHeaders.TryAddWithoutValidation(header.Key, header.Value);
|
||||
}
|
||||
})
|
||||
.ConfigurePrimaryHttpMessageHandler((sp) =>
|
||||
{
|
||||
var options = sp.GetRequiredService<IOptionsMonitor<SourceHttpClientOptions>>().Get(name).Clone();
|
||||
var handler = new SocketsHttpHandler
|
||||
{
|
||||
AllowAutoRedirect = options.AllowAutoRedirect,
|
||||
AutomaticDecompression = DecompressionMethods.All,
|
||||
EnableMultipleHttp2Connections = options.EnableMultipleHttp2Connections,
|
||||
};
|
||||
options.ConfigureHandler?.Invoke(handler);
|
||||
ApplyProxySettings(handler, options);
|
||||
|
||||
if (options.ServerCertificateCustomValidation is not null)
|
||||
{
|
||||
handler.SslOptions.RemoteCertificateValidationCallback = (_, certificate, chain, sslPolicyErrors) =>
|
||||
{
|
||||
X509Certificate2? certToValidate = certificate as X509Certificate2;
|
||||
X509Certificate2? disposable = null;
|
||||
if (certToValidate is null && certificate is not null)
|
||||
{
|
||||
disposable = X509CertificateLoader.LoadCertificate(certificate.Export(X509ContentType.Cert));
|
||||
certToValidate = disposable;
|
||||
}
|
||||
|
||||
try
|
||||
{
|
||||
return options.ServerCertificateCustomValidation(certToValidate, chain, sslPolicyErrors);
|
||||
}
|
||||
finally
|
||||
{
|
||||
disposable?.Dispose();
|
||||
}
|
||||
};
|
||||
}
|
||||
else if (options.TrustedRootCertificates.Count > 0 && handler.SslOptions.RemoteCertificateValidationCallback is null)
|
||||
{
|
||||
handler.SslOptions.RemoteCertificateValidationCallback = (_, certificate, chain, errors) =>
|
||||
{
|
||||
if (errors == SslPolicyErrors.None)
|
||||
{
|
||||
return true;
|
||||
}
|
||||
|
||||
if (certificate is null)
|
||||
{
|
||||
return false;
|
||||
}
|
||||
|
||||
X509Certificate2? certToValidate = certificate as X509Certificate2;
|
||||
X509Certificate2? disposable = null;
|
||||
var trustedRootCopies = new X509Certificate2Collection();
|
||||
try
|
||||
{
|
||||
if (certToValidate is null)
|
||||
{
|
||||
disposable = X509CertificateLoader.LoadCertificate(certificate.Export(X509ContentType.Cert));
|
||||
certToValidate = disposable;
|
||||
}
|
||||
|
||||
foreach (var root in options.TrustedRootCertificates)
|
||||
{
|
||||
trustedRootCopies.Add(new X509Certificate2(root.RawData));
|
||||
}
|
||||
|
||||
using var customChain = new X509Chain();
|
||||
customChain.ChainPolicy.TrustMode = X509ChainTrustMode.CustomRootTrust;
|
||||
customChain.ChainPolicy.CustomTrustStore.Clear();
|
||||
customChain.ChainPolicy.CustomTrustStore.AddRange(trustedRootCopies);
|
||||
customChain.ChainPolicy.RevocationMode = X509RevocationMode.NoCheck;
|
||||
customChain.ChainPolicy.VerificationFlags = X509VerificationFlags.NoFlag;
|
||||
|
||||
if (chain is not null)
|
||||
{
|
||||
foreach (var element in chain.ChainElements)
|
||||
{
|
||||
customChain.ChainPolicy.ExtraStore.Add(element.Certificate);
|
||||
}
|
||||
}
|
||||
|
||||
return certToValidate is not null && customChain.Build(certToValidate);
|
||||
}
|
||||
finally
|
||||
{
|
||||
foreach (X509Certificate2 root in trustedRootCopies)
|
||||
{
|
||||
root.Dispose();
|
||||
}
|
||||
|
||||
disposable?.Dispose();
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
return handler;
|
||||
})
|
||||
.AddHttpMessageHandler(sp =>
|
||||
{
|
||||
var options = sp.GetRequiredService<IOptionsMonitor<SourceHttpClientOptions>>().Get(name).Clone();
|
||||
return new AllowlistedHttpMessageHandler(options);
|
||||
});
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Registers shared helpers used by source connectors.
|
||||
/// </summary>
|
||||
public static IServiceCollection AddSourceCommon(this IServiceCollection services)
|
||||
{
|
||||
ArgumentNullException.ThrowIfNull(services);
|
||||
|
||||
services.AddSingleton<Json.JsonSchemaValidator>();
|
||||
services.AddSingleton<Json.IJsonSchemaValidator>(sp => sp.GetRequiredService<Json.JsonSchemaValidator>());
|
||||
services.AddSingleton<XmlSchemaValidator>();
|
||||
@@ -170,40 +170,40 @@ public static class ServiceCollectionExtensions
|
||||
services.AddSingleton<Fetch.IJitterSource, Fetch.CryptoJitterSource>();
|
||||
services.AddConcelierAocGuards();
|
||||
services.AddConcelierLinksetMappers();
|
||||
services.TryAddSingleton<IDocumentStore, InMemoryDocumentStore>();
|
||||
services.AddSingleton<Fetch.RawDocumentStorage>();
|
||||
services.AddSingleton<Fetch.SourceFetchService>();
|
||||
services.TryAddScoped<IDocumentStore, InMemoryDocumentStore>();
|
||||
services.AddScoped<Fetch.RawDocumentStorage>();
|
||||
services.AddScoped<Fetch.SourceFetchService>();
|
||||
|
||||
return services;
|
||||
}
|
||||
|
||||
private static void ApplyProxySettings(SocketsHttpHandler handler, SourceHttpClientOptions options)
|
||||
{
|
||||
if (options.ProxyAddress is null)
|
||||
{
|
||||
return;
|
||||
}
|
||||
|
||||
var proxy = new WebProxy(options.ProxyAddress)
|
||||
{
|
||||
BypassProxyOnLocal = options.ProxyBypassOnLocal,
|
||||
UseDefaultCredentials = options.ProxyUseDefaultCredentials,
|
||||
};
|
||||
|
||||
if (options.ProxyBypassList.Count > 0)
|
||||
{
|
||||
proxy.BypassList = options.ProxyBypassList.ToArray();
|
||||
}
|
||||
|
||||
if (!options.ProxyUseDefaultCredentials
|
||||
&& !string.IsNullOrWhiteSpace(options.ProxyUsername))
|
||||
{
|
||||
proxy.Credentials = new NetworkCredential(
|
||||
options.ProxyUsername,
|
||||
options.ProxyPassword ?? string.Empty);
|
||||
}
|
||||
|
||||
handler.Proxy = proxy;
|
||||
handler.UseProxy = true;
|
||||
}
|
||||
}
|
||||
|
||||
private static void ApplyProxySettings(SocketsHttpHandler handler, SourceHttpClientOptions options)
|
||||
{
|
||||
if (options.ProxyAddress is null)
|
||||
{
|
||||
return;
|
||||
}
|
||||
|
||||
var proxy = new WebProxy(options.ProxyAddress)
|
||||
{
|
||||
BypassProxyOnLocal = options.ProxyBypassOnLocal,
|
||||
UseDefaultCredentials = options.ProxyUseDefaultCredentials,
|
||||
};
|
||||
|
||||
if (options.ProxyBypassList.Count > 0)
|
||||
{
|
||||
proxy.BypassList = options.ProxyBypassList.ToArray();
|
||||
}
|
||||
|
||||
if (!options.ProxyUseDefaultCredentials
|
||||
&& !string.IsNullOrWhiteSpace(options.ProxyUsername))
|
||||
{
|
||||
proxy.Credentials = new NetworkCredential(
|
||||
options.ProxyUsername,
|
||||
options.ProxyPassword ?? string.Empty);
|
||||
}
|
||||
|
||||
handler.Proxy = proxy;
|
||||
handler.UseProxy = true;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -144,18 +144,21 @@ public sealed class SourceStateSeedProcessor
|
||||
|
||||
var existing = await _documentStore.FindBySourceAndUriAsync(source, document.Uri, cancellationToken).ConfigureAwait(false);
|
||||
|
||||
var recordId = document.DocumentId ?? existing?.Id ?? Guid.NewGuid();
|
||||
|
||||
if (existing?.PayloadId is { } oldGridId)
|
||||
{
|
||||
await _rawDocumentStorage.DeleteAsync(oldGridId, cancellationToken).ConfigureAwait(false);
|
||||
}
|
||||
|
||||
var gridId = await _rawDocumentStorage.UploadAsync(
|
||||
_ = await _rawDocumentStorage.UploadAsync(
|
||||
source,
|
||||
document.Uri,
|
||||
payload,
|
||||
document.ContentType,
|
||||
document.ExpiresAt,
|
||||
cancellationToken)
|
||||
cancellationToken,
|
||||
recordId)
|
||||
.ConfigureAwait(false);
|
||||
|
||||
var headers = CloneDictionary(document.Headers);
|
||||
@@ -171,7 +174,7 @@ public sealed class SourceStateSeedProcessor
|
||||
var metadata = CloneDictionary(document.Metadata);
|
||||
|
||||
var record = new MongoContracts.DocumentRecord(
|
||||
document.DocumentId ?? existing?.Id ?? Guid.NewGuid(),
|
||||
recordId,
|
||||
source,
|
||||
document.Uri,
|
||||
document.FetchedAt ?? completedAt,
|
||||
@@ -182,8 +185,9 @@ public sealed class SourceStateSeedProcessor
|
||||
metadata,
|
||||
document.Etag,
|
||||
document.LastModified,
|
||||
gridId,
|
||||
document.ExpiresAt);
|
||||
recordId,
|
||||
document.ExpiresAt,
|
||||
payload);
|
||||
|
||||
var upserted = await _documentStore.UpsertAsync(record, cancellationToken).ConfigureAwait(false);
|
||||
|
||||
|
||||
@@ -17,8 +17,8 @@ using StellaOps.Concelier.Connector.Cve.Configuration;
|
||||
using StellaOps.Concelier.Connector.Cve.Internal;
|
||||
using StellaOps.Concelier.Storage.Mongo;
|
||||
using StellaOps.Concelier.Storage.Mongo.Advisories;
|
||||
using StellaOps.Concelier.Storage.Mongo.Documents;
|
||||
using StellaOps.Concelier.Storage.Mongo.Dtos;
|
||||
using StellaOps.Concelier.Storage.Mongo;
|
||||
using StellaOps.Concelier.Storage.Mongo;
|
||||
using StellaOps.Plugin;
|
||||
|
||||
namespace StellaOps.Concelier.Connector.Cve;
|
||||
@@ -510,24 +510,7 @@ public sealed class CveConnector : IFeedConnector
|
||||
|
||||
var sha256 = Convert.ToHexString(SHA256.HashData(payload)).ToLowerInvariant();
|
||||
var lastModified = dto.Modified ?? dto.Published ?? now;
|
||||
ObjectId gridId = ObjectId.Empty;
|
||||
|
||||
try
|
||||
{
|
||||
if (existing?.PayloadId is ObjectId existingGrid && existingGrid != ObjectId.Empty)
|
||||
{
|
||||
gridId = existingGrid;
|
||||
}
|
||||
else
|
||||
{
|
||||
gridId = await _rawDocumentStorage.UploadAsync(SourceName, uri, payload, "application/json", cancellationToken).ConfigureAwait(false);
|
||||
}
|
||||
}
|
||||
catch (Exception ex)
|
||||
{
|
||||
_logger.LogWarning(ex, "Unable to store CVE seed payload for {CveId}", dto.CveId);
|
||||
continue;
|
||||
}
|
||||
await _rawDocumentStorage.UploadAsync(SourceName, uri, payload, "application/json", ExpiresAt: null, cancellationToken, documentId).ConfigureAwait(false);
|
||||
|
||||
var metadata = new Dictionary<string, string>(StringComparer.OrdinalIgnoreCase)
|
||||
{
|
||||
@@ -547,7 +530,8 @@ public sealed class CveConnector : IFeedConnector
|
||||
Metadata: metadata,
|
||||
Etag: null,
|
||||
LastModified: lastModified,
|
||||
PayloadId: gridId);
|
||||
PayloadId: documentId,
|
||||
Payload: payload);
|
||||
|
||||
await _documentStore.UpsertAsync(document, cancellationToken).ConfigureAwait(false);
|
||||
|
||||
|
||||
@@ -2,7 +2,7 @@ using System.Collections.Generic;
|
||||
using System.Linq;
|
||||
using StellaOps.Concelier.Models;
|
||||
using StellaOps.Concelier.Normalization.Cvss;
|
||||
using StellaOps.Concelier.Storage.Mongo.Documents;
|
||||
using StellaOps.Concelier.Storage.Mongo;
|
||||
using NuGet.Versioning;
|
||||
|
||||
namespace StellaOps.Concelier.Connector.Cve.Internal;
|
||||
|
||||
@@ -16,8 +16,8 @@ using StellaOps.Concelier.Connector.Distro.Debian.Configuration;
|
||||
using StellaOps.Concelier.Connector.Distro.Debian.Internal;
|
||||
using StellaOps.Concelier.Storage.Mongo;
|
||||
using StellaOps.Concelier.Storage.Mongo.Advisories;
|
||||
using StellaOps.Concelier.Storage.Mongo.Documents;
|
||||
using StellaOps.Concelier.Storage.Mongo.Dtos;
|
||||
using StellaOps.Concelier.Storage.Mongo;
|
||||
using StellaOps.Concelier.Storage.Mongo;
|
||||
using StellaOps.Plugin;
|
||||
|
||||
namespace StellaOps.Concelier.Connector.Distro.Debian;
|
||||
|
||||
@@ -7,7 +7,7 @@ internal sealed record DebianFetchCacheEntry(string? ETag, DateTimeOffset? LastM
|
||||
{
|
||||
public static DebianFetchCacheEntry Empty { get; } = new(null, null);
|
||||
|
||||
public static DebianFetchCacheEntry FromDocument(StellaOps.Concelier.Storage.Mongo.Documents.DocumentRecord document)
|
||||
public static DebianFetchCacheEntry FromDocument(StellaOps.Concelier.Storage.Mongo.DocumentRecord document)
|
||||
=> new(document.Etag, document.LastModified);
|
||||
|
||||
public static DebianFetchCacheEntry FromBson(BsonDocument document)
|
||||
@@ -54,7 +54,7 @@ internal sealed record DebianFetchCacheEntry(string? ETag, DateTimeOffset? LastM
|
||||
return document;
|
||||
}
|
||||
|
||||
public bool Matches(StellaOps.Concelier.Storage.Mongo.Documents.DocumentRecord document)
|
||||
public bool Matches(StellaOps.Concelier.Storage.Mongo.DocumentRecord document)
|
||||
{
|
||||
if (document is null)
|
||||
{
|
||||
|
||||
@@ -4,7 +4,7 @@ using System.Linq;
|
||||
using StellaOps.Concelier.Models;
|
||||
using StellaOps.Concelier.Normalization.Distro;
|
||||
using StellaOps.Concelier.Connector.Common;
|
||||
using StellaOps.Concelier.Storage.Mongo.Documents;
|
||||
using StellaOps.Concelier.Storage.Mongo;
|
||||
|
||||
namespace StellaOps.Concelier.Connector.Distro.Debian.Internal;
|
||||
|
||||
@@ -146,24 +146,24 @@ internal static class DebianMapper
|
||||
continue;
|
||||
}
|
||||
|
||||
var provenance = new[] { BuildPackageProvenance(package, recordedAt) };
|
||||
var ranges = BuildVersionRanges(package, recordedAt);
|
||||
var normalizedVersions = BuildNormalizedVersions(package, ranges);
|
||||
|
||||
packages.Add(new AffectedPackage(
|
||||
AffectedPackageTypes.Deb,
|
||||
identifier: package.Package.Trim(),
|
||||
platform: package.Release,
|
||||
versionRanges: ranges,
|
||||
statuses: Array.Empty<AffectedPackageStatus>(),
|
||||
provenance: provenance,
|
||||
normalizedVersions: normalizedVersions));
|
||||
var provenance = new[] { BuildPackageProvenance(package, recordedAt) };
|
||||
var ranges = BuildVersionRanges(package, recordedAt);
|
||||
var normalizedVersions = BuildNormalizedVersions(package, ranges);
|
||||
|
||||
packages.Add(new AffectedPackage(
|
||||
AffectedPackageTypes.Deb,
|
||||
identifier: package.Package.Trim(),
|
||||
platform: package.Release,
|
||||
versionRanges: ranges,
|
||||
statuses: Array.Empty<AffectedPackageStatus>(),
|
||||
provenance: provenance,
|
||||
normalizedVersions: normalizedVersions));
|
||||
}
|
||||
|
||||
return packages;
|
||||
}
|
||||
|
||||
private static IReadOnlyList<AffectedVersionRange> BuildVersionRanges(DebianPackageStateDto package, DateTimeOffset recordedAt)
|
||||
private static IReadOnlyList<AffectedVersionRange> BuildVersionRanges(DebianPackageStateDto package, DateTimeOffset recordedAt)
|
||||
{
|
||||
var provenance = new AdvisoryProvenance(
|
||||
DebianConnectorPlugin.SourceName,
|
||||
@@ -255,40 +255,40 @@ internal static class DebianMapper
|
||||
parts.Add($"last:{lastAffected.Trim()}");
|
||||
}
|
||||
|
||||
return parts.Count == 0 ? null : string.Join(" ", parts);
|
||||
}
|
||||
|
||||
private static IReadOnlyList<NormalizedVersionRule> BuildNormalizedVersions(
|
||||
DebianPackageStateDto package,
|
||||
IReadOnlyList<AffectedVersionRange> ranges)
|
||||
{
|
||||
if (ranges.Count == 0)
|
||||
{
|
||||
return Array.Empty<NormalizedVersionRule>();
|
||||
}
|
||||
|
||||
var note = string.IsNullOrWhiteSpace(package.Release)
|
||||
? null
|
||||
: $"debian:{package.Release.Trim()}";
|
||||
|
||||
var rules = new List<NormalizedVersionRule>(ranges.Count);
|
||||
foreach (var range in ranges)
|
||||
{
|
||||
var rule = range.ToNormalizedVersionRule(note);
|
||||
if (rule is not null)
|
||||
{
|
||||
rules.Add(rule);
|
||||
}
|
||||
}
|
||||
|
||||
return rules.Count == 0 ? Array.Empty<NormalizedVersionRule>() : rules;
|
||||
}
|
||||
|
||||
private static void AddExtension(IDictionary<string, string> extensions, string key, string? value)
|
||||
{
|
||||
if (!string.IsNullOrWhiteSpace(value))
|
||||
{
|
||||
extensions[key] = value.Trim();
|
||||
return parts.Count == 0 ? null : string.Join(" ", parts);
|
||||
}
|
||||
|
||||
private static IReadOnlyList<NormalizedVersionRule> BuildNormalizedVersions(
|
||||
DebianPackageStateDto package,
|
||||
IReadOnlyList<AffectedVersionRange> ranges)
|
||||
{
|
||||
if (ranges.Count == 0)
|
||||
{
|
||||
return Array.Empty<NormalizedVersionRule>();
|
||||
}
|
||||
|
||||
var note = string.IsNullOrWhiteSpace(package.Release)
|
||||
? null
|
||||
: $"debian:{package.Release.Trim()}";
|
||||
|
||||
var rules = new List<NormalizedVersionRule>(ranges.Count);
|
||||
foreach (var range in ranges)
|
||||
{
|
||||
var rule = range.ToNormalizedVersionRule(note);
|
||||
if (rule is not null)
|
||||
{
|
||||
rules.Add(rule);
|
||||
}
|
||||
}
|
||||
|
||||
return rules.Count == 0 ? Array.Empty<NormalizedVersionRule>() : rules;
|
||||
}
|
||||
|
||||
private static void AddExtension(IDictionary<string, string> extensions, string key, string? value)
|
||||
{
|
||||
if (!string.IsNullOrWhiteSpace(value))
|
||||
{
|
||||
extensions[key] = value.Trim();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -9,8 +9,8 @@ using StellaOps.Concelier.Normalization.Cvss;
|
||||
using StellaOps.Concelier.Normalization.Distro;
|
||||
using StellaOps.Concelier.Normalization.Identifiers;
|
||||
using StellaOps.Concelier.Normalization.Text;
|
||||
using StellaOps.Concelier.Storage.Mongo.Documents;
|
||||
using StellaOps.Concelier.Storage.Mongo.Dtos;
|
||||
using StellaOps.Concelier.Storage.Mongo;
|
||||
using StellaOps.Concelier.Storage.Mongo;
|
||||
|
||||
namespace StellaOps.Concelier.Connector.Distro.RedHat.Internal;
|
||||
|
||||
|
||||
@@ -14,8 +14,8 @@ using StellaOps.Concelier.Connector.Distro.RedHat.Configuration;
|
||||
using StellaOps.Concelier.Connector.Distro.RedHat.Internal;
|
||||
using StellaOps.Concelier.Storage.Mongo;
|
||||
using StellaOps.Concelier.Storage.Mongo.Advisories;
|
||||
using StellaOps.Concelier.Storage.Mongo.Documents;
|
||||
using StellaOps.Concelier.Storage.Mongo.Dtos;
|
||||
using StellaOps.Concelier.Storage.Mongo;
|
||||
using StellaOps.Concelier.Storage.Mongo;
|
||||
using StellaOps.Plugin;
|
||||
|
||||
namespace StellaOps.Concelier.Connector.Distro.RedHat;
|
||||
|
||||
@@ -7,7 +7,7 @@ internal sealed record SuseFetchCacheEntry(string? ETag, DateTimeOffset? LastMod
|
||||
{
|
||||
public static SuseFetchCacheEntry Empty { get; } = new(null, null);
|
||||
|
||||
public static SuseFetchCacheEntry FromDocument(StellaOps.Concelier.Storage.Mongo.Documents.DocumentRecord document)
|
||||
public static SuseFetchCacheEntry FromDocument(StellaOps.Concelier.Storage.Mongo.DocumentRecord document)
|
||||
=> new(document.Etag, document.LastModified);
|
||||
|
||||
public static SuseFetchCacheEntry FromBson(BsonDocument document)
|
||||
@@ -54,7 +54,7 @@ internal sealed record SuseFetchCacheEntry(string? ETag, DateTimeOffset? LastMod
|
||||
return document;
|
||||
}
|
||||
|
||||
public bool Matches(StellaOps.Concelier.Storage.Mongo.Documents.DocumentRecord document)
|
||||
public bool Matches(StellaOps.Concelier.Storage.Mongo.DocumentRecord document)
|
||||
{
|
||||
if (document is null)
|
||||
{
|
||||
|
||||
@@ -4,7 +4,7 @@ using System.Globalization;
|
||||
using System.Linq;
|
||||
using StellaOps.Concelier.Models;
|
||||
using StellaOps.Concelier.Normalization.Distro;
|
||||
using StellaOps.Concelier.Storage.Mongo.Documents;
|
||||
using StellaOps.Concelier.Storage.Mongo;
|
||||
|
||||
namespace StellaOps.Concelier.Connector.Distro.Suse.Internal;
|
||||
|
||||
@@ -160,16 +160,16 @@ internal static class SuseMapper
|
||||
continue;
|
||||
}
|
||||
|
||||
var normalizedVersions = BuildNormalizedVersions(package, ranges);
|
||||
|
||||
packages.Add(new AffectedPackage(
|
||||
AffectedPackageTypes.Rpm,
|
||||
identifier: nevra!.ToCanonicalString(),
|
||||
platform: package.Platform,
|
||||
versionRanges: ranges,
|
||||
statuses: BuildStatuses(package, affectedProvenance),
|
||||
provenance: new[] { affectedProvenance },
|
||||
normalizedVersions: normalizedVersions));
|
||||
var normalizedVersions = BuildNormalizedVersions(package, ranges);
|
||||
|
||||
packages.Add(new AffectedPackage(
|
||||
AffectedPackageTypes.Rpm,
|
||||
identifier: nevra!.ToCanonicalString(),
|
||||
platform: package.Platform,
|
||||
versionRanges: ranges,
|
||||
statuses: BuildStatuses(package, affectedProvenance),
|
||||
provenance: new[] { affectedProvenance },
|
||||
normalizedVersions: normalizedVersions));
|
||||
}
|
||||
|
||||
return packages.Count == 0
|
||||
@@ -293,7 +293,7 @@ internal static class SuseMapper
|
||||
return !string.IsNullOrWhiteSpace(version) && !string.IsNullOrWhiteSpace(release);
|
||||
}
|
||||
|
||||
private static string? BuildRangeExpression(string? introduced, string? fixedVersion, string? lastAffected)
|
||||
private static string? BuildRangeExpression(string? introduced, string? fixedVersion, string? lastAffected)
|
||||
{
|
||||
var parts = new List<string>(3);
|
||||
if (!string.IsNullOrWhiteSpace(introduced))
|
||||
@@ -311,32 +311,32 @@ internal static class SuseMapper
|
||||
parts.Add($"last:{lastAffected}");
|
||||
}
|
||||
|
||||
return parts.Count == 0 ? null : string.Join(" ", parts);
|
||||
}
|
||||
|
||||
private static IReadOnlyList<NormalizedVersionRule> BuildNormalizedVersions(
|
||||
SusePackageStateDto package,
|
||||
IReadOnlyList<AffectedVersionRange> ranges)
|
||||
{
|
||||
if (ranges.Count == 0)
|
||||
{
|
||||
return Array.Empty<NormalizedVersionRule>();
|
||||
}
|
||||
|
||||
var note = string.IsNullOrWhiteSpace(package.Platform)
|
||||
? null
|
||||
: $"suse:{package.Platform.Trim()}";
|
||||
|
||||
var rules = new List<NormalizedVersionRule>(ranges.Count);
|
||||
foreach (var range in ranges)
|
||||
{
|
||||
var rule = range.ToNormalizedVersionRule(note);
|
||||
if (rule is not null)
|
||||
{
|
||||
rules.Add(rule);
|
||||
}
|
||||
}
|
||||
|
||||
return rules.Count == 0 ? Array.Empty<NormalizedVersionRule>() : rules;
|
||||
}
|
||||
}
|
||||
return parts.Count == 0 ? null : string.Join(" ", parts);
|
||||
}
|
||||
|
||||
private static IReadOnlyList<NormalizedVersionRule> BuildNormalizedVersions(
|
||||
SusePackageStateDto package,
|
||||
IReadOnlyList<AffectedVersionRange> ranges)
|
||||
{
|
||||
if (ranges.Count == 0)
|
||||
{
|
||||
return Array.Empty<NormalizedVersionRule>();
|
||||
}
|
||||
|
||||
var note = string.IsNullOrWhiteSpace(package.Platform)
|
||||
? null
|
||||
: $"suse:{package.Platform.Trim()}";
|
||||
|
||||
var rules = new List<NormalizedVersionRule>(ranges.Count);
|
||||
foreach (var range in ranges)
|
||||
{
|
||||
var rule = range.ToNormalizedVersionRule(note);
|
||||
if (rule is not null)
|
||||
{
|
||||
rules.Add(rule);
|
||||
}
|
||||
}
|
||||
|
||||
return rules.Count == 0 ? Array.Empty<NormalizedVersionRule>() : rules;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -18,8 +18,8 @@ using StellaOps.Concelier.Connector.Distro.Suse.Configuration;
|
||||
using StellaOps.Concelier.Connector.Distro.Suse.Internal;
|
||||
using StellaOps.Concelier.Storage.Mongo;
|
||||
using StellaOps.Concelier.Storage.Mongo.Advisories;
|
||||
using StellaOps.Concelier.Storage.Mongo.Documents;
|
||||
using StellaOps.Concelier.Storage.Mongo.Dtos;
|
||||
using StellaOps.Concelier.Storage.Mongo;
|
||||
using StellaOps.Concelier.Storage.Mongo;
|
||||
using StellaOps.Plugin;
|
||||
|
||||
namespace StellaOps.Concelier.Connector.Distro.Suse;
|
||||
|
||||
@@ -7,7 +7,7 @@ internal sealed record UbuntuFetchCacheEntry(string? ETag, DateTimeOffset? LastM
|
||||
{
|
||||
public static UbuntuFetchCacheEntry Empty { get; } = new(null, null);
|
||||
|
||||
public static UbuntuFetchCacheEntry FromDocument(StellaOps.Concelier.Storage.Mongo.Documents.DocumentRecord document)
|
||||
public static UbuntuFetchCacheEntry FromDocument(StellaOps.Concelier.Storage.Mongo.DocumentRecord document)
|
||||
=> new(document.Etag, document.LastModified);
|
||||
|
||||
public static UbuntuFetchCacheEntry FromBson(BsonDocument document)
|
||||
@@ -54,7 +54,7 @@ internal sealed record UbuntuFetchCacheEntry(string? ETag, DateTimeOffset? LastM
|
||||
return doc;
|
||||
}
|
||||
|
||||
public bool Matches(StellaOps.Concelier.Storage.Mongo.Documents.DocumentRecord document)
|
||||
public bool Matches(StellaOps.Concelier.Storage.Mongo.DocumentRecord document)
|
||||
{
|
||||
if (document is null)
|
||||
{
|
||||
|
||||
@@ -3,7 +3,7 @@ using System.Collections.Generic;
|
||||
using System.Linq;
|
||||
using StellaOps.Concelier.Models;
|
||||
using StellaOps.Concelier.Normalization.Distro;
|
||||
using StellaOps.Concelier.Storage.Mongo.Documents;
|
||||
using StellaOps.Concelier.Storage.Mongo;
|
||||
|
||||
namespace StellaOps.Concelier.Connector.Distro.Ubuntu.Internal;
|
||||
|
||||
@@ -162,11 +162,11 @@ internal static class UbuntuMapper
|
||||
["ubuntu.pocket"] = package.Pocket ?? string.Empty
|
||||
};
|
||||
|
||||
var range = new AffectedVersionRange(
|
||||
rangeKind: "evr",
|
||||
introducedVersion: null,
|
||||
fixedVersion: package.Version,
|
||||
lastAffectedVersion: null,
|
||||
var range = new AffectedVersionRange(
|
||||
rangeKind: "evr",
|
||||
introducedVersion: null,
|
||||
fixedVersion: package.Version,
|
||||
lastAffectedVersion: null,
|
||||
rangeExpression: rangeExpression,
|
||||
provenance: rangeProvenance,
|
||||
primitives: new RangePrimitives(
|
||||
@@ -178,27 +178,27 @@ internal static class UbuntuMapper
|
||||
LastAffected: null),
|
||||
VendorExtensions: extensions));
|
||||
|
||||
var statuses = new[]
|
||||
{
|
||||
new AffectedPackageStatus(DetermineStatus(package), provenance)
|
||||
};
|
||||
|
||||
var normalizedNote = string.IsNullOrWhiteSpace(package.Release)
|
||||
? null
|
||||
: $"ubuntu:{package.Release.Trim()}";
|
||||
var normalizedRule = range.ToNormalizedVersionRule(normalizedNote);
|
||||
var normalizedVersions = normalizedRule is null
|
||||
? Array.Empty<NormalizedVersionRule>()
|
||||
: new[] { normalizedRule };
|
||||
|
||||
list.Add(new AffectedPackage(
|
||||
type: AffectedPackageTypes.Deb,
|
||||
identifier: package.Package,
|
||||
platform: package.Release,
|
||||
versionRanges: new[] { range },
|
||||
statuses: statuses,
|
||||
provenance: new[] { provenance },
|
||||
normalizedVersions: normalizedVersions));
|
||||
var statuses = new[]
|
||||
{
|
||||
new AffectedPackageStatus(DetermineStatus(package), provenance)
|
||||
};
|
||||
|
||||
var normalizedNote = string.IsNullOrWhiteSpace(package.Release)
|
||||
? null
|
||||
: $"ubuntu:{package.Release.Trim()}";
|
||||
var normalizedRule = range.ToNormalizedVersionRule(normalizedNote);
|
||||
var normalizedVersions = normalizedRule is null
|
||||
? Array.Empty<NormalizedVersionRule>()
|
||||
: new[] { normalizedRule };
|
||||
|
||||
list.Add(new AffectedPackage(
|
||||
type: AffectedPackageTypes.Deb,
|
||||
identifier: package.Package,
|
||||
platform: package.Release,
|
||||
versionRanges: new[] { range },
|
||||
statuses: statuses,
|
||||
provenance: new[] { provenance },
|
||||
normalizedVersions: normalizedVersions));
|
||||
}
|
||||
|
||||
return list.Count == 0
|
||||
|
||||
@@ -13,8 +13,8 @@ using StellaOps.Concelier.Connector.Distro.Ubuntu.Configuration;
|
||||
using StellaOps.Concelier.Connector.Distro.Ubuntu.Internal;
|
||||
using StellaOps.Concelier.Storage.Mongo;
|
||||
using StellaOps.Concelier.Storage.Mongo.Advisories;
|
||||
using StellaOps.Concelier.Storage.Mongo.Documents;
|
||||
using StellaOps.Concelier.Storage.Mongo.Dtos;
|
||||
using StellaOps.Concelier.Storage.Mongo;
|
||||
using StellaOps.Concelier.Storage.Mongo;
|
||||
using StellaOps.Plugin;
|
||||
using StellaOps.Cryptography;
|
||||
|
||||
|
||||
@@ -13,8 +13,8 @@ using StellaOps.Concelier.Connector.Ghsa.Configuration;
|
||||
using StellaOps.Concelier.Connector.Ghsa.Internal;
|
||||
using StellaOps.Concelier.Storage.Mongo;
|
||||
using StellaOps.Concelier.Storage.Mongo.Advisories;
|
||||
using StellaOps.Concelier.Storage.Mongo.Documents;
|
||||
using StellaOps.Concelier.Storage.Mongo.Dtos;
|
||||
using StellaOps.Concelier.Storage.Mongo;
|
||||
using StellaOps.Concelier.Storage.Mongo;
|
||||
using StellaOps.Plugin;
|
||||
|
||||
namespace StellaOps.Concelier.Connector.Ghsa;
|
||||
|
||||
@@ -5,7 +5,7 @@ using System.Text;
|
||||
using StellaOps.Concelier.Models;
|
||||
using StellaOps.Concelier.Normalization.Cvss;
|
||||
using StellaOps.Concelier.Normalization.SemVer;
|
||||
using StellaOps.Concelier.Storage.Mongo.Documents;
|
||||
using StellaOps.Concelier.Storage.Mongo;
|
||||
|
||||
namespace StellaOps.Concelier.Connector.Ghsa.Internal;
|
||||
|
||||
|
||||
@@ -25,8 +25,8 @@ using StellaOps.Concelier.Connector.Ics.Cisa.Configuration;
|
||||
using StellaOps.Concelier.Connector.Ics.Cisa.Internal;
|
||||
using StellaOps.Concelier.Storage.Mongo;
|
||||
using StellaOps.Concelier.Storage.Mongo.Advisories;
|
||||
using StellaOps.Concelier.Storage.Mongo.Documents;
|
||||
using StellaOps.Concelier.Storage.Mongo.Dtos;
|
||||
using StellaOps.Concelier.Storage.Mongo;
|
||||
using StellaOps.Concelier.Storage.Mongo;
|
||||
using StellaOps.Concelier.Normalization.SemVer;
|
||||
using StellaOps.Plugin;
|
||||
|
||||
|
||||
@@ -14,8 +14,8 @@ using StellaOps.Concelier.Connector.Ics.Kaspersky.Configuration;
|
||||
using StellaOps.Concelier.Connector.Ics.Kaspersky.Internal;
|
||||
using StellaOps.Concelier.Storage.Mongo;
|
||||
using StellaOps.Concelier.Storage.Mongo.Advisories;
|
||||
using StellaOps.Concelier.Storage.Mongo.Documents;
|
||||
using StellaOps.Concelier.Storage.Mongo.Dtos;
|
||||
using StellaOps.Concelier.Storage.Mongo;
|
||||
using StellaOps.Concelier.Storage.Mongo;
|
||||
using StellaOps.Plugin;
|
||||
|
||||
namespace StellaOps.Concelier.Connector.Ics.Kaspersky;
|
||||
|
||||
@@ -6,8 +6,8 @@ using StellaOps.Concelier.Connector.Common;
|
||||
using StellaOps.Concelier.Normalization.Cvss;
|
||||
using StellaOps.Concelier.Normalization.Identifiers;
|
||||
using StellaOps.Concelier.Normalization.Text;
|
||||
using StellaOps.Concelier.Storage.Mongo.Documents;
|
||||
using StellaOps.Concelier.Storage.Mongo.Dtos;
|
||||
using StellaOps.Concelier.Storage.Mongo;
|
||||
using StellaOps.Concelier.Storage.Mongo;
|
||||
using StellaOps.Concelier.Storage.Mongo.JpFlags;
|
||||
|
||||
namespace StellaOps.Concelier.Connector.Jvn.Internal;
|
||||
@@ -156,12 +156,12 @@ internal static class JvnAdvisoryMapper
|
||||
{
|
||||
var packages = new List<AffectedPackage>();
|
||||
|
||||
foreach (var product in detail.Affected)
|
||||
{
|
||||
if (string.IsNullOrWhiteSpace(product.Cpe))
|
||||
{
|
||||
continue;
|
||||
}
|
||||
foreach (var product in detail.Affected)
|
||||
{
|
||||
if (string.IsNullOrWhiteSpace(product.Cpe))
|
||||
{
|
||||
continue;
|
||||
}
|
||||
|
||||
if (!string.IsNullOrWhiteSpace(product.Status) && !product.Status.StartsWith("vulnerable", StringComparison.OrdinalIgnoreCase))
|
||||
{
|
||||
@@ -200,66 +200,66 @@ internal static class JvnAdvisoryMapper
|
||||
|
||||
var platform = product.Vendor ?? product.CpeVendor;
|
||||
|
||||
var versionRanges = BuildVersionRanges(product, recordedAt, provenance[0]);
|
||||
|
||||
packages.Add(new AffectedPackage(
|
||||
AffectedPackageTypes.Cpe,
|
||||
cpe!,
|
||||
platform: platform,
|
||||
versionRanges: versionRanges,
|
||||
statuses: Array.Empty<AffectedPackageStatus>(),
|
||||
provenance: provenance.ToArray()));
|
||||
}
|
||||
|
||||
return packages;
|
||||
}
|
||||
|
||||
private static IReadOnlyList<AffectedVersionRange> BuildVersionRanges(JvnAffectedProductDto product, DateTimeOffset recordedAt, AdvisoryProvenance provenance)
|
||||
{
|
||||
var extensions = new Dictionary<string, string>(StringComparer.OrdinalIgnoreCase);
|
||||
if (!string.IsNullOrWhiteSpace(product.Version))
|
||||
{
|
||||
extensions["jvn.version"] = product.Version!;
|
||||
}
|
||||
|
||||
if (!string.IsNullOrWhiteSpace(product.Build))
|
||||
{
|
||||
extensions["jvn.build"] = product.Build!;
|
||||
}
|
||||
|
||||
if (!string.IsNullOrWhiteSpace(product.Description))
|
||||
{
|
||||
extensions["jvn.description"] = product.Description!;
|
||||
}
|
||||
|
||||
if (!string.IsNullOrWhiteSpace(product.Status))
|
||||
{
|
||||
extensions["jvn.status"] = product.Status!;
|
||||
}
|
||||
|
||||
if (extensions.Count == 0)
|
||||
{
|
||||
return Array.Empty<AffectedVersionRange>();
|
||||
}
|
||||
|
||||
var primitives = new RangePrimitives(
|
||||
null,
|
||||
null,
|
||||
null,
|
||||
extensions);
|
||||
|
||||
var expression = product.Version;
|
||||
var range = new AffectedVersionRange(
|
||||
rangeKind: "cpe",
|
||||
introducedVersion: null,
|
||||
fixedVersion: null,
|
||||
lastAffectedVersion: null,
|
||||
rangeExpression: string.IsNullOrWhiteSpace(expression) ? null : expression,
|
||||
provenance: provenance,
|
||||
primitives: primitives);
|
||||
|
||||
return new[] { range };
|
||||
}
|
||||
var versionRanges = BuildVersionRanges(product, recordedAt, provenance[0]);
|
||||
|
||||
packages.Add(new AffectedPackage(
|
||||
AffectedPackageTypes.Cpe,
|
||||
cpe!,
|
||||
platform: platform,
|
||||
versionRanges: versionRanges,
|
||||
statuses: Array.Empty<AffectedPackageStatus>(),
|
||||
provenance: provenance.ToArray()));
|
||||
}
|
||||
|
||||
return packages;
|
||||
}
|
||||
|
||||
private static IReadOnlyList<AffectedVersionRange> BuildVersionRanges(JvnAffectedProductDto product, DateTimeOffset recordedAt, AdvisoryProvenance provenance)
|
||||
{
|
||||
var extensions = new Dictionary<string, string>(StringComparer.OrdinalIgnoreCase);
|
||||
if (!string.IsNullOrWhiteSpace(product.Version))
|
||||
{
|
||||
extensions["jvn.version"] = product.Version!;
|
||||
}
|
||||
|
||||
if (!string.IsNullOrWhiteSpace(product.Build))
|
||||
{
|
||||
extensions["jvn.build"] = product.Build!;
|
||||
}
|
||||
|
||||
if (!string.IsNullOrWhiteSpace(product.Description))
|
||||
{
|
||||
extensions["jvn.description"] = product.Description!;
|
||||
}
|
||||
|
||||
if (!string.IsNullOrWhiteSpace(product.Status))
|
||||
{
|
||||
extensions["jvn.status"] = product.Status!;
|
||||
}
|
||||
|
||||
if (extensions.Count == 0)
|
||||
{
|
||||
return Array.Empty<AffectedVersionRange>();
|
||||
}
|
||||
|
||||
var primitives = new RangePrimitives(
|
||||
null,
|
||||
null,
|
||||
null,
|
||||
extensions);
|
||||
|
||||
var expression = product.Version;
|
||||
var range = new AffectedVersionRange(
|
||||
rangeKind: "cpe",
|
||||
introducedVersion: null,
|
||||
fixedVersion: null,
|
||||
lastAffectedVersion: null,
|
||||
rangeExpression: string.IsNullOrWhiteSpace(expression) ? null : expression,
|
||||
provenance: provenance,
|
||||
primitives: primitives);
|
||||
|
||||
return new[] { range };
|
||||
}
|
||||
|
||||
private static IReadOnlyList<CvssMetric> BuildCvss(JvnDetailDto detail, DateTimeOffset recordedAt, out string? severity)
|
||||
{
|
||||
|
||||
@@ -11,8 +11,8 @@ using StellaOps.Concelier.Connector.Jvn.Configuration;
|
||||
using StellaOps.Concelier.Connector.Jvn.Internal;
|
||||
using StellaOps.Concelier.Storage.Mongo;
|
||||
using StellaOps.Concelier.Storage.Mongo.Advisories;
|
||||
using StellaOps.Concelier.Storage.Mongo.Documents;
|
||||
using StellaOps.Concelier.Storage.Mongo.Dtos;
|
||||
using StellaOps.Concelier.Storage.Mongo;
|
||||
using StellaOps.Concelier.Storage.Mongo;
|
||||
using StellaOps.Concelier.Storage.Mongo.JpFlags;
|
||||
using StellaOps.Plugin;
|
||||
|
||||
|
||||
@@ -16,8 +16,8 @@ using StellaOps.Concelier.Connector.Kev.Configuration;
|
||||
using StellaOps.Concelier.Connector.Kev.Internal;
|
||||
using StellaOps.Concelier.Storage.Mongo;
|
||||
using StellaOps.Concelier.Storage.Mongo.Advisories;
|
||||
using StellaOps.Concelier.Storage.Mongo.Documents;
|
||||
using StellaOps.Concelier.Storage.Mongo.Dtos;
|
||||
using StellaOps.Concelier.Storage.Mongo;
|
||||
using StellaOps.Concelier.Storage.Mongo;
|
||||
using StellaOps.Plugin;
|
||||
|
||||
namespace StellaOps.Concelier.Connector.Kev;
|
||||
|
||||
@@ -1,12 +1,12 @@
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.Linq;
|
||||
using System.Text.RegularExpressions;
|
||||
using StellaOps.Concelier.Models;
|
||||
using StellaOps.Concelier.Storage.Mongo.Documents;
|
||||
|
||||
namespace StellaOps.Concelier.Connector.Kisa.Internal;
|
||||
|
||||
using System.Linq;
|
||||
using System.Text.RegularExpressions;
|
||||
using StellaOps.Concelier.Models;
|
||||
using StellaOps.Concelier.Storage.Mongo;
|
||||
|
||||
namespace StellaOps.Concelier.Connector.Kisa.Internal;
|
||||
|
||||
internal static class KisaMapper
|
||||
{
|
||||
public static Advisory Map(KisaParsedAdvisory dto, DocumentRecord document, DateTimeOffset recordedAt)
|
||||
@@ -97,410 +97,410 @@ internal static class KisaMapper
|
||||
}
|
||||
|
||||
var packages = new List<AffectedPackage>(dto.Products.Count);
|
||||
foreach (var product in dto.Products)
|
||||
{
|
||||
var vendor = string.IsNullOrWhiteSpace(product.Vendor) ? "Unknown" : product.Vendor!;
|
||||
var name = product.Name;
|
||||
var identifier = string.IsNullOrWhiteSpace(name) ? vendor : $"{vendor} {name}";
|
||||
var normalizedIdentifier = CreateSlug(identifier);
|
||||
var rangeProvenanceKey = $"kisa:{dto.AdvisoryId}:{normalizedIdentifier}";
|
||||
|
||||
var artifacts = BuildVersionArtifacts(product, rangeProvenanceKey, recordedAt);
|
||||
var fieldMasks = new HashSet<string>(StringComparer.Ordinal)
|
||||
{
|
||||
ProvenanceFieldMasks.AffectedPackages
|
||||
};
|
||||
|
||||
if (artifacts.Ranges.Count > 0)
|
||||
{
|
||||
fieldMasks.Add(ProvenanceFieldMasks.VersionRanges);
|
||||
}
|
||||
|
||||
if (artifacts.NormalizedVersions.Count > 0)
|
||||
{
|
||||
fieldMasks.Add(ProvenanceFieldMasks.NormalizedVersions);
|
||||
}
|
||||
|
||||
var packageProvenance = new AdvisoryProvenance(
|
||||
KisaConnectorPlugin.SourceName,
|
||||
"package",
|
||||
identifier,
|
||||
recordedAt,
|
||||
fieldMasks);
|
||||
|
||||
packages.Add(new AffectedPackage(
|
||||
AffectedPackageTypes.Vendor,
|
||||
identifier,
|
||||
platform: null,
|
||||
versionRanges: artifacts.Ranges,
|
||||
statuses: Array.Empty<AffectedPackageStatus>(),
|
||||
provenance: new[] { packageProvenance },
|
||||
normalizedVersions: artifacts.NormalizedVersions));
|
||||
}
|
||||
|
||||
return packages
|
||||
.DistinctBy(static package => package.Identifier, StringComparer.OrdinalIgnoreCase)
|
||||
.OrderBy(static package => package.Identifier, StringComparer.OrdinalIgnoreCase)
|
||||
.ToArray();
|
||||
}
|
||||
|
||||
private static (IReadOnlyList<AffectedVersionRange> Ranges, IReadOnlyList<NormalizedVersionRule> NormalizedVersions) BuildVersionArtifacts(
|
||||
KisaParsedProduct product,
|
||||
string provenanceValue,
|
||||
DateTimeOffset recordedAt)
|
||||
{
|
||||
if (string.IsNullOrWhiteSpace(product.Versions))
|
||||
{
|
||||
var fallback = CreateFallbackRange(product.Versions ?? string.Empty, provenanceValue, recordedAt);
|
||||
return (new[] { fallback }, Array.Empty<NormalizedVersionRule>());
|
||||
}
|
||||
|
||||
var segment = product.Versions.Trim();
|
||||
var result = ParseRangeSegment(segment, provenanceValue, recordedAt);
|
||||
|
||||
var ranges = new[] { result.Range };
|
||||
var normalized = result.NormalizedRule is null
|
||||
? Array.Empty<NormalizedVersionRule>()
|
||||
: new[] { result.NormalizedRule };
|
||||
|
||||
return (ranges, normalized);
|
||||
}
|
||||
|
||||
    private static (AffectedVersionRange Range, NormalizedVersionRule? NormalizedRule) ParseRangeSegment(
        string segment,
        string provenanceValue,
        DateTimeOffset recordedAt)
    {
        var trimmed = segment.Trim();
        if (trimmed.Length == 0)
        {
            return (CreateFallbackRange(segment, provenanceValue, recordedAt), null);
        }

        var matches = VersionPattern.Matches(trimmed);
        if (matches.Count == 0)
        {
            return (CreateFallbackRange(segment, provenanceValue, recordedAt), null);
        }

        var startMatch = matches[0];
        var startVersion = startMatch.Value;
        string? endVersion = matches.Count > 1 ? matches[1].Value : null;

        var prefix = trimmed[..startMatch.Index].Trim();
        var startContext = ExtractSpan(trimmed, startMatch.Index + startMatch.Length, endVersion is not null ? matches[1].Index : trimmed.Length).Trim();
        var endContext = endVersion is not null
            ? trimmed[(matches[1].Index + matches[1].Length)..].Trim()
            : string.Empty;

        var introducedInclusive = DetermineStartInclusivity(prefix, startContext, trimmed);
        var endContextForInclusivity = endVersion is not null ? endContext : startContext;
        var fixedInclusive = DetermineEndInclusivity(endContextForInclusivity, trimmed);

        var hasInclusiveLowerMarker = ContainsAny(prefix, InclusiveStartMarkers) || ContainsAny(startContext, InclusiveStartMarkers);
        var hasExclusiveLowerMarker = ContainsAny(prefix, ExclusiveStartMarkers) || ContainsAny(startContext, ExclusiveStartMarkers);
        var hasInclusiveUpperMarker = ContainsAny(startContext, InclusiveEndMarkers) || ContainsAny(endContext, InclusiveEndMarkers);
        var hasExclusiveUpperMarker = ContainsAny(startContext, ExclusiveEndMarkers) || ContainsAny(endContext, ExclusiveEndMarkers);
        var hasUpperMarker = hasInclusiveUpperMarker || hasExclusiveUpperMarker;
        var hasLowerMarker = hasInclusiveLowerMarker || hasExclusiveLowerMarker;

        var introducedNormalized = TryFormatSemVer(startVersion);
        var fixedNormalized = endVersion is not null ? TryFormatSemVer(endVersion) : null;

        if (introducedNormalized is null || (endVersion is not null && fixedNormalized is null))
        {
            return (CreateFallbackRange(segment, provenanceValue, recordedAt), null);
        }

        var coercedUpperOnly = endVersion is null && hasUpperMarker && !hasLowerMarker;

        if (coercedUpperOnly)
        {
            fixedNormalized = introducedNormalized;
            introducedNormalized = null;
            fixedInclusive = hasInclusiveUpperMarker && !hasExclusiveUpperMarker;
        }

        var constraintExpression = BuildConstraintExpression(
            introducedNormalized,
            introducedInclusive,
            fixedNormalized,
            fixedInclusive);

        var vendorExtensions = new Dictionary<string, string>(StringComparer.OrdinalIgnoreCase)
        {
            ["kisa.range.raw"] = trimmed,
            ["kisa.version.start.raw"] = startVersion
        };

        if (introducedNormalized is not null)
        {
            vendorExtensions["kisa.version.start.normalized"] = introducedNormalized;
        }

        if (!string.IsNullOrWhiteSpace(prefix))
        {
            vendorExtensions["kisa.range.prefix"] = prefix;
        }

        if (coercedUpperOnly)
        {
            vendorExtensions["kisa.version.end.raw"] = startVersion;
            vendorExtensions["kisa.version.end.normalized"] = fixedNormalized!;
        }

        if (endVersion is not null)
        {
            vendorExtensions["kisa.version.end.raw"] = endVersion;
            vendorExtensions["kisa.version.end.normalized"] = fixedNormalized!;
        }

        if (!string.IsNullOrWhiteSpace(startContext))
        {
            vendorExtensions["kisa.range.start.context"] = startContext;
        }

        if (!string.IsNullOrWhiteSpace(endContext))
        {
            vendorExtensions["kisa.range.end.context"] = endContext;
        }

        if (!string.IsNullOrWhiteSpace(constraintExpression))
        {
            vendorExtensions["kisa.range.normalized"] = constraintExpression!;
        }

        var semVerPrimitive = new SemVerPrimitive(
            Introduced: introducedNormalized,
            IntroducedInclusive: introducedInclusive,
            Fixed: fixedNormalized,
            FixedInclusive: fixedInclusive,
            LastAffected: fixedNormalized,
            LastAffectedInclusive: fixedNormalized is not null ? fixedInclusive : introducedInclusive,
            ConstraintExpression: constraintExpression,
            ExactValue: fixedNormalized is null && string.IsNullOrWhiteSpace(constraintExpression) ? introducedNormalized : null);

        var range = new AffectedVersionRange(
            rangeKind: "product",
            introducedVersion: semVerPrimitive.Introduced,
            fixedVersion: semVerPrimitive.Fixed,
            lastAffectedVersion: semVerPrimitive.LastAffected,
            rangeExpression: trimmed,
            provenance: new AdvisoryProvenance(
                KisaConnectorPlugin.SourceName,
                "package-range",
                provenanceValue,
                recordedAt,
                new[] { ProvenanceFieldMasks.VersionRanges }),
            primitives: new RangePrimitives(semVerPrimitive, null, null, vendorExtensions));

        var normalizedRule = semVerPrimitive.ToNormalizedVersionRule(provenanceValue);
        return (range, normalizedRule);
    }

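    // Reviewer note (illustrative walkthrough, not part of the change): for a raw KISA segment such as
    // "1.2.3 이상 2.0.0 미만", VersionPattern matches "1.2.3" and "2.0.0", the "이상" marker keeps the
    // lower bound inclusive, the "미만" marker makes the upper bound exclusive, and the resulting
    // constraint expression is ">= 1.2.3 < 2.0.0" with both values carried in the SemVer primitive.
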
    private static AffectedVersionRange CreateFallbackRange(string raw, string provenanceValue, DateTimeOffset recordedAt)
    {
        var vendorExtensions = new Dictionary<string, string>(StringComparer.OrdinalIgnoreCase);
        if (!string.IsNullOrWhiteSpace(raw))
        {
            vendorExtensions["kisa.range.raw"] = raw.Trim();
        }

        return new AffectedVersionRange(
            rangeKind: "string",
            introducedVersion: null,
            fixedVersion: null,
            lastAffectedVersion: null,
            rangeExpression: raw,
            provenance: new AdvisoryProvenance(
                KisaConnectorPlugin.SourceName,
                "package-range",
                provenanceValue,
                recordedAt,
                new[] { ProvenanceFieldMasks.VersionRanges }),
            primitives: new RangePrimitives(null, null, null, vendorExtensions));
    }

    private static string ExtractSpan(string source, int start, int end)
    {
        if (start >= end || start >= source.Length)
        {
            return string.Empty;
        }

        end = Math.Min(end, source.Length);
        return source[start..end];
    }

    private static string? TryFormatSemVer(string version)
    {
        var segments = version.Split('.', StringSplitOptions.RemoveEmptyEntries | StringSplitOptions.TrimEntries);
        if (segments.Length == 0)
        {
            return null;
        }

        if (!TryParseInt(segments[0], out var major))
        {
            return null;
        }

        var minor = segments.Length > 1 && TryParseInt(segments[1], out var minorValue) ? minorValue : 0;
        var patch = segments.Length > 2 && TryParseInt(segments[2], out var patchValue) ? patchValue : 0;
        var baseVersion = $"{major}.{minor}.{patch}";

        if (segments.Length <= 3)
        {
            return baseVersion;
        }

        var extraIdentifiers = segments
            .Skip(3)
            .Select(TrimLeadingZeros)
            .Where(static part => part.Length > 0)
            .ToArray();

        if (extraIdentifiers.Length == 0)
        {
            extraIdentifiers = new[] { "0" };
        }

        var allIdentifiers = new[] { "fw" }.Concat(extraIdentifiers);
        return $"{baseVersion}-{string.Join('.', allIdentifiers)}";
    }

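    // Illustrative outputs (reviewer note): TryFormatSemVer("2.1") -> "2.1.0",
    // TryFormatSemVer("10.0.3.2500") -> "10.0.3-fw.2500" (extra firmware-style segments become a
    // prerelease tag so the value stays SemVer-comparable), and TryFormatSemVer("abc") -> null.
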
    private static string TrimLeadingZeros(string value)
    {
        var trimmed = value.TrimStart('0');
        return trimmed.Length == 0 ? "0" : trimmed;
    }

    private static bool TryParseInt(string value, out int result)
        => int.TryParse(value.Trim(), out result);

    private static bool DetermineStartInclusivity(string prefix, string context, string fullSegment)
    {
        if (ContainsAny(prefix, ExclusiveStartMarkers) || ContainsAny(context, ExclusiveStartMarkers))
        {
            return false;
        }

        if (fullSegment.Contains('~', StringComparison.Ordinal))
        {
            return true;
        }

        if (ContainsAny(prefix, InclusiveStartMarkers) || ContainsAny(context, InclusiveStartMarkers))
        {
            return true;
        }

        return true;
    }

    private static bool DetermineEndInclusivity(string context, string fullSegment)
    {
        if (string.IsNullOrWhiteSpace(context))
        {
            return true;
        }

        if (ContainsAny(context, ExclusiveEndMarkers))
        {
            return false;
        }

        if (fullSegment.Contains('~', StringComparison.Ordinal))
        {
            return true;
        }

        if (ContainsAny(context, InclusiveEndMarkers))
        {
            return true;
        }

        return true;
    }

    private static string? BuildConstraintExpression(
        string? introduced,
        bool introducedInclusive,
        string? fixedVersion,
        bool fixedInclusive)
    {
        var segments = new List<string>(capacity: 2);

        if (!string.IsNullOrWhiteSpace(introduced))
        {
            segments.Add($"{(introducedInclusive ? ">=" : ">")} {introduced}");
        }

        if (!string.IsNullOrWhiteSpace(fixedVersion))
        {
            segments.Add($"{(fixedInclusive ? "<=" : "<")} {fixedVersion}");
        }

        return segments.Count == 0 ? null : string.Join(" ", segments);
    }

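    // Illustrative (reviewer note): BuildConstraintExpression("1.0.0", true, "2.0.0", false)
    // returns ">= 1.0.0 < 2.0.0"; with only an upper bound it returns "< 2.0.0" (or "<= 2.0.0"
    // when inclusive), and null when neither bound is present.
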
    private static bool ContainsAny(string? value, IReadOnlyCollection<string> markers)
    {
        if (string.IsNullOrWhiteSpace(value))
        {
            return false;
        }

        foreach (var marker in markers)
        {
            if (value.Contains(marker, StringComparison.Ordinal))
            {
                return true;
            }
        }

        return false;
    }

    private static string CreateSlug(string value)
    {
        if (string.IsNullOrWhiteSpace(value))
        {
            return "kisa-product";
        }

        Span<char> buffer = stackalloc char[value.Length];
        var index = 0;
        foreach (var ch in value.ToLowerInvariant())
        {
            if (char.IsLetterOrDigit(ch))
            {
                buffer[index++] = ch;
            }
            else if (char.IsWhiteSpace(ch) || ch is '-' or '_' or '.' or '/')
            {
                if (index == 0 || buffer[index - 1] == '-')
                {
                    continue;
                }

                buffer[index++] = '-';
            }
        }

        if (index == 0)
        {
            return "kisa-product";
        }

        var slug = new string(buffer[..index]).Trim('-');
        return string.IsNullOrWhiteSpace(slug) ? "kisa-product" : slug;
    }

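    // Illustrative (reviewer note): CreateSlug("Acme Router FW-1000") -> "acme-router-fw-1000";
    // empty or entirely non-alphanumeric input falls back to "kisa-product".
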
    private static readonly Regex VersionPattern = new(@"\d+(?:\.\d+){1,3}", RegexOptions.Compiled);

    private static readonly string[] InclusiveStartMarkers = { "이상" };
    private static readonly string[] ExclusiveStartMarkers = { "초과" };
    private static readonly string[] InclusiveEndMarkers = { "이하" };
    private static readonly string[] ExclusiveEndMarkers = { "미만" };
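    // Marker semantics (translation note for reviewers): "이상" = "X or later" (inclusive lower bound),
    // "초과" = "greater than X" (exclusive lower bound), "이하" = "X or earlier" (inclusive upper bound),
    // "미만" = "less than X" (exclusive upper bound).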
}

        foreach (var product in dto.Products)
        {
            var vendor = string.IsNullOrWhiteSpace(product.Vendor) ? "Unknown" : product.Vendor!;
            var name = product.Name;
            var identifier = string.IsNullOrWhiteSpace(name) ? vendor : $"{vendor} {name}";
            var normalizedIdentifier = CreateSlug(identifier);
            var rangeProvenanceKey = $"kisa:{dto.AdvisoryId}:{normalizedIdentifier}";

            var artifacts = BuildVersionArtifacts(product, rangeProvenanceKey, recordedAt);
            var fieldMasks = new HashSet<string>(StringComparer.Ordinal)
            {
                ProvenanceFieldMasks.AffectedPackages
            };

            if (artifacts.Ranges.Count > 0)
            {
                fieldMasks.Add(ProvenanceFieldMasks.VersionRanges);
            }

            if (artifacts.NormalizedVersions.Count > 0)
            {
                fieldMasks.Add(ProvenanceFieldMasks.NormalizedVersions);
            }

            var packageProvenance = new AdvisoryProvenance(
                KisaConnectorPlugin.SourceName,
                "package",
                identifier,
                recordedAt,
                fieldMasks);

            packages.Add(new AffectedPackage(
                AffectedPackageTypes.Vendor,
                identifier,
                platform: null,
                versionRanges: artifacts.Ranges,
                statuses: Array.Empty<AffectedPackageStatus>(),
                provenance: new[] { packageProvenance },
                normalizedVersions: artifacts.NormalizedVersions));
        }

        return packages
            .DistinctBy(static package => package.Identifier, StringComparer.OrdinalIgnoreCase)
            .OrderBy(static package => package.Identifier, StringComparer.OrdinalIgnoreCase)
            .ToArray();
    }
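    // Reviewer note (hypothetical identifiers, for illustration only): an advisory "KISA-2024-0001"
    // affecting "Acme Router" produces the provenance key "kisa:KISA-2024-0001:acme-router", so range
    // provenance stays stable for the same advisory/product pair across runs.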
@@ -13,8 +13,8 @@ using StellaOps.Concelier.Connector.Kisa.Configuration;
using StellaOps.Concelier.Connector.Kisa.Internal;
using StellaOps.Concelier.Storage.Mongo;
using StellaOps.Concelier.Storage.Mongo.Advisories;
using StellaOps.Concelier.Storage.Mongo.Documents;
using StellaOps.Concelier.Storage.Mongo.Dtos;
using StellaOps.Concelier.Storage.Mongo;
using StellaOps.Concelier.Storage.Mongo;
using StellaOps.Plugin;

namespace StellaOps.Concelier.Connector.Kisa;

@@ -1,14 +1,14 @@
using System.Collections.Generic;
using System.Collections.Immutable;
using System.Linq;
using System.Text;
using System.Text.Json;
using NuGet.Versioning;
using StellaOps.Concelier.Models;
using StellaOps.Concelier.Normalization.Identifiers;
using StellaOps.Concelier.Normalization.Cvss;
using StellaOps.Concelier.Normalization.Text;
using StellaOps.Concelier.Storage.Mongo.Documents;
using System.Collections.Generic;
using System.Collections.Immutable;
using System.Linq;
using System.Text;
using System.Text.Json;
using NuGet.Versioning;
using StellaOps.Concelier.Models;
using StellaOps.Concelier.Normalization.Identifiers;
using StellaOps.Concelier.Normalization.Cvss;
using StellaOps.Concelier.Normalization.Text;
using StellaOps.Concelier.Storage.Mongo;

namespace StellaOps.Concelier.Connector.Nvd.Internal;

@@ -49,30 +49,30 @@ internal static class NvdMapper
        var modified = TryGetDateTime(cve, "lastModified");
        var description = GetNormalizedDescription(cve);

        var weaknessMetadata = GetWeaknessMetadata(cve);
        var references = GetReferences(cve, sourceDocument, recordedAt, weaknessMetadata);
        var affectedPackages = GetAffectedPackages(cve, cveId, sourceDocument, recordedAt);
        var cvssMetrics = GetCvssMetrics(cve, sourceDocument, recordedAt, out var severity);
        var weaknesses = BuildWeaknesses(weaknessMetadata, recordedAt);
        var canonicalMetricId = cvssMetrics.Count > 0
            ? $"{cvssMetrics[0].Version}|{cvssMetrics[0].Vector}"
            : null;

        var provenance = new[]
        {
            new AdvisoryProvenance(
                NvdConnectorPlugin.SourceName,
                "document",
                sourceDocument.Uri,
                sourceDocument.FetchedAt,
                new[] { ProvenanceFieldMasks.Advisory }),
            new AdvisoryProvenance(
                NvdConnectorPlugin.SourceName,
                "mapping",
                string.IsNullOrWhiteSpace(cveId) ? advisoryKey : cveId,
                recordedAt,
                new[] { ProvenanceFieldMasks.Advisory }),
        };
        var title = string.IsNullOrWhiteSpace(cveId) ? advisoryKey : cveId;

@@ -83,24 +83,24 @@ internal static class NvdMapper
        }

        aliasCandidates.Add(advisoryKey);

        var advisory = new Advisory(
            advisoryKey: advisoryKey,
            title: title,
            summary: string.IsNullOrEmpty(description.Text) ? null : description.Text,
            language: description.Language,
            published: published,
            modified: modified,
            severity: severity,
            exploitKnown: false,
            aliases: aliasCandidates,
            references: references,
            affectedPackages: affectedPackages,
            cvssMetrics: cvssMetrics,
            provenance: provenance,
            description: string.IsNullOrEmpty(description.Text) ? null : description.Text,
            cwes: weaknesses,
            canonicalMetricId: canonicalMetricId);

        advisories.Add(advisory);
        index++;
@@ -149,22 +149,22 @@ internal static class NvdMapper
        return DateTimeOffset.TryParse(property.GetString(), out var parsed) ? parsed : null;
    }

    private static IReadOnlyList<AdvisoryReference> GetReferences(
        JsonElement cve,
        DocumentRecord document,
        DateTimeOffset recordedAt,
        IReadOnlyList<WeaknessMetadata> weaknesses)
    {
        var references = new List<AdvisoryReference>();
        if (!cve.TryGetProperty("references", out var referencesElement) || referencesElement.ValueKind != JsonValueKind.Array)
        {
            AppendWeaknessReferences(references, weaknesses, recordedAt);
            return references;
        }

        foreach (var reference in referencesElement.EnumerateArray())
        {
            if (!reference.TryGetProperty("url", out var urlElement) || urlElement.ValueKind != JsonValueKind.String)
            {
                continue;
            }
@@ -187,138 +187,138 @@ internal static class NvdMapper
                kind: kind,
                sourceTag: sourceTag,
                summary: null,
                provenance: new AdvisoryProvenance(
                    NvdConnectorPlugin.SourceName,
                    "reference",
                    url,
                    recordedAt,
                    new[] { ProvenanceFieldMasks.References })));
        }

        AppendWeaknessReferences(references, weaknesses, recordedAt);
        return references;
    }

    private static IReadOnlyList<WeaknessMetadata> GetWeaknessMetadata(JsonElement cve)
    {
        if (!cve.TryGetProperty("weaknesses", out var weaknesses) || weaknesses.ValueKind != JsonValueKind.Array)
        {
            return Array.Empty<WeaknessMetadata>();
        }

        var list = new List<WeaknessMetadata>(weaknesses.GetArrayLength());
        foreach (var weakness in weaknesses.EnumerateArray())
        {
            if (!weakness.TryGetProperty("description", out var descriptions) || descriptions.ValueKind != JsonValueKind.Array)
            {
                continue;
            }

            string? cweId = null;
            string? name = null;

            foreach (var description in descriptions.EnumerateArray())
            {
                if (description.ValueKind != JsonValueKind.Object)
                {
                    continue;
                }

                if (!description.TryGetProperty("value", out var valueElement) || valueElement.ValueKind != JsonValueKind.String)
                {
                    continue;
                }

                var value = valueElement.GetString();
                if (string.IsNullOrWhiteSpace(value))
                {
                    continue;
                }

                var trimmed = value.Trim();
                if (trimmed.StartsWith("CWE-", StringComparison.OrdinalIgnoreCase))
                {
                    cweId ??= trimmed.ToUpperInvariant();
                }
                else
                {
                    name ??= trimmed;
                }
            }

            if (string.IsNullOrWhiteSpace(cweId))
            {
                continue;
            }

            list.Add(new WeaknessMetadata(cweId, name));
        }

        return list.Count == 0 ? Array.Empty<WeaknessMetadata>() : list;
    }

    private static IReadOnlyList<AdvisoryWeakness> BuildWeaknesses(IReadOnlyList<WeaknessMetadata> metadata, DateTimeOffset recordedAt)
    {
        if (metadata.Count == 0)
        {
            return Array.Empty<AdvisoryWeakness>();
        }

        var list = new List<AdvisoryWeakness>(metadata.Count);
        foreach (var entry in metadata)
        {
            var provenance = new AdvisoryProvenance(
                NvdConnectorPlugin.SourceName,
                "weakness",
                entry.CweId,
                recordedAt,
                new[] { ProvenanceFieldMasks.Weaknesses });

            var provenanceArray = ImmutableArray.Create(provenance);
            list.Add(new AdvisoryWeakness(
                taxonomy: "cwe",
                identifier: entry.CweId,
                name: entry.Name,
                uri: BuildCweUrl(entry.CweId),
                provenance: provenanceArray));
        }

        return list;
    }

    private static void AppendWeaknessReferences(
        List<AdvisoryReference> references,
        IReadOnlyList<WeaknessMetadata> weaknesses,
        DateTimeOffset recordedAt)
    {
        if (weaknesses.Count == 0)
        {
            return;
        }

        var existing = new HashSet<string>(references.Select(reference => reference.Url), StringComparer.OrdinalIgnoreCase);

        foreach (var weakness in weaknesses)
        {
            var url = BuildCweUrl(weakness.CweId);
            if (url is null || existing.Contains(url))
            {
                continue;
            }

            var provenance = new AdvisoryProvenance(
                NvdConnectorPlugin.SourceName,
                "reference",
                url,
                recordedAt,
                new[] { ProvenanceFieldMasks.References });

            references.Add(new AdvisoryReference(url, "weakness", weakness.CweId, weakness.Name, provenance));
            existing.Add(url);
        }
    }

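    // Reviewer note (illustrative, abridged from the NVD 2.0 CVE JSON this mapper consumes):
    // GetWeaknessMetadata above expects entries shaped like
    //   "weaknesses": [ { "description": [ { "lang": "en", "value": "CWE-79" } ] } ]
    // where "CWE-..." values become identifiers and any other string is kept as a display name.
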
    private static IReadOnlyList<AffectedPackage> GetAffectedPackages(JsonElement cve, string? cveId, DocumentRecord document, DateTimeOffset recordedAt)
    {
        var packages = new Dictionary<string, PackageAccumulator>(StringComparer.Ordinal);
        if (!cve.TryGetProperty("configurations", out var configurations) || configurations.ValueKind != JsonValueKind.Object)
@@ -360,12 +360,12 @@ internal static class NvdMapper
            ? normalizedCpe
            : criteria.Trim();

        var provenance = new AdvisoryProvenance(
            NvdConnectorPlugin.SourceName,
            "cpe",
            document.Uri,
            recordedAt,
            new[] { ProvenanceFieldMasks.AffectedPackages });
        if (!packages.TryGetValue(identifier, out var accumulator))
        {
            accumulator = new PackageAccumulator();
@@ -387,9 +387,9 @@ internal static class NvdMapper
            return Array.Empty<AffectedPackage>();
        }

        return packages
            .OrderBy(static kvp => kvp.Key, StringComparer.Ordinal)
            .Select(kvp =>
            {
                var ranges = kvp.Value.Ranges.Count == 0
                    ? Array.Empty<AffectedVersionRange>()
@@ -404,33 +404,33 @@ internal static class NvdMapper
                    .ThenBy(static p => p.RecordedAt.UtcDateTime)
                    .ToArray();

                var normalizedNote = string.IsNullOrWhiteSpace(cveId)
                    ? $"nvd:{document.Id:N}"
                    : $"nvd:{cveId}";

                var normalizedVersions = new List<NormalizedVersionRule>(ranges.Length);
                foreach (var range in ranges)
                {
                    var rule = range.ToNormalizedVersionRule(normalizedNote);
                    if (rule is not null)
                    {
                        normalizedVersions.Add(rule);
                    }
                }

                return new AffectedPackage(
                    type: AffectedPackageTypes.Cpe,
                    identifier: kvp.Key,
                    platform: null,
                    versionRanges: ranges,
                    statuses: Array.Empty<AffectedPackageStatus>(),
                    provenance: provenance,
                    normalizedVersions: normalizedVersions.Count == 0
                        ? Array.Empty<NormalizedVersionRule>()
                        : normalizedVersions.ToArray());
            })
            .ToArray();
    }
    private static IReadOnlyList<CvssMetric> GetCvssMetrics(JsonElement cve, DocumentRecord document, DateTimeOffset recordedAt, out string? severity)
    {
@@ -488,12 +488,12 @@ internal static class NvdMapper

            severity ??= normalized.BaseSeverity;

            list.Add(normalized.ToModel(new AdvisoryProvenance(
                NvdConnectorPlugin.SourceName,
                "cvss",
                normalized.Vector,
                recordedAt,
                new[] { ProvenanceFieldMasks.CvssMetrics })));
        }

        if (list.Count > 0)
@@ -557,186 +557,186 @@ internal static class NvdMapper
            vendorExtensions["version"] = version;
        }

        string? introduced = null;
        string? fixedVersion = null;
        string? lastAffected = null;
        string? exactVersion = null;
        var expressionParts = new List<string>();

        var introducedInclusive = true;
        var fixedInclusive = false;
        var lastInclusive = true;

        if (versionStartIncluding is not null)
        {
            introduced = versionStartIncluding;
            introducedInclusive = true;
            expressionParts.Add($">={versionStartIncluding}");
        }

        if (versionStartExcluding is not null)
        {
            if (introduced is null)
            {
                introduced = versionStartExcluding;
                introducedInclusive = false;
            }
            expressionParts.Add($">{versionStartExcluding}");
        }

        if (versionEndExcluding is not null)
        {
            fixedVersion = versionEndExcluding;
            fixedInclusive = false;
            expressionParts.Add($"<{versionEndExcluding}");
        }

        if (versionEndIncluding is not null)
        {
            lastAffected = versionEndIncluding;
            lastInclusive = true;
            expressionParts.Add($"<={versionEndIncluding}");
        }

        if (version is not null)
        {
            introduced = version;
            introducedInclusive = true;
            lastAffected = version;
            lastInclusive = true;
            exactVersion = version;
            expressionParts.Add($"=={version}");
        }

        if (introduced is null && fixedVersion is null && lastAffected is null && vendorExtensions.Count == 0)
        {
            return null;
        }

        var rangeExpression = expressionParts.Count > 0 ? string.Join(' ', expressionParts) : null;
        IReadOnlyDictionary<string, string>? extensions = vendorExtensions.Count == 0 ? null : vendorExtensions;

        SemVerPrimitive? semVerPrimitive = null;
        if (TryBuildSemVerPrimitive(
            introduced,
            introducedInclusive,
            fixedVersion,
            fixedInclusive,
            lastAffected,
            lastInclusive,
            exactVersion,
            rangeExpression,
            out var primitive))
        {
            semVerPrimitive = primitive;
        }

        var primitives = semVerPrimitive is null && extensions is null
            ? null
            : new RangePrimitives(semVerPrimitive, null, null, extensions);

        var provenanceValue = provenance.Value ?? criteria;
        var rangeProvenance = new AdvisoryProvenance(
            provenance.Source,
            provenance.Kind,
            provenanceValue,
            provenance.RecordedAt,
            new[] { ProvenanceFieldMasks.VersionRanges });

        return new AffectedVersionRange(
            rangeKind: "cpe",
            introducedVersion: introduced,
            fixedVersion: fixedVersion,
            lastAffectedVersion: lastAffected,
            rangeExpression: rangeExpression,
            provenance: rangeProvenance,
            primitives);
    }

    private static bool TryBuildSemVerPrimitive(
        string? introduced,
        bool introducedInclusive,
        string? fixedVersion,
        bool fixedInclusive,
        string? lastAffected,
        bool lastInclusive,
        string? exactVersion,
        string? constraintExpression,
        out SemVerPrimitive? primitive)
    {
        primitive = null;

        if (!TryNormalizeSemVer(introduced, out var normalizedIntroduced)
            || !TryNormalizeSemVer(fixedVersion, out var normalizedFixed)
            || !TryNormalizeSemVer(lastAffected, out var normalizedLast)
            || !TryNormalizeSemVer(exactVersion, out var normalizedExact))
        {
            return false;
        }

        if (normalizedIntroduced is null && normalizedFixed is null && normalizedLast is null && normalizedExact is null)
        {
            return false;
        }

        primitive = new SemVerPrimitive(
            Introduced: normalizedIntroduced,
            IntroducedInclusive: normalizedIntroduced is null ? true : introducedInclusive,
            Fixed: normalizedFixed,
            FixedInclusive: normalizedFixed is null ? false : fixedInclusive,
            LastAffected: normalizedLast,
            LastAffectedInclusive: normalizedLast is null ? false : lastInclusive,
            ConstraintExpression: constraintExpression,
            ExactValue: normalizedExact);

        return true;
    }

    private static bool TryNormalizeSemVer(string? value, out string? normalized)
    {
        normalized = null;
        if (string.IsNullOrWhiteSpace(value))
        {
            return true;
        }

        var trimmed = value.Trim();
        if (trimmed.StartsWith("v", StringComparison.OrdinalIgnoreCase) && trimmed.Length > 1)
        {
            trimmed = trimmed[1..];
        }

        if (!NuGetVersion.TryParse(trimmed, out var parsed))
        {
            return false;
        }

        normalized = parsed.ToNormalizedString();
        return true;
    }

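    // Illustrative (reviewer note): TryNormalizeSemVer("v1.02.3", out var n) -> true with n == "1.2.3"
    // (the leading "v" is stripped and NuGet.Versioning normalizes the numeric parts);
    // "not-a-version" -> false, and null/whitespace -> true with n == null.
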
    private static string? BuildCweUrl(string cweId)
    {
        var dashIndex = cweId.IndexOf('-');
        if (dashIndex < 0 || dashIndex == cweId.Length - 1)
        {
            return null;
        }

        var digits = new StringBuilder();
        for (var i = dashIndex + 1; i < cweId.Length; i++)
        {
            var ch = cweId[i];
            if (char.IsDigit(ch))
            {
                digits.Append(ch);
            }
        }

        return digits.Length == 0 ? null : $"https://cwe.mitre.org/data/definitions/{digits}.html";
    }

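    // Illustrative (reviewer note): BuildCweUrl("CWE-79") -> "https://cwe.mitre.org/data/definitions/79.html";
    // inputs without digits after the dash (for example "CWE-") return null.
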
    private static string? TryExtractVersionFromCriteria(string criteria)
    {
        if (string.IsNullOrWhiteSpace(criteria))
        {
@@ -763,12 +763,12 @@ internal static class NvdMapper
        return version;
    }

    private readonly record struct WeaknessMetadata(string CweId, string? Name);

    private sealed class PackageAccumulator
    {
        public List<AffectedVersionRange> Ranges { get; } = new();

        public List<AdvisoryProvenance> Provenance { get; } = new();
    }
}

@@ -13,8 +13,8 @@ using StellaOps.Concelier.Connector.Nvd.Configuration;
using StellaOps.Concelier.Connector.Nvd.Internal;
using StellaOps.Concelier.Storage.Mongo;
using StellaOps.Concelier.Storage.Mongo.Advisories;
using StellaOps.Concelier.Storage.Mongo.Documents;
using StellaOps.Concelier.Storage.Mongo.Dtos;
using StellaOps.Concelier.Storage.Mongo;
using StellaOps.Concelier.Storage.Mongo;
using StellaOps.Concelier.Storage.Mongo.ChangeHistory;
using StellaOps.Plugin;
using Json.Schema;

File diff suppressed because it is too large
@@ -20,8 +20,8 @@ using StellaOps.Concelier.Connector.Osv.Configuration;
using StellaOps.Concelier.Connector.Osv.Internal;
using StellaOps.Concelier.Storage.Mongo;
using StellaOps.Concelier.Storage.Mongo.Advisories;
using StellaOps.Concelier.Storage.Mongo.Documents;
using StellaOps.Concelier.Storage.Mongo.Dtos;
using StellaOps.Concelier.Storage.Mongo;
using StellaOps.Concelier.Storage.Mongo;
using StellaOps.Plugin;
using StellaOps.Cryptography;

@@ -426,7 +426,8 @@ public sealed class OsvConnector : IFeedConnector
                continue;
            }

            var gridFsId = await _rawDocumentStorage.UploadAsync(SourceName, documentUri, bytes, "application/json", null, cancellationToken).ConfigureAwait(false);
            var recordId = existing?.Id ?? Guid.NewGuid();
            _ = await _rawDocumentStorage.UploadAsync(SourceName, documentUri, bytes, "application/json", null, cancellationToken, recordId).ConfigureAwait(false);
            var metadata = new Dictionary<string, string>(StringComparer.Ordinal)
            {
                ["osv.ecosystem"] = ecosystem,
@@ -434,7 +435,6 @@ public sealed class OsvConnector : IFeedConnector
                ["osv.modified"] = modified.ToString("O"),
            };

            var recordId = existing?.Id ?? Guid.NewGuid();
            var record = new DocumentRecord(
                recordId,
                SourceName,
@@ -447,8 +447,9 @@ public sealed class OsvConnector : IFeedConnector
                Metadata: metadata,
                Etag: null,
                LastModified: modified,
                PayloadId: gridFsId,
                ExpiresAt: null);
                PayloadId: recordId,
                ExpiresAt: null,
                Payload: bytes);

            var upserted = await _documentStore.UpsertAsync(record, cancellationToken).ConfigureAwait(false);
            pendingDocuments.Add(upserted.Id);

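// Reviewer note on the OSV hunks above (interpretation, not part of the diff): the connector now
// computes the record id up front (reusing the existing document id when the same URI was fetched
// before), passes it to RawDocumentStorage.UploadAsync, and stores the raw bytes on the
// DocumentRecord itself (PayloadId: recordId, ExpiresAt: null, Payload: bytes) instead of pointing
// at a separate GridFS id.
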
@@ -6,7 +6,7 @@ using System.Linq;
using System.Text;
using StellaOps.Concelier.Models;
using StellaOps.Concelier.Normalization.Cvss;
using StellaOps.Concelier.Storage.Mongo.Documents;
using StellaOps.Concelier.Storage.Mongo;

namespace StellaOps.Concelier.Connector.Ru.Bdu.Internal;


@@ -17,8 +17,8 @@ using StellaOps.Concelier.Connector.Ru.Bdu.Configuration;
using StellaOps.Concelier.Connector.Ru.Bdu.Internal;
using StellaOps.Concelier.Storage.Mongo;
using StellaOps.Concelier.Storage.Mongo.Advisories;
using StellaOps.Concelier.Storage.Mongo.Documents;
using StellaOps.Concelier.Storage.Mongo.Dtos;
using StellaOps.Concelier.Storage.Mongo;
using StellaOps.Concelier.Storage.Mongo;
using StellaOps.Plugin;
using StellaOps.Cryptography;

@@ -410,7 +410,8 @@ public sealed class RuBduConnector : IFeedConnector
                continue;
            }

            var gridFsId = await _rawDocumentStorage.UploadAsync(SourceName, documentUri, payload, "application/json", null, cancellationToken).ConfigureAwait(false);
            var recordId = existing?.Id ?? Guid.NewGuid();
            _ = await _rawDocumentStorage.UploadAsync(SourceName, documentUri, payload, "application/json", null, cancellationToken, recordId).ConfigureAwait(false);

            var metadata = new Dictionary<string, string>(StringComparer.OrdinalIgnoreCase)
            {
@@ -422,7 +423,6 @@ public sealed class RuBduConnector : IFeedConnector
                metadata["ru-bdu.name"] = dto.Name!;
            }

            var recordId = existing?.Id ?? Guid.NewGuid();
            var record = new DocumentRecord(
                recordId,
                SourceName,
@@ -435,8 +435,9 @@ public sealed class RuBduConnector : IFeedConnector
                Metadata: metadata,
                Etag: null,
                LastModified: archiveLastModified ?? dto.IdentifyDate,
                PayloadId: gridFsId,
                ExpiresAt: null);
                PayloadId: recordId,
                ExpiresAt: null,
                Payload: payload);

            var upserted = await _documentStore.UpsertAsync(record, cancellationToken).ConfigureAwait(false);
            pendingDocuments.Add(upserted.Id);

@@ -5,7 +5,7 @@ using System.Linq;
using StellaOps.Concelier.Models;
using StellaOps.Concelier.Normalization.Cvss;
using StellaOps.Concelier.Normalization.SemVer;
using StellaOps.Concelier.Storage.Mongo.Documents;
using StellaOps.Concelier.Storage.Mongo;

namespace StellaOps.Concelier.Connector.Ru.Nkcki.Internal;


@@ -17,8 +17,8 @@ using StellaOps.Concelier.Connector.Ru.Nkcki.Configuration;
using StellaOps.Concelier.Connector.Ru.Nkcki.Internal;
using StellaOps.Concelier.Storage.Mongo;
using StellaOps.Concelier.Storage.Mongo.Advisories;
using StellaOps.Concelier.Storage.Mongo.Documents;
using StellaOps.Concelier.Storage.Mongo.Dtos;
using StellaOps.Concelier.Storage.Mongo;
using StellaOps.Concelier.Storage.Mongo;
using StellaOps.Plugin;
using StellaOps.Cryptography;

@@ -609,7 +609,8 @@ public sealed class RuNkckiConnector : IFeedConnector
|
||||
return false;
|
||||
}
|
||||
|
||||
var gridFsId = await _rawDocumentStorage.UploadAsync(SourceName, documentUri, payload, "application/json", null, cancellationToken).ConfigureAwait(false);
|
||||
var recordId = existing?.Id ?? Guid.NewGuid();
|
||||
_ = await _rawDocumentStorage.UploadAsync(SourceName, documentUri, payload, "application/json", null, cancellationToken, recordId).ConfigureAwait(false);
|
||||
|
||||
var metadata = new Dictionary<string, string>(StringComparer.OrdinalIgnoreCase)
|
||||
{
|
||||
@@ -627,7 +628,6 @@ public sealed class RuNkckiConnector : IFeedConnector
|
||||
metadata["ru-nkcki.mitre_id"] = dto.MitreId!;
|
||||
}
|
||||
|
||||
var recordId = existing?.Id ?? Guid.NewGuid();
|
||||
var lastModified = dto.DateUpdated ?? dto.DatePublished;
|
||||
var record = new DocumentRecord(
|
||||
recordId,
|
||||
@@ -641,8 +641,9 @@ public sealed class RuNkckiConnector : IFeedConnector
|
||||
Metadata: metadata,
|
||||
Etag: null,
|
||||
LastModified: lastModified,
|
||||
PayloadId: gridFsId,
|
||||
ExpiresAt: null);
|
||||
PayloadId: recordId,
|
||||
ExpiresAt: null,
|
||||
Payload: payload);
|
||||
|
||||
var upserted = await _documentStore.UpsertAsync(record, cancellationToken).ConfigureAwait(false);
|
||||
pendingDocuments.Add(upserted.Id);
|
||||
|
||||
@@ -14,8 +14,8 @@ using StellaOps.Concelier.Connector.StellaOpsMirror.Settings;
|
||||
using StellaOps.Concelier.Models;
|
||||
using StellaOps.Concelier.Storage.Mongo;
|
||||
using StellaOps.Concelier.Storage.Mongo.Advisories;
|
||||
using StellaOps.Concelier.Storage.Mongo.Documents;
|
||||
using StellaOps.Concelier.Storage.Mongo.Dtos;
|
||||
using StellaOps.Concelier.Storage.Mongo;
|
||||
using StellaOps.Concelier.Storage.Mongo;
|
||||
using StellaOps.Plugin;
|
||||
using StellaOps.Cryptography;
|
||||
|
||||
@@ -226,7 +226,8 @@ public sealed class StellaOpsMirrorConnector : IFeedConnector
|
||||
return existing;
|
||||
}
|
||||
|
||||
var gridFsId = await _rawDocumentStorage.UploadAsync(Source, absolute, payload, contentType, cancellationToken).ConfigureAwait(false);
|
||||
var recordId = existing?.Id ?? Guid.NewGuid();
|
||||
_ = await _rawDocumentStorage.UploadAsync(Source, absolute, payload, contentType, ExpiresAt: null, cancellationToken, recordId).ConfigureAwait(false);
|
||||
var now = _timeProvider.GetUtcNow();
|
||||
var sha = ComputeSha256(payload);
|
||||
|
||||
@@ -240,7 +241,7 @@ public sealed class StellaOpsMirrorConnector : IFeedConnector
|
||||
};
|
||||
|
||||
var record = new DocumentRecord(
|
||||
existing?.Id ?? Guid.NewGuid(),
|
||||
recordId,
|
||||
Source,
|
||||
absolute,
|
||||
now,
|
||||
@@ -251,8 +252,9 @@ public sealed class StellaOpsMirrorConnector : IFeedConnector
|
||||
Metadata: metadata,
|
||||
Etag: null,
|
||||
LastModified: generatedAt,
|
||||
PayloadId: gridFsId,
|
||||
ExpiresAt: null);
|
||||
PayloadId: recordId,
|
||||
ExpiresAt: null,
|
||||
Payload: payload);
|
||||
|
||||
var upserted = await _documentStore.UpsertAsync(record, cancellationToken).ConfigureAwait(false);
|
||||
|
||||
|
||||
@@ -17,8 +17,8 @@ using StellaOps.Concelier.Connector.Vndr.Adobe.Configuration;
|
||||
using StellaOps.Concelier.Connector.Vndr.Adobe.Internal;
|
||||
using StellaOps.Concelier.Storage.Mongo;
|
||||
using StellaOps.Concelier.Storage.Mongo.Advisories;
|
||||
using StellaOps.Concelier.Storage.Mongo.Documents;
|
||||
using StellaOps.Concelier.Storage.Mongo.Dtos;
|
||||
using StellaOps.Concelier.Storage.Mongo;
|
||||
using StellaOps.Concelier.Storage.Mongo;
|
||||
using StellaOps.Concelier.Storage.Mongo.PsirtFlags;
|
||||
using StellaOps.Concelier.Models;
|
||||
using StellaOps.Plugin;
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using StellaOps.Concelier.Storage.Mongo.Documents;
|
||||
using StellaOps.Concelier.Storage.Mongo;
|
||||
|
||||
namespace StellaOps.Concelier.Connector.Vndr.Adobe.Internal;
|
||||
|
||||
|
||||
@@ -14,8 +14,8 @@ using StellaOps.Concelier.Connector.Common.Fetch;
|
||||
using StellaOps.Concelier.Connector.Vndr.Apple.Internal;
|
||||
using StellaOps.Concelier.Storage.Mongo;
|
||||
using StellaOps.Concelier.Storage.Mongo.Advisories;
|
||||
using StellaOps.Concelier.Storage.Mongo.Documents;
|
||||
using StellaOps.Concelier.Storage.Mongo.Dtos;
|
||||
using StellaOps.Concelier.Storage.Mongo;
|
||||
using StellaOps.Concelier.Storage.Mongo;
|
||||
using StellaOps.Concelier.Storage.Mongo.PsirtFlags;
|
||||
using StellaOps.Plugin;
|
||||
|
||||
|
||||
@@ -4,8 +4,8 @@ using System.Linq;
|
||||
using StellaOps.Concelier.Models;
|
||||
using StellaOps.Concelier.Connector.Common;
|
||||
using StellaOps.Concelier.Connector.Common.Packages;
|
||||
using StellaOps.Concelier.Storage.Mongo.Documents;
|
||||
using StellaOps.Concelier.Storage.Mongo.Dtos;
|
||||
using StellaOps.Concelier.Storage.Mongo;
|
||||
using StellaOps.Concelier.Storage.Mongo;
|
||||
using StellaOps.Concelier.Storage.Mongo.PsirtFlags;
|
||||
|
||||
namespace StellaOps.Concelier.Connector.Vndr.Apple.Internal;
|
||||
|
||||
@@ -14,8 +14,8 @@ using StellaOps.Concelier.Connector.Vndr.Chromium.Configuration;
|
||||
using StellaOps.Concelier.Connector.Vndr.Chromium.Internal;
|
||||
using StellaOps.Concelier.Storage.Mongo;
|
||||
using StellaOps.Concelier.Storage.Mongo.Advisories;
|
||||
using StellaOps.Concelier.Storage.Mongo.Documents;
|
||||
using StellaOps.Concelier.Storage.Mongo.Dtos;
|
||||
using StellaOps.Concelier.Storage.Mongo;
|
||||
using StellaOps.Concelier.Storage.Mongo;
|
||||
using StellaOps.Concelier.Storage.Mongo.PsirtFlags;
|
||||
using StellaOps.Plugin;
|
||||
using Json.Schema;
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
using StellaOps.Concelier.Storage.Mongo.Documents;
|
||||
using StellaOps.Concelier.Storage.Mongo;
|
||||
|
||||
namespace StellaOps.Concelier.Connector.Vndr.Chromium.Internal;
|
||||
|
||||
|
||||
@@ -6,15 +6,14 @@ using System.Text.Json.Serialization;
|
||||
using Microsoft.Extensions.Logging;
|
||||
using Microsoft.Extensions.Options;
|
||||
using MongoDB.Bson;
|
||||
using MongoDB.Driver;
|
||||
using StellaOps.Concelier.Connector.Common;
|
||||
using StellaOps.Concelier.Connector.Common.Fetch;
|
||||
using StellaOps.Concelier.Connector.Vndr.Cisco.Configuration;
|
||||
using StellaOps.Concelier.Connector.Vndr.Cisco.Internal;
|
||||
using StellaOps.Concelier.Storage.Mongo;
|
||||
using StellaOps.Concelier.Storage.Mongo.Advisories;
|
||||
using StellaOps.Concelier.Storage.Mongo.Documents;
|
||||
using StellaOps.Concelier.Storage.Mongo.Dtos;
|
||||
using StellaOps.Concelier.Storage.Mongo;
|
||||
using StellaOps.Concelier.Storage.Mongo;
|
||||
using StellaOps.Plugin;
|
||||
|
||||
namespace StellaOps.Concelier.Connector.Vndr.Cisco;
|
||||
@@ -138,19 +137,16 @@ public sealed class CiscoConnector : IFeedConnector
|
||||
continue;
|
||||
}
|
||||
|
||||
ObjectId gridFsId;
|
||||
try
|
||||
{
|
||||
gridFsId = await _rawDocumentStorage.UploadAsync(SourceName, documentUri, payload, "application/json", cancellationToken).ConfigureAwait(false);
|
||||
}
|
||||
catch (MongoWriteException ex)
|
||||
{
|
||||
_diagnostics.FetchFailure();
|
||||
_logger.LogError(ex, "Failed to upload Cisco advisory {AdvisoryId} to GridFS", advisory.AdvisoryId);
|
||||
throw;
|
||||
}
|
||||
|
||||
var recordId = existing?.Id ?? Guid.NewGuid();
|
||||
_ = await _rawDocumentStorage.UploadAsync(
|
||||
SourceName,
|
||||
documentUri,
|
||||
payload,
|
||||
"application/json",
|
||||
ExpiresAt: null,
|
||||
cancellationToken,
|
||||
recordId).ConfigureAwait(false);
|
||||
|
||||
var record = new DocumentRecord(
|
||||
recordId,
|
||||
SourceName,
|
||||
@@ -163,8 +159,9 @@ public sealed class CiscoConnector : IFeedConnector
|
||||
BuildMetadata(advisory),
|
||||
Etag: null,
|
||||
LastModified: advisory.LastUpdated ?? advisory.FirstPublished ?? now,
|
||||
PayloadId: gridFsId,
|
||||
ExpiresAt: null);
|
||||
PayloadId: recordId,
|
||||
ExpiresAt: null,
|
||||
Payload: payload);
|
||||
|
||||
var upserted = await _documentStore.UpsertAsync(record, cancellationToken).ConfigureAwait(false);
|
||||
pendingDocuments.Add(upserted.Id);
|
||||
@@ -221,7 +218,7 @@ public sealed class CiscoConnector : IFeedConnector
|
||||
latestModified,
|
||||
latestAdvisoryId);
|
||||
}
|
||||
catch (Exception ex) when (ex is HttpRequestException or TaskCanceledException or JsonException or MongoException)
|
||||
catch (Exception ex) when (ex is HttpRequestException or TaskCanceledException or JsonException)
|
||||
{
|
||||
_diagnostics.FetchFailure();
|
||||
_logger.LogError(ex, "Cisco fetch failed");
|
||||
|
||||
@@ -3,8 +3,8 @@ using System.Collections.Generic;
|
||||
using System.Linq;
|
||||
using StellaOps.Concelier.Models;
|
||||
using StellaOps.Concelier.Connector.Common.Packages;
|
||||
using StellaOps.Concelier.Storage.Mongo.Documents;
|
||||
using StellaOps.Concelier.Storage.Mongo.Dtos;
|
||||
using StellaOps.Concelier.Storage.Mongo;
|
||||
using StellaOps.Concelier.Storage.Mongo;
|
||||
using StellaOps.Concelier.Normalization.SemVer;
|
||||
|
||||
namespace StellaOps.Concelier.Connector.Vndr.Cisco.Internal;
|
||||
|
||||
@@ -2,7 +2,7 @@ using System;
|
||||
using System.Collections.Generic;
|
||||
using System.Linq;
|
||||
using StellaOps.Concelier.Models;
|
||||
using StellaOps.Concelier.Storage.Mongo.Documents;
|
||||
using StellaOps.Concelier.Storage.Mongo;
|
||||
|
||||
namespace StellaOps.Concelier.Connector.Vndr.Msrc.Internal;
|
||||
|
||||
|
||||
@@ -16,8 +16,8 @@ using StellaOps.Concelier.Connector.Vndr.Msrc.Configuration;
|
||||
using StellaOps.Concelier.Connector.Vndr.Msrc.Internal;
|
||||
using StellaOps.Concelier.Storage.Mongo;
|
||||
using StellaOps.Concelier.Storage.Mongo.Advisories;
|
||||
using StellaOps.Concelier.Storage.Mongo.Documents;
|
||||
using StellaOps.Concelier.Storage.Mongo.Dtos;
|
||||
using StellaOps.Concelier.Storage.Mongo;
|
||||
using StellaOps.Concelier.Storage.Mongo;
|
||||
using StellaOps.Plugin;
|
||||
|
||||
namespace StellaOps.Concelier.Connector.Vndr.Msrc;
|
||||
@@ -141,7 +141,16 @@ public sealed class MsrcConnector : IFeedConnector
|
||||
var bytes = await _apiClient.FetchDetailAsync(vulnerabilityId, cancellationToken).ConfigureAwait(false);
|
||||
var sha = Convert.ToHexString(SHA256.HashData(bytes)).ToLowerInvariant();
|
||||
|
||||
var gridId = await _rawDocumentStorage.UploadAsync(SourceName, detailUri, bytes, "application/json", cancellationToken).ConfigureAwait(false);
|
||||
var documentId = existing?.Id ?? Guid.NewGuid();
|
||||
|
||||
_ = await _rawDocumentStorage.UploadAsync(
|
||||
SourceName,
|
||||
detailUri,
|
||||
bytes,
|
||||
"application/json",
|
||||
ExpiresAt: null,
|
||||
cancellationToken,
|
||||
documentId).ConfigureAwait(false);
|
||||
|
||||
var metadata = MsrcDocumentMetadata.CreateMetadata(summary);
|
||||
var headers = new Dictionary<string, string>(StringComparer.OrdinalIgnoreCase)
|
||||
@@ -149,7 +158,6 @@ public sealed class MsrcConnector : IFeedConnector
|
||||
["content-type"] = "application/json",
|
||||
};
|
||||
|
||||
var documentId = existing?.Id ?? Guid.NewGuid();
|
||||
var record = new DocumentRecord(
|
||||
documentId,
|
||||
SourceName,
|
||||
@@ -162,7 +170,8 @@ public sealed class MsrcConnector : IFeedConnector
|
||||
metadata,
|
||||
existing?.Etag,
|
||||
summary.LastModifiedDate,
|
||||
gridId);
|
||||
documentId,
|
||||
Payload: bytes);
|
||||
|
||||
var upserted = await _documentStore.UpsertAsync(record, cancellationToken).ConfigureAwait(false);
|
||||
|
||||
|
||||
@@ -2,7 +2,7 @@ using System;
|
||||
using System.Collections.Generic;
|
||||
using System.Linq;
|
||||
using MongoDB.Bson;
|
||||
using StellaOps.Concelier.Storage.Mongo.Documents;
|
||||
using StellaOps.Concelier.Storage.Mongo;
|
||||
|
||||
namespace StellaOps.Concelier.Connector.Vndr.Oracle.Internal;
|
||||
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using StellaOps.Concelier.Storage.Mongo.Documents;
|
||||
using StellaOps.Concelier.Storage.Mongo;
|
||||
|
||||
namespace StellaOps.Concelier.Connector.Vndr.Oracle.Internal;
|
||||
|
||||
|
||||
@@ -4,8 +4,8 @@ using System.Linq;
|
||||
using System.Text.RegularExpressions;
|
||||
using StellaOps.Concelier.Models;
|
||||
using StellaOps.Concelier.Connector.Common.Packages;
|
||||
using StellaOps.Concelier.Storage.Mongo.Documents;
|
||||
using StellaOps.Concelier.Storage.Mongo.Dtos;
|
||||
using StellaOps.Concelier.Storage.Mongo;
|
||||
using StellaOps.Concelier.Storage.Mongo;
|
||||
using StellaOps.Concelier.Storage.Mongo.PsirtFlags;
|
||||
|
||||
namespace StellaOps.Concelier.Connector.Vndr.Oracle.Internal;
|
||||
|
||||
@@ -13,8 +13,8 @@ using StellaOps.Concelier.Connector.Vndr.Oracle.Configuration;
|
||||
using StellaOps.Concelier.Connector.Vndr.Oracle.Internal;
|
||||
using StellaOps.Concelier.Storage.Mongo;
|
||||
using StellaOps.Concelier.Storage.Mongo.Advisories;
|
||||
using StellaOps.Concelier.Storage.Mongo.Documents;
|
||||
using StellaOps.Concelier.Storage.Mongo.Dtos;
|
||||
using StellaOps.Concelier.Storage.Mongo;
|
||||
using StellaOps.Concelier.Storage.Mongo;
|
||||
using StellaOps.Concelier.Storage.Mongo.PsirtFlags;
|
||||
using StellaOps.Plugin;
|
||||
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
using System;
|
||||
using MongoDB.Bson;
|
||||
using StellaOps.Concelier.Storage.Mongo.Documents;
|
||||
using StellaOps.Concelier.Storage.Mongo;
|
||||
|
||||
namespace StellaOps.Concelier.Connector.Vndr.Vmware.Internal;
|
||||
|
||||
|
||||
@@ -4,8 +4,8 @@ using System.Linq;
|
||||
using StellaOps.Concelier.Models;
|
||||
using StellaOps.Concelier.Connector.Common;
|
||||
using StellaOps.Concelier.Connector.Common.Packages;
|
||||
using StellaOps.Concelier.Storage.Mongo.Documents;
|
||||
using StellaOps.Concelier.Storage.Mongo.Dtos;
|
||||
using StellaOps.Concelier.Storage.Mongo;
|
||||
using StellaOps.Concelier.Storage.Mongo;
|
||||
using StellaOps.Concelier.Storage.Mongo.PsirtFlags;
|
||||
|
||||
namespace StellaOps.Concelier.Connector.Vndr.Vmware.Internal;
|
||||
|
||||
@@ -16,8 +16,8 @@ using StellaOps.Concelier.Connector.Vndr.Vmware.Configuration;
|
||||
using StellaOps.Concelier.Connector.Vndr.Vmware.Internal;
|
||||
using StellaOps.Concelier.Storage.Mongo;
|
||||
using StellaOps.Concelier.Storage.Mongo.Advisories;
|
||||
using StellaOps.Concelier.Storage.Mongo.Documents;
|
||||
using StellaOps.Concelier.Storage.Mongo.Dtos;
|
||||
using StellaOps.Concelier.Storage.Mongo;
|
||||
using StellaOps.Concelier.Storage.Mongo;
|
||||
using StellaOps.Concelier.Storage.Mongo.PsirtFlags;
|
||||
using StellaOps.Plugin;
|
||||
|
||||
|
||||
@@ -1,22 +1,20 @@
|
||||
using MongoDB.Driver;
|
||||
|
||||
namespace StellaOps.Concelier.Core.Jobs;
|
||||
|
||||
public interface IJobStore
|
||||
{
|
||||
Task<JobRunSnapshot> CreateAsync(JobRunCreateRequest request, CancellationToken cancellationToken, IClientSessionHandle? session = null);
|
||||
Task<JobRunSnapshot> CreateAsync(JobRunCreateRequest request, CancellationToken cancellationToken);
|
||||
|
||||
Task<JobRunSnapshot?> TryStartAsync(Guid runId, DateTimeOffset startedAt, CancellationToken cancellationToken, IClientSessionHandle? session = null);
|
||||
Task<JobRunSnapshot?> TryStartAsync(Guid runId, DateTimeOffset startedAt, CancellationToken cancellationToken);
|
||||
|
||||
Task<JobRunSnapshot?> TryCompleteAsync(Guid runId, JobRunCompletion completion, CancellationToken cancellationToken, IClientSessionHandle? session = null);
|
||||
Task<JobRunSnapshot?> TryCompleteAsync(Guid runId, JobRunCompletion completion, CancellationToken cancellationToken);
|
||||
|
||||
Task<JobRunSnapshot?> FindAsync(Guid runId, CancellationToken cancellationToken, IClientSessionHandle? session = null);
|
||||
Task<JobRunSnapshot?> FindAsync(Guid runId, CancellationToken cancellationToken);
|
||||
|
||||
Task<IReadOnlyList<JobRunSnapshot>> GetRecentRunsAsync(string? kind, int limit, CancellationToken cancellationToken, IClientSessionHandle? session = null);
|
||||
Task<IReadOnlyList<JobRunSnapshot>> GetRecentRunsAsync(string? kind, int limit, CancellationToken cancellationToken);
|
||||
|
||||
Task<IReadOnlyList<JobRunSnapshot>> GetActiveRunsAsync(CancellationToken cancellationToken, IClientSessionHandle? session = null);
|
||||
Task<IReadOnlyList<JobRunSnapshot>> GetActiveRunsAsync(CancellationToken cancellationToken);
|
||||
|
||||
Task<JobRunSnapshot?> GetLastRunAsync(string kind, CancellationToken cancellationToken, IClientSessionHandle? session = null);
|
||||
Task<JobRunSnapshot?> GetLastRunAsync(string kind, CancellationToken cancellationToken);
|
||||
|
||||
Task<IReadOnlyDictionary<string, JobRunSnapshot>> GetLastRunsAsync(IEnumerable<string> kinds, CancellationToken cancellationToken, IClientSessionHandle? session = null);
|
||||
Task<IReadOnlyDictionary<string, JobRunSnapshot>> GetLastRunsAsync(IEnumerable<string> kinds, CancellationToken cancellationToken);
|
||||
}
|
||||
|
||||
@@ -0,0 +1,37 @@
|
||||
using System;
|
||||
using Microsoft.Extensions.DependencyInjection;
|
||||
using Microsoft.Extensions.DependencyInjection.Extensions;
|
||||
|
||||
namespace StellaOps.Concelier.Storage.Mongo;
|
||||
|
||||
/// <summary>
|
||||
/// Lightweight compatibility bootstrapper to satisfy legacy Mongo wiring during Postgres migration.
|
||||
/// Registers in-memory stores only; no MongoDB driver/runtime required.
|
||||
/// </summary>
|
||||
public sealed class MongoBootstrapper
|
||||
{
|
||||
public Task InitializeAsync(CancellationToken cancellationToken) => Task.CompletedTask;
|
||||
}
|
||||
|
||||
public static class MongoServiceCollectionExtensions
|
||||
{
|
||||
public static IServiceCollection AddMongoStorage(this IServiceCollection services, Action<MongoStorageOptions>? configure = null)
|
||||
{
|
||||
var options = new MongoStorageOptions();
|
||||
configure?.Invoke(options);
|
||||
|
||||
services.TryAddSingleton<IDocumentStore, InMemoryDocumentStore>();
|
||||
services.TryAddSingleton<IDtoStore, InMemoryDtoStore>();
|
||||
services.TryAddSingleton<ISourceStateRepository, InMemorySourceStateRepository>();
|
||||
|
||||
services.TryAddSingleton<Advisories.IAdvisoryStore, Advisories.InMemoryAdvisoryStore>();
|
||||
services.TryAddSingleton<Aliases.IAliasStore, Aliases.InMemoryAliasStore>();
|
||||
services.TryAddSingleton<ChangeHistory.IChangeHistoryStore, ChangeHistory.InMemoryChangeHistoryStore>();
|
||||
services.TryAddSingleton<Exporting.IExportStateStore, Exporting.InMemoryExportStateStore>();
|
||||
services.TryAddSingleton<MergeEvents.IMergeEventStore, MergeEvents.InMemoryMergeEventStore>();
|
||||
services.TryAddSingleton<PsirtFlags.IPsirtFlagStore, PsirtFlags.InMemoryPsirtFlagStore>();
|
||||
|
||||
services.TryAddSingleton<MongoBootstrapper>();
|
||||
return services;
|
||||
}
|
||||
}
|
||||
@@ -1,4 +1,6 @@
|
||||
using System;
|
||||
using System.Collections;
|
||||
using System.Text;
|
||||
using System.Text.Json;
|
||||
|
||||
namespace MongoDB.Bson
|
||||
@@ -25,6 +27,14 @@ namespace MongoDB.Bson
|
||||
protected readonly object? _value;
|
||||
public BsonValue(object? value) => _value = value;
|
||||
internal object? RawValue => _value;
|
||||
public static implicit operator BsonValue(string value) => new BsonString(value ?? string.Empty);
|
||||
public static implicit operator BsonValue(bool value) => new BsonBoolean(value);
|
||||
public static implicit operator BsonValue(int value) => new BsonInt32(value);
|
||||
public static implicit operator BsonValue(long value) => new BsonInt64(value);
|
||||
public static implicit operator BsonValue(double value) => new BsonDouble(value);
|
||||
public static implicit operator BsonValue(DateTime value) => new BsonDateTime(DateTime.SpecifyKind(value, DateTimeKind.Utc));
|
||||
public static implicit operator BsonValue(DateTimeOffset value) => new BsonDateTime(value.UtcDateTime);
|
||||
public static implicit operator BsonValue(Guid value) => new BsonString(value.ToString("D"));
|
||||
public static BsonValue Create(object? value) => BsonDocument.WrapExternal(value);
|
||||
public virtual BsonType BsonType => _value switch
|
||||
{
|
||||
@@ -37,21 +47,36 @@ namespace MongoDB.Bson
|
||||
long => BsonType.Int64,
|
||||
double => BsonType.Double,
|
||||
DateTime => BsonType.DateTime,
|
||||
DateTimeOffset => BsonType.DateTime,
|
||||
Guid => BsonType.Guid,
|
||||
_ => BsonType.Null
|
||||
};
|
||||
public bool IsString => _value is string;
|
||||
public bool IsBsonDocument => _value is BsonDocument;
|
||||
public bool IsBsonArray => _value is BsonArray;
|
||||
public bool IsBsonNull => _value is null;
|
||||
public string AsString => _value?.ToString() ?? string.Empty;
|
||||
public BsonDocument AsBsonDocument => _value as BsonDocument ?? throw new InvalidCastException();
|
||||
public BsonArray AsBsonArray => _value as BsonArray ?? throw new InvalidCastException();
|
||||
public Guid AsGuid => _value is Guid g ? g : Guid.Empty;
|
||||
public DateTime AsDateTime => _value is DateTime dt ? dt : DateTime.MinValue;
|
||||
public DateTime AsDateTime => _value switch
|
||||
{
|
||||
DateTimeOffset dto => dto.UtcDateTime,
|
||||
DateTime dt => dt,
|
||||
_ => DateTime.MinValue
|
||||
};
|
||||
public int AsInt32 => _value is int i ? i : 0;
|
||||
public long AsInt64 => _value is long l ? l : 0;
|
||||
public double AsDouble => _value is double d ? d : 0d;
|
||||
public bool AsBoolean => _value is bool b && b;
|
||||
public bool IsInt32 => _value is int;
|
||||
public DateTime ToUniversalTime() => _value switch
|
||||
{
|
||||
DateTimeOffset dto => dto.UtcDateTime,
|
||||
DateTime dt => dt.Kind == DateTimeKind.Utc ? dt : dt.ToUniversalTime(),
|
||||
string s when DateTimeOffset.TryParse(s, out var parsed) => parsed.UtcDateTime,
|
||||
_ => DateTime.MinValue
|
||||
};
|
||||
public override string ToString() => _value?.ToString() ?? string.Empty;
|
||||
}
|
||||
|
||||
@@ -67,6 +92,27 @@ namespace MongoDB.Bson
|
||||
public static BsonNull Value { get; } = new();
|
||||
}
|
||||
|
||||
public sealed class BsonElement
|
||||
{
|
||||
public BsonElement(string name, BsonValue value)
|
||||
{
|
||||
Name = name;
|
||||
Value = value;
|
||||
}
|
||||
|
||||
public string Name { get; }
|
||||
public BsonValue Value { get; }
|
||||
}
|
||||
|
||||
public class BsonBinaryData : BsonValue
|
||||
{
|
||||
private readonly byte[] _bytes;
|
||||
public BsonBinaryData(byte[] bytes) : base(null) => _bytes = bytes ?? Array.Empty<byte>();
|
||||
public BsonBinaryData(Guid guid) : this(guid.ToByteArray()) { }
|
||||
public byte[] AsByteArray => _bytes;
|
||||
public Guid ToGuid() => new(_bytes);
|
||||
}
|
||||
|
||||
public class BsonArray : BsonValue, IEnumerable<BsonValue>
|
||||
{
|
||||
private readonly List<BsonValue> _items = new();
|
||||
@@ -112,13 +158,25 @@ namespace MongoDB.Bson
|
||||
public BsonValue this[string key]
|
||||
{
|
||||
get => _values[key];
|
||||
set => _values[key] = value;
|
||||
set => _values[key] = Wrap(value);
|
||||
}
|
||||
|
||||
public int ElementCount => _values.Count;
|
||||
public IEnumerable<BsonElement> Elements => _values.Select(kvp => new BsonElement(kvp.Key, kvp.Value));
|
||||
|
||||
public bool Contains(string key) => _values.ContainsKey(key);
|
||||
|
||||
public bool TryGetValue(string key, out BsonValue value) => _values.TryGetValue(key, out value!);
|
||||
|
||||
public BsonValue GetValue(string key, BsonValue? defaultValue = null)
|
||||
{
|
||||
return _values.TryGetValue(key, out var value)
|
||||
? value
|
||||
: defaultValue ?? new BsonValue(null);
|
||||
}
|
||||
|
||||
public bool Remove(string key) => _values.Remove(key);
|
||||
|
||||
public void Add(string key, BsonValue value) => _values[key] = value;
|
||||
public void Add(string key, object? value) => _values[key] = Wrap(value);
|
||||
|
||||
@@ -169,6 +227,8 @@ namespace MongoDB.Bson
|
||||
return JsonSerializer.Serialize(dict, new JsonSerializerOptions(JsonSerializerDefaults.Web));
|
||||
}
|
||||
|
||||
public byte[] ToBson() => Encoding.UTF8.GetBytes(ToJson());
|
||||
|
||||
private static object? Unwrap(BsonValue value) => value switch
|
||||
{
|
||||
BsonDocument doc => doc._values.ToDictionary(kvp => kvp.Key, kvp => Unwrap(kvp.Value)),
|
||||
@@ -186,22 +246,3 @@ namespace MongoDB.Bson.IO
|
||||
public JsonOutputMode OutputMode { get; set; } = JsonOutputMode.Strict;
|
||||
}
|
||||
}
|
||||
|
||||
namespace MongoDB.Driver
|
||||
{
|
||||
public interface IClientSessionHandle { }
|
||||
public class MongoCommandException : Exception
|
||||
{
|
||||
public string CodeName { get; }
|
||||
public MongoCommandException(string codeName, string message) : base(message) => CodeName = codeName;
|
||||
}
|
||||
public class GridFSFileNotFoundException : Exception
|
||||
{
|
||||
public GridFSFileNotFoundException() { }
|
||||
public GridFSFileNotFoundException(string message) : base(message) { }
|
||||
}
|
||||
public class MongoClient
|
||||
{
|
||||
public MongoClient(string connectionString) { }
|
||||
}
|
||||
}
|
||||
|
||||
@@ -0,0 +1,293 @@
|
||||
using System;
|
||||
using System.Collections;
|
||||
using System.Collections.Generic;
|
||||
using System.Linq;
|
||||
using System.Threading;
|
||||
using System.Threading.Tasks;
|
||||
|
||||
namespace MongoDB.Driver
|
||||
{
|
||||
public interface IClientSessionHandle : IDisposable { }
|
||||
|
||||
public class MongoCommandException : Exception
|
||||
{
|
||||
public MongoCommandException(string message, string codeName = "") : base(message) => CodeName = codeName;
|
||||
public string CodeName { get; }
|
||||
}
|
||||
|
||||
public class MongoClientSettings
|
||||
{
|
||||
public static MongoClientSettings FromUrl(MongoUrl url) => new();
|
||||
public string? ApplicationName { get; set; }
|
||||
}
|
||||
|
||||
public class MongoUrl
|
||||
{
|
||||
public MongoUrl(string url) => Url = url;
|
||||
public string Url { get; }
|
||||
public string DatabaseName => "default";
|
||||
}
|
||||
|
||||
public interface IMongoClient
|
||||
{
|
||||
IMongoDatabase GetDatabase(string name, MongoDatabaseSettings? settings = null);
|
||||
}
|
||||
|
||||
public class MongoClient : IMongoClient
|
||||
{
|
||||
public MongoClient(string connectionString) { }
|
||||
public MongoClient(MongoClientSettings settings) { }
|
||||
public IMongoDatabase GetDatabase(string name, MongoDatabaseSettings? settings = null) => new MongoDatabase(name);
|
||||
}
|
||||
|
||||
public class MongoDatabaseSettings { }
|
||||
|
||||
public interface IMongoDatabase
|
||||
{
|
||||
IMongoCollection<TDocument> GetCollection<TDocument>(string name, MongoCollectionSettings? settings = null);
|
||||
}
|
||||
|
||||
public class MongoDatabase : IMongoDatabase
|
||||
{
|
||||
public MongoDatabase(string name) => Name = name;
|
||||
public string Name { get; }
|
||||
public IMongoCollection<TDocument> GetCollection<TDocument>(string name, MongoCollectionSettings? settings = null) => new MongoCollection<TDocument>(name);
|
||||
}
|
||||
|
||||
public class MongoCollectionSettings { }
|
||||
|
||||
public interface IMongoCollection<TDocument>
|
||||
{
|
||||
Task InsertOneAsync(TDocument document, InsertOneOptions? options = null, CancellationToken cancellationToken = default);
|
||||
Task<ReplaceOneResult> ReplaceOneAsync(FilterDefinition<TDocument> filter, TDocument replacement, ReplaceOptions? options = null, CancellationToken cancellationToken = default);
|
||||
Task<DeleteResult> DeleteOneAsync(FilterDefinition<TDocument> filter, CancellationToken cancellationToken = default);
|
||||
Task<IAsyncCursor<TDocument>> FindAsync(FilterDefinition<TDocument> filter, FindOptions<TDocument, TDocument>? options = null, CancellationToken cancellationToken = default);
|
||||
IFindFluent<TDocument, TDocument> Find(FilterDefinition<TDocument> filter, FindOptions<TDocument, TDocument>? options = null);
|
||||
Task<long> CountDocumentsAsync(FilterDefinition<TDocument> filter, CountOptions? options = null, CancellationToken cancellationToken = default);
|
||||
Task<TProjection?> FindOneAndReplaceAsync<TProjection>(FilterDefinition<TDocument> filter, TDocument replacement, FindOneAndReplaceOptions<TDocument, TProjection>? options = null, CancellationToken cancellationToken = default);
|
||||
Task<TProjection?> FindOneAndUpdateAsync<TProjection>(FilterDefinition<TDocument> filter, UpdateDefinition<TDocument> update, FindOneAndUpdateOptions<TDocument, TProjection>? options = null, CancellationToken cancellationToken = default);
|
||||
IMongoIndexManager<TDocument> Indexes { get; }
|
||||
}
|
||||
|
||||
public class MongoCollection<TDocument> : IMongoCollection<TDocument>
|
||||
{
|
||||
private readonly List<TDocument> _docs = new();
|
||||
|
||||
public MongoCollection(string name)
|
||||
{
|
||||
Name = name;
|
||||
Indexes = new MongoIndexManager<TDocument>();
|
||||
}
|
||||
|
||||
public string Name { get; }
|
||||
public IMongoIndexManager<TDocument> Indexes { get; }
|
||||
|
||||
public Task InsertOneAsync(TDocument document, InsertOneOptions? options = null, CancellationToken cancellationToken = default)
|
||||
{
|
||||
_docs.Add(document);
|
||||
return Task.CompletedTask;
|
||||
}
|
||||
|
||||
public Task<ReplaceOneResult> ReplaceOneAsync(FilterDefinition<TDocument> filter, TDocument replacement, ReplaceOptions? options = null, CancellationToken cancellationToken = default)
|
||||
{
|
||||
_docs.Clear();
|
||||
_docs.Add(replacement);
|
||||
return Task.FromResult(new ReplaceOneResult());
|
||||
}
|
||||
|
||||
public Task<DeleteResult> DeleteOneAsync(FilterDefinition<TDocument> filter, CancellationToken cancellationToken = default)
|
||||
{
|
||||
var removed = _docs.Count > 0;
|
||||
_docs.Clear();
|
||||
return Task.FromResult(new DeleteResult(removed ? 1 : 0));
|
||||
}
|
||||
|
||||
public Task<IAsyncCursor<TDocument>> FindAsync(FilterDefinition<TDocument> filter, FindOptions<TDocument, TDocument>? options = null, CancellationToken cancellationToken = default)
|
||||
=> Task.FromResult<IAsyncCursor<TDocument>>(new AsyncCursor<TDocument>(_docs));
|
||||
|
||||
public IFindFluent<TDocument, TDocument> Find(FilterDefinition<TDocument> filter, FindOptions<TDocument, TDocument>? options = null)
|
||||
=> new FindFluent<TDocument>(_docs);
|
||||
|
||||
public Task<long> CountDocumentsAsync(FilterDefinition<TDocument> filter, CountOptions? options = null, CancellationToken cancellationToken = default)
|
||||
=> Task.FromResult((long)_docs.Count);
|
||||
|
||||
public Task<TProjection?> FindOneAndReplaceAsync<TProjection>(FilterDefinition<TDocument> filter, TDocument replacement, FindOneAndReplaceOptions<TDocument, TProjection>? options = null, CancellationToken cancellationToken = default)
|
||||
{
|
||||
_ = ReplaceOneAsync(filter, replacement, null, cancellationToken);
|
||||
return Task.FromResult(default(TProjection));
|
||||
}
|
||||
|
||||
public Task<TProjection?> FindOneAndUpdateAsync<TProjection>(FilterDefinition<TDocument> filter, UpdateDefinition<TDocument> update, FindOneAndUpdateOptions<TDocument, TProjection>? options = null, CancellationToken cancellationToken = default)
|
||||
=> Task.FromResult(default(TProjection));
|
||||
}
|
||||
|
||||
public interface IMongoIndexManager<TDocument>
|
||||
{
|
||||
Task<string> CreateOneAsync(IndexKeysDefinition<TDocument> keys, CreateIndexOptions? options = null, CancellationToken cancellationToken = default);
|
||||
}
|
||||
|
||||
public sealed class MongoIndexManager<TDocument> : IMongoIndexManager<TDocument>
|
||||
{
|
||||
public Task<string> CreateOneAsync(IndexKeysDefinition<TDocument> keys, CreateIndexOptions? options = null, CancellationToken cancellationToken = default)
|
||||
=> Task.FromResult("stub-index");
|
||||
}
|
||||
|
||||
public interface IAsyncCursor<out T> : IDisposable, IEnumerable<T>
|
||||
{
|
||||
IEnumerable<T> Current { get; }
|
||||
bool MoveNext(CancellationToken cancellationToken = default);
|
||||
Task<bool> MoveNextAsync(CancellationToken cancellationToken = default);
|
||||
}
|
||||
|
||||
public sealed class AsyncCursor<T> : IAsyncCursor<T>
|
||||
{
|
||||
private readonly IEnumerator<T> _enumerator;
|
||||
private bool _disposed;
|
||||
|
||||
public AsyncCursor(IEnumerable<T> items)
|
||||
{
|
||||
_enumerator = items?.GetEnumerator() ?? Enumerable.Empty<T>().GetEnumerator();
|
||||
}
|
||||
|
||||
public IEnumerable<T> Current => new[] { _enumerator.Current };
|
||||
|
||||
public bool MoveNext(CancellationToken cancellationToken = default) => _enumerator.MoveNext();
|
||||
|
||||
public Task<bool> MoveNextAsync(CancellationToken cancellationToken = default) => Task.FromResult(_enumerator.MoveNext());
|
||||
|
||||
public void Dispose()
|
||||
{
|
||||
if (_disposed) return;
|
||||
_enumerator.Dispose();
|
||||
_disposed = true;
|
||||
}
|
||||
|
||||
public IEnumerator<T> GetEnumerator() => _enumerator;
|
||||
IEnumerator IEnumerable.GetEnumerator() => _enumerator;
|
||||
}
|
||||
|
||||
public interface IFindFluent<TDocument, TProjection>
|
||||
{
|
||||
IFindFluent<TDocument, TProjection> Sort(SortDefinition<TDocument> sort);
|
||||
IFindFluent<TDocument, TProjection> Limit(int? limit);
|
||||
IFindFluent<TDocument, TProjection> Skip(int? skip);
|
||||
Task<List<TProjection>> ToListAsync(CancellationToken cancellationToken = default);
|
||||
List<TProjection> ToList();
|
||||
TProjection? FirstOrDefault(CancellationToken cancellationToken = default);
|
||||
Task<TProjection?> FirstOrDefaultAsync(CancellationToken cancellationToken = default);
|
||||
IFindFluent<TDocument, TNewProjection> Project<TNewProjection>(ProjectionDefinition<TDocument, TNewProjection> projection);
|
||||
}
|
||||
|
||||
public sealed class FindFluent<TDocument> : IFindFluent<TDocument, TDocument>
|
||||
{
|
||||
private readonly List<TDocument> _items;
|
||||
|
||||
public FindFluent(IEnumerable<TDocument> items) => _items = items?.ToList() ?? new List<TDocument>();
|
||||
|
||||
public IFindFluent<TDocument, TDocument> Sort(SortDefinition<TDocument> sort) => this;
|
||||
public IFindFluent<TDocument, TDocument> Limit(int? limit) => this;
|
||||
public IFindFluent<TDocument, TDocument> Skip(int? skip) => this;
|
||||
public Task<List<TDocument>> ToListAsync(CancellationToken cancellationToken = default) => Task.FromResult(ToList());
|
||||
public List<TDocument> ToList() => _items.ToList();
|
||||
public TDocument? FirstOrDefault(CancellationToken cancellationToken = default) => _items.FirstOrDefault();
|
||||
public Task<TDocument?> FirstOrDefaultAsync(CancellationToken cancellationToken = default) => Task.FromResult(FirstOrDefault());
|
||||
public IFindFluent<TDocument, TNewProjection> Project<TNewProjection>(ProjectionDefinition<TDocument, TNewProjection> projection)
|
||||
=> new FindFluentProjected<TDocument, TNewProjection>(Enumerable.Empty<TNewProjection>());
|
||||
}
|
||||
|
||||
public sealed class FindFluentProjected<TDocument, TProjection> : IFindFluent<TDocument, TProjection>
|
||||
{
|
||||
private readonly List<TProjection> _items;
|
||||
|
||||
public FindFluentProjected(IEnumerable<TProjection> items) => _items = items?.ToList() ?? new List<TProjection>();
|
||||
|
||||
public IFindFluent<TDocument, TProjection> Sort(SortDefinition<TDocument> sort) => this;
|
||||
public IFindFluent<TDocument, TProjection> Limit(int? limit) => this;
|
||||
public IFindFluent<TDocument, TProjection> Skip(int? skip) => this;
|
||||
public Task<List<TProjection>> ToListAsync(CancellationToken cancellationToken = default) => Task.FromResult(ToList());
|
||||
public List<TProjection> ToList() => _items.ToList();
|
||||
public TProjection? FirstOrDefault(CancellationToken cancellationToken = default) => _items.FirstOrDefault();
|
||||
public Task<TProjection?> FirstOrDefaultAsync(CancellationToken cancellationToken = default) => Task.FromResult(FirstOrDefault());
|
||||
public IFindFluent<TDocument, TNewProjection> Project<TNewProjection>(ProjectionDefinition<TDocument, TNewProjection> projection)
|
||||
=> new FindFluentProjected<TDocument, TNewProjection>(Enumerable.Empty<TNewProjection>());
|
||||
}
|
||||
|
||||
public class FilterDefinition<TDocument> { }
|
||||
public class UpdateDefinition<TDocument> { }
|
||||
public class ProjectionDefinition<TDocument, TProjection> { }
|
||||
public class SortDefinition<TDocument> { }
|
||||
public class CountOptions { }
|
||||
public class FindOptions<TDocument, TProjection> { }
|
||||
public class ReplaceOptions { public bool IsUpsert { get; set; } }
|
||||
public class FindOneAndReplaceOptions<TDocument, TProjection> { public bool IsUpsert { get; set; } }
|
||||
public class FindOneAndUpdateOptions<TDocument, TProjection> { public bool IsUpsert { get; set; } }
|
||||
public class InsertOneOptions { }
|
||||
public class CreateIndexOptions { }
|
||||
public class IndexKeysDefinition<TDocument> { }
|
||||
|
||||
public sealed class DeleteResult
|
||||
{
|
||||
public DeleteResult(long deletedCount) => DeletedCount = deletedCount;
|
||||
public long DeletedCount { get; }
|
||||
}
|
||||
|
||||
public sealed class ReplaceOneResult
|
||||
{
|
||||
public long MatchedCount { get; init; }
|
||||
public long ModifiedCount { get; init; }
|
||||
}
|
||||
|
||||
public sealed class UpdateResult
|
||||
{
|
||||
public UpdateResult(long modifiedCount) => ModifiedCount = modifiedCount;
|
||||
public long ModifiedCount { get; }
|
||||
}
|
||||
|
||||
public enum SortDirection { Ascending, Descending }
|
||||
|
||||
public sealed class SortDefinitionBuilder<TDocument>
|
||||
{
|
||||
public SortDefinition<TDocument> Ascending(string field) => new();
|
||||
public SortDefinition<TDocument> Descending(string field) => new();
|
||||
}
|
||||
|
||||
public sealed class ProjectionDefinitionBuilder<TDocument>
|
||||
{
|
||||
public ProjectionDefinition<TDocument, TDocument> Include(string field) => new();
|
||||
}
|
||||
|
||||
public sealed class FilterDefinitionBuilder<TDocument>
|
||||
{
|
||||
public FilterDefinition<TDocument> Empty => new();
|
||||
public FilterDefinition<TDocument> Eq<TField>(string field, TField value) => new();
|
||||
}
|
||||
|
||||
public static class Builders<TDocument>
|
||||
{
|
||||
public static FilterDefinitionBuilder<TDocument> Filter { get; } = new();
|
||||
public static SortDefinitionBuilder<TDocument> Sort { get; } = new();
|
||||
public static ProjectionDefinitionBuilder<TDocument> Projection { get; } = new();
|
||||
}
|
||||
}
|
||||
|
||||
namespace MongoDB.Driver.Linq
|
||||
{
|
||||
public interface IMongoQueryable<out T> : IQueryable<T> { }
|
||||
}
|
||||
|
||||
namespace Mongo2Go
|
||||
{
|
||||
public sealed class MongoDbRunner : IDisposable
|
||||
{
|
||||
public string ConnectionString { get; }
|
||||
public string DataDirectory { get; } = string.Empty;
|
||||
|
||||
private MongoDbRunner(string connectionString) => ConnectionString = connectionString;
|
||||
|
||||
public static MongoDbRunner Start() => new("mongodb://localhost:27017/fake");
|
||||
|
||||
public void Dispose()
|
||||
{
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -5,14 +5,6 @@ using StellaOps.Concelier.Models;
|
||||
|
||||
namespace StellaOps.Concelier.Storage.Mongo
|
||||
{
|
||||
public static class DocumentStatuses
|
||||
{
|
||||
public const string PendingParse = "pending_parse";
|
||||
public const string PendingMap = "pending_map";
|
||||
public const string Mapped = "mapped";
|
||||
public const string Failed = "failed";
|
||||
}
|
||||
|
||||
public static class MongoStorageDefaults
|
||||
{
|
||||
public static class Collections
|
||||
@@ -21,68 +13,107 @@ namespace StellaOps.Concelier.Storage.Mongo
|
||||
public const string AdvisoryRaw = "advisory_raw";
|
||||
public const string Alias = "aliases";
|
||||
public const string MergeEvent = "merge_events";
|
||||
public const string Document = "documents";
|
||||
}
|
||||
}
|
||||
|
||||
public sealed record MongoStorageOptions
|
||||
{
|
||||
public string DefaultTenant { get; init; } = "default";
|
||||
public TimeSpan RawDocumentRetention { get; init; } = TimeSpan.Zero;
|
||||
public TimeSpan RawDocumentRetentionTtlGrace { get; init; } = TimeSpan.Zero;
|
||||
public TimeSpan RawDocumentRetentionSweepInterval { get; init; } = TimeSpan.FromHours(1);
|
||||
public string ConnectionString { get; init; } = string.Empty;
|
||||
public string DatabaseName { get; init; } = "concelier";
|
||||
public string DefaultTenant { get; set; } = "default";
|
||||
public TimeSpan RawDocumentRetention { get; set; } = TimeSpan.Zero;
|
||||
public TimeSpan RawDocumentRetentionTtlGrace { get; set; } = TimeSpan.Zero;
|
||||
public TimeSpan RawDocumentRetentionSweepInterval { get; set; } = TimeSpan.FromHours(1);
|
||||
public string ConnectionString { get; set; } = string.Empty;
|
||||
public string DatabaseName { get; set; } = "concelier";
|
||||
public TimeSpan CommandTimeout { get; set; } = TimeSpan.FromSeconds(30);
|
||||
}
|
||||
|
||||
public sealed record DocumentRecord(
|
||||
Guid Id,
|
||||
string SourceName,
|
||||
string Uri,
|
||||
DateTimeOffset CreatedAt,
|
||||
string Sha256,
|
||||
string Status,
|
||||
string? ContentType = null,
|
||||
IReadOnlyDictionary<string, string>? Headers = null,
|
||||
IReadOnlyDictionary<string, string>? Metadata = null,
|
||||
string? Etag = null,
|
||||
DateTimeOffset? LastModified = null,
|
||||
Guid? PayloadId = null,
|
||||
DateTimeOffset? ExpiresAt = null,
|
||||
byte[]? Payload = null);
|
||||
|
||||
public interface IDocumentStore
|
||||
public sealed record DocumentRecord
|
||||
{
|
||||
Task<DocumentRecord?> FindBySourceAndUriAsync(string sourceName, string uri, CancellationToken cancellationToken, MongoDB.Driver.IClientSessionHandle? session = null);
|
||||
Task<DocumentRecord?> FindAsync(Guid id, CancellationToken cancellationToken, MongoDB.Driver.IClientSessionHandle? session = null);
|
||||
Task<DocumentRecord> UpsertAsync(DocumentRecord record, CancellationToken cancellationToken, MongoDB.Driver.IClientSessionHandle? session = null);
|
||||
Task UpdateStatusAsync(Guid id, string status, CancellationToken cancellationToken, MongoDB.Driver.IClientSessionHandle? session = null);
|
||||
public DocumentRecord(
|
||||
Guid Id,
|
||||
string SourceName,
|
||||
string Uri,
|
||||
DateTimeOffset CreatedAt,
|
||||
string Sha256,
|
||||
string Status = "pending_parse",
|
||||
string? ContentType = null,
|
||||
IReadOnlyDictionary<string, string>? Headers = null,
|
||||
IReadOnlyDictionary<string, string>? Metadata = null,
|
||||
string? Etag = null,
|
||||
DateTimeOffset? LastModified = null,
|
||||
Guid? PayloadId = null,
|
||||
DateTimeOffset? ExpiresAt = null,
|
||||
byte[]? Payload = null,
|
||||
DateTimeOffset? FetchedAt = null)
|
||||
{
|
||||
this.Id = Id;
|
||||
this.SourceName = SourceName;
|
||||
this.Uri = Uri;
|
||||
this.CreatedAt = CreatedAt;
|
||||
this.Sha256 = Sha256;
|
||||
this.Status = Status;
|
||||
this.ContentType = ContentType;
|
||||
this.Headers = Headers;
|
||||
this.Metadata = Metadata;
|
||||
this.Etag = Etag;
|
||||
this.LastModified = LastModified;
|
||||
this.PayloadId = PayloadId;
|
||||
this.ExpiresAt = ExpiresAt;
|
||||
this.Payload = Payload;
|
||||
this.FetchedAt = FetchedAt ?? CreatedAt;
|
||||
}
|
||||
|
||||
public Guid Id { get; init; }
|
||||
public string SourceName { get; init; }
|
||||
public string Uri { get; init; }
|
||||
public DateTimeOffset CreatedAt { get; init; }
|
||||
public DateTimeOffset FetchedAt { get; init; }
|
||||
public string Sha256 { get; init; }
|
||||
public string Status { get; init; }
|
||||
public string? ContentType { get; init; }
|
||||
public IReadOnlyDictionary<string, string>? Headers { get; init; }
|
||||
public IReadOnlyDictionary<string, string>? Metadata { get; init; }
|
||||
public string? Etag { get; init; }
|
||||
public DateTimeOffset? LastModified { get; init; }
|
||||
public Guid? PayloadId { get; init; }
|
||||
public DateTimeOffset? ExpiresAt { get; init; }
|
||||
public byte[]? Payload { get; init; }
|
||||
}
|
||||
|
||||
public sealed class InMemoryDocumentStore : IDocumentStore
|
||||
public interface IDocumentStore
|
||||
{
|
||||
Task<DocumentRecord?> FindBySourceAndUriAsync(string sourceName, string uri, CancellationToken cancellationToken);
|
||||
Task<DocumentRecord?> FindAsync(Guid id, CancellationToken cancellationToken);
|
||||
Task<DocumentRecord> UpsertAsync(DocumentRecord record, CancellationToken cancellationToken);
|
||||
Task UpdateStatusAsync(Guid id, string status, CancellationToken cancellationToken);
|
||||
}
|
||||
|
||||
public class InMemoryDocumentStore : IDocumentStore
|
||||
{
|
||||
private readonly ConcurrentDictionary<(string Source, string Uri), DocumentRecord> _records = new();
|
||||
private readonly ConcurrentDictionary<Guid, DocumentRecord> _byId = new();
|
||||
|
||||
public Task<DocumentRecord?> FindBySourceAndUriAsync(string sourceName, string uri, CancellationToken cancellationToken, MongoDB.Driver.IClientSessionHandle? session = null)
|
||||
{
|
||||
_records.TryGetValue((sourceName, uri), out var record);
|
||||
return Task.FromResult<DocumentRecord?>(record);
|
||||
}
|
||||
public Task<DocumentRecord?> FindBySourceAndUriAsync(string sourceName, string uri, CancellationToken cancellationToken)
|
||||
{
|
||||
_records.TryGetValue((sourceName, uri), out var record);
|
||||
return Task.FromResult<DocumentRecord?>(record);
|
||||
}
|
||||
|
||||
public Task<DocumentRecord?> FindAsync(Guid id, CancellationToken cancellationToken, MongoDB.Driver.IClientSessionHandle? session = null)
|
||||
{
|
||||
_byId.TryGetValue(id, out var record);
|
||||
return Task.FromResult<DocumentRecord?>(record);
|
||||
}
|
||||
public Task<DocumentRecord?> FindAsync(Guid id, CancellationToken cancellationToken)
|
||||
{
|
||||
_byId.TryGetValue(id, out var record);
|
||||
return Task.FromResult<DocumentRecord?>(record);
|
||||
}
|
||||
|
||||
public Task<DocumentRecord> UpsertAsync(DocumentRecord record, CancellationToken cancellationToken, MongoDB.Driver.IClientSessionHandle? session = null)
|
||||
{
|
||||
_records[(record.SourceName, record.Uri)] = record;
|
||||
_byId[record.Id] = record;
|
||||
return Task.FromResult(record);
|
||||
}
|
||||
public Task<DocumentRecord> UpsertAsync(DocumentRecord record, CancellationToken cancellationToken)
|
||||
{
|
||||
_records[(record.SourceName, record.Uri)] = record;
|
||||
_byId[record.Id] = record;
|
||||
return Task.FromResult(record);
|
||||
}
|
||||
|
||||
public Task UpdateStatusAsync(Guid id, string status, CancellationToken cancellationToken, MongoDB.Driver.IClientSessionHandle? session = null)
|
||||
public Task UpdateStatusAsync(Guid id, string status, CancellationToken cancellationToken)
|
||||
{
|
||||
if (_byId.TryGetValue(id, out var existing))
|
||||
{
|
||||
@@ -94,38 +125,66 @@ namespace StellaOps.Concelier.Storage.Mongo
|
||||
}
|
||||
}
|
||||
|
||||
public sealed record DtoRecord(
|
||||
public class DocumentStore : IDocumentStore
|
||||
{
|
||||
private readonly InMemoryDocumentStore _inner = new();
|
||||
|
||||
public Task<DocumentRecord?> FindBySourceAndUriAsync(string sourceName, string uri, CancellationToken cancellationToken)
|
||||
=> _inner.FindBySourceAndUriAsync(sourceName, uri, cancellationToken);
|
||||
|
||||
public Task<DocumentRecord?> FindAsync(Guid id, CancellationToken cancellationToken)
|
||||
=> _inner.FindAsync(id, cancellationToken);
|
||||
|
||||
public Task<DocumentRecord> UpsertAsync(DocumentRecord record, CancellationToken cancellationToken)
|
||||
=> _inner.UpsertAsync(record, cancellationToken);
|
||||
|
||||
public Task UpdateStatusAsync(Guid id, string status, CancellationToken cancellationToken)
|
||||
=> _inner.UpdateStatusAsync(id, status, cancellationToken);
|
||||
}
|
||||
|
||||
public record DtoRecord(
|
||||
Guid Id,
|
||||
Guid DocumentId,
|
||||
string SourceName,
|
||||
string Format,
|
||||
MongoDB.Bson.BsonDocument Payload,
|
||||
DateTimeOffset CreatedAt);
|
||||
|
||||
public interface IDtoStore
|
||||
DateTimeOffset CreatedAt)
|
||||
{
|
||||
Task<DtoRecord> UpsertAsync(DtoRecord record, CancellationToken cancellationToken, MongoDB.Driver.IClientSessionHandle? session = null);
|
||||
Task<DtoRecord?> FindByDocumentIdAsync(Guid documentId, CancellationToken cancellationToken, MongoDB.Driver.IClientSessionHandle? session = null);
|
||||
public string SchemaVersion { get; init; } = string.Empty;
|
||||
public DateTimeOffset ValidatedAt { get; init; } = CreatedAt;
|
||||
}
|
||||
|
||||
public sealed class InMemoryDtoStore : IDtoStore
|
||||
public interface IDtoStore
|
||||
{
|
||||
Task<DtoRecord> UpsertAsync(DtoRecord record, CancellationToken cancellationToken);
|
||||
Task<DtoRecord?> FindByDocumentIdAsync(Guid documentId, CancellationToken cancellationToken);
|
||||
Task<IReadOnlyList<DtoRecord>> GetBySourceAsync(string sourceName, CancellationToken cancellationToken);
|
||||
}
|
||||
|
||||
public class InMemoryDtoStore : IDtoStore
|
||||
{
|
||||
private readonly ConcurrentDictionary<Guid, DtoRecord> _records = new();
|
||||
|
||||
public Task<DtoRecord> UpsertAsync(DtoRecord record, CancellationToken cancellationToken)
|
||||
{
|
||||
private readonly ConcurrentDictionary<Guid, DtoRecord> _records = new();
|
||||
|
||||
public Task<DtoRecord> UpsertAsync(DtoRecord record, CancellationToken cancellationToken, MongoDB.Driver.IClientSessionHandle? session = null)
|
||||
{
|
||||
_records[record.DocumentId] = record;
|
||||
return Task.FromResult(record);
|
||||
}
|
||||
|
||||
public Task<DtoRecord?> FindByDocumentIdAsync(Guid documentId, CancellationToken cancellationToken, MongoDB.Driver.IClientSessionHandle? session = null)
|
||||
{
|
||||
_records.TryGetValue(documentId, out var record);
|
||||
return Task.FromResult<DtoRecord?>(record);
|
||||
}
|
||||
_records[record.DocumentId] = record;
|
||||
return Task.FromResult(record);
|
||||
}
|
||||
|
||||
public sealed class RawDocumentStorage
|
||||
public Task<DtoRecord?> FindByDocumentIdAsync(Guid documentId, CancellationToken cancellationToken)
|
||||
{
|
||||
_records.TryGetValue(documentId, out var record);
|
||||
return Task.FromResult<DtoRecord?>(record);
|
||||
}
|
||||
|
||||
public Task<IReadOnlyList<DtoRecord>> GetBySourceAsync(string sourceName, CancellationToken cancellationToken)
|
||||
{
|
||||
var matches = _records.Values.Where(r => string.Equals(r.SourceName, sourceName, StringComparison.OrdinalIgnoreCase)).ToArray();
|
||||
return Task.FromResult<IReadOnlyList<DtoRecord>>(matches);
|
||||
}
|
||||
}
|
||||
|
||||
internal sealed class RawDocumentStorage
|
||||
{
|
||||
private readonly ConcurrentDictionary<Guid, byte[]> _blobs = new();
|
||||
|
||||
@@ -155,16 +214,27 @@ namespace StellaOps.Concelier.Storage.Mongo
|
||||
}
|
||||
}
|
||||
|
||||
public sealed record SourceStateRecord(string SourceName, MongoDB.Bson.BsonDocument? Cursor, DateTimeOffset UpdatedAt);
|
||||
public sealed record SourceStateRecord(
|
||||
string SourceName,
|
||||
bool Enabled,
|
||||
bool Paused,
|
||||
MongoDB.Bson.BsonDocument? Cursor,
|
||||
DateTimeOffset? LastSuccess,
|
||||
DateTimeOffset? LastFailure,
|
||||
int FailCount,
|
||||
DateTimeOffset? BackoffUntil,
|
||||
DateTimeOffset UpdatedAt,
|
||||
string? LastFailureReason);
|
||||
|
||||
public interface ISourceStateRepository
|
||||
{
|
||||
Task<SourceStateRecord?> TryGetAsync(string sourceName, CancellationToken cancellationToken);
|
||||
Task UpdateCursorAsync(string sourceName, MongoDB.Bson.BsonDocument cursor, DateTimeOffset completedAt, CancellationToken cancellationToken);
|
||||
Task MarkFailureAsync(string sourceName, DateTimeOffset now, TimeSpan backoff, string reason, CancellationToken cancellationToken);
|
||||
Task UpsertAsync(SourceStateRecord record, CancellationToken cancellationToken);
|
||||
}
|
||||
|
||||
public sealed class InMemorySourceStateRepository : ISourceStateRepository
|
||||
public class InMemorySourceStateRepository : ISourceStateRepository
|
||||
{
|
||||
private readonly ConcurrentDictionary<string, SourceStateRecord> _states = new(StringComparer.OrdinalIgnoreCase);
|
||||
|
||||
@@ -176,15 +246,59 @@ namespace StellaOps.Concelier.Storage.Mongo
|
||||
|
||||
public Task UpdateCursorAsync(string sourceName, MongoDB.Bson.BsonDocument cursor, DateTimeOffset completedAt, CancellationToken cancellationToken)
|
||||
{
|
||||
_states[sourceName] = new SourceStateRecord(sourceName, cursor.DeepClone(), completedAt);
|
||||
var current = _states.TryGetValue(sourceName, out var existing) ? existing : null;
|
||||
_states[sourceName] = new SourceStateRecord(
|
||||
sourceName,
|
||||
Enabled: current?.Enabled ?? true,
|
||||
Paused: current?.Paused ?? false,
|
||||
Cursor: cursor.DeepClone(),
|
||||
LastSuccess: completedAt,
|
||||
LastFailure: current?.LastFailure,
|
||||
FailCount: current?.FailCount ?? 0,
|
||||
BackoffUntil: current?.BackoffUntil,
|
||||
UpdatedAt: completedAt,
|
||||
LastFailureReason: current?.LastFailureReason);
|
||||
return Task.CompletedTask;
|
||||
}
|
||||
|
||||
public Task MarkFailureAsync(string sourceName, DateTimeOffset now, TimeSpan backoff, string reason, CancellationToken cancellationToken)
|
||||
{
|
||||
_states[sourceName] = new SourceStateRecord(sourceName, null, now.Add(backoff));
|
||||
_states[sourceName] = new SourceStateRecord(
|
||||
sourceName,
|
||||
Enabled: true,
|
||||
Paused: false,
|
||||
Cursor: null,
|
||||
LastSuccess: null,
|
||||
LastFailure: now,
|
||||
FailCount: (_states.TryGetValue(sourceName, out var existing) ? existing.FailCount : 0) + 1,
|
||||
BackoffUntil: now.Add(backoff),
|
||||
UpdatedAt: now,
|
||||
LastFailureReason: reason);
|
||||
return Task.CompletedTask;
|
||||
}
|
||||
|
||||
public Task UpsertAsync(SourceStateRecord record, CancellationToken cancellationToken)
|
||||
{
|
||||
_states[record.SourceName] = record;
|
||||
return Task.CompletedTask;
|
||||
}
|
||||
}
|
||||
|
||||
public class MongoSourceStateRepository : ISourceStateRepository
|
||||
{
|
||||
private readonly InMemorySourceStateRepository _inner = new();
|
||||
|
||||
public Task<SourceStateRecord?> TryGetAsync(string sourceName, CancellationToken cancellationToken)
|
||||
=> _inner.TryGetAsync(sourceName, cancellationToken);
|
||||
|
||||
public Task UpdateCursorAsync(string sourceName, MongoDB.Bson.BsonDocument cursor, DateTimeOffset completedAt, CancellationToken cancellationToken)
|
||||
=> _inner.UpdateCursorAsync(sourceName, cursor, completedAt, cancellationToken);
|
||||
|
||||
public Task MarkFailureAsync(string sourceName, DateTimeOffset now, TimeSpan backoff, string reason, CancellationToken cancellationToken)
|
||||
=> _inner.MarkFailureAsync(sourceName, now, backoff, reason, cancellationToken);
|
||||
|
||||
public Task UpsertAsync(SourceStateRecord record, CancellationToken cancellationToken)
|
||||
=> _inner.UpsertAsync(record, cancellationToken);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -244,7 +358,7 @@ namespace StellaOps.Concelier.Storage.Mongo.Aliases
}

public sealed record AliasEntry(string Scheme, string Value);
public sealed record AliasRecord(string AdvisoryKey, string Scheme, string Value);
public sealed record AliasRecord(string AdvisoryKey, string Scheme, string Value, DateTimeOffset? UpdatedAt = null);
public sealed record AliasCollision(string Scheme, string Value, IReadOnlyList<string> AdvisoryKeys);

public interface IAliasStore
@@ -405,6 +519,39 @@ namespace StellaOps.Concelier.Storage.Mongo.Exporting
}
}

namespace StellaOps.Concelier.Storage.Mongo.JpFlags
{
public sealed record JpFlagRecord(
string AdvisoryKey,
string SourceName,
string Category,
string? VendorStatus,
DateTimeOffset CreatedAt);

public interface IJpFlagStore
{
Task UpsertAsync(JpFlagRecord record, CancellationToken cancellationToken);
Task<JpFlagRecord?> FindAsync(string advisoryKey, CancellationToken cancellationToken);
}

public sealed class InMemoryJpFlagStore : IJpFlagStore
{
private readonly ConcurrentDictionary<string, JpFlagRecord> _records = new(StringComparer.OrdinalIgnoreCase);

public Task UpsertAsync(JpFlagRecord record, CancellationToken cancellationToken)
{
_records[record.AdvisoryKey] = record;
return Task.CompletedTask;
}

public Task<JpFlagRecord?> FindAsync(string advisoryKey, CancellationToken cancellationToken)
{
_records.TryGetValue(advisoryKey, out var record);
return Task.FromResult<JpFlagRecord?>(record);
}
}
}

namespace StellaOps.Concelier.Storage.Mongo.MergeEvents
{
public sealed record MergeEventRecord(
@@ -452,6 +599,55 @@ namespace StellaOps.Concelier.Storage.Mongo.MergeEvents
}
}

namespace StellaOps.Concelier.Storage.Mongo.Documents
{
using DocumentRecord = StellaOps.Concelier.Storage.Mongo.DocumentRecord;
using IDocumentStore = StellaOps.Concelier.Storage.Mongo.IDocumentStore;
using InMemoryDocumentStore = StellaOps.Concelier.Storage.Mongo.InMemoryDocumentStore;
using ISourceStateRepository = StellaOps.Concelier.Storage.Mongo.ISourceStateRepository;
using InMemorySourceStateRepository = StellaOps.Concelier.Storage.Mongo.InMemorySourceStateRepository;
}

namespace StellaOps.Concelier.Storage.Mongo.Dtos
{
using DtoRecord = StellaOps.Concelier.Storage.Mongo.DtoRecord;
using IDtoStore = StellaOps.Concelier.Storage.Mongo.IDtoStore;
using InMemoryDtoStore = StellaOps.Concelier.Storage.Mongo.InMemoryDtoStore;
}

namespace StellaOps.Concelier.Storage.Mongo.PsirtFlags
{
public sealed record PsirtFlagRecord(string AdvisoryId, string Vendor, string SourceName, string? ExternalId, DateTimeOffset RecordedAt);

public interface IPsirtFlagStore
{
Task UpsertAsync(PsirtFlagRecord flag, CancellationToken cancellationToken);
Task<IReadOnlyList<PsirtFlagRecord>> GetRecentAsync(string advisoryKey, int limit, CancellationToken cancellationToken);
}

public sealed class InMemoryPsirtFlagStore : IPsirtFlagStore
{
private readonly ConcurrentDictionary<string, PsirtFlagRecord> _records = new(StringComparer.OrdinalIgnoreCase);

public Task UpsertAsync(PsirtFlagRecord flag, CancellationToken cancellationToken)
{
_records[flag.AdvisoryId] = flag;
return Task.CompletedTask;
}

public Task<IReadOnlyList<PsirtFlagRecord>> GetRecentAsync(string advisoryKey, int limit, CancellationToken cancellationToken)
{
var records = _records.Values
.Where(f => string.Equals(f.AdvisoryId, advisoryKey, StringComparison.OrdinalIgnoreCase))
.OrderByDescending(f => f.RecordedAt)
.Take(limit)
.ToArray();

return Task.FromResult<IReadOnlyList<PsirtFlagRecord>>(records);
}
}
}

namespace StellaOps.Concelier.Storage.Mongo
{
// Already defined above; kept for backward compatibility with legacy using directives.

@@ -20,19 +20,19 @@ public sealed class PostgresDocumentStore : IDocumentStore
_sourceRepository = sourceRepository ?? throw new ArgumentNullException(nameof(sourceRepository));
}

public async Task<DocumentRecord?> FindAsync(Guid id, CancellationToken cancellationToken, MongoDB.Driver.IClientSessionHandle? session = null)
public async Task<DocumentRecord?> FindAsync(Guid id, CancellationToken cancellationToken)
{
var row = await _repository.FindAsync(id, cancellationToken).ConfigureAwait(false);
return row is null ? null : Map(row);
}

public async Task<DocumentRecord?> FindBySourceAndUriAsync(string sourceName, string uri, CancellationToken cancellationToken, MongoDB.Driver.IClientSessionHandle? session = null)
public async Task<DocumentRecord?> FindBySourceAndUriAsync(string sourceName, string uri, CancellationToken cancellationToken)
{
var row = await _repository.FindBySourceAndUriAsync(sourceName, uri, cancellationToken).ConfigureAwait(false);
return row is null ? null : Map(row);
}

public async Task<DocumentRecord> UpsertAsync(DocumentRecord record, CancellationToken cancellationToken, MongoDB.Driver.IClientSessionHandle? session = null)
public async Task<DocumentRecord> UpsertAsync(DocumentRecord record, CancellationToken cancellationToken)
{
// Ensure source exists
var source = await _sourceRepository.GetByKeyAsync(record.SourceName, cancellationToken).ConfigureAwait(false)
@@ -59,7 +59,7 @@ public sealed class PostgresDocumentStore : IDocumentStore
return Map(saved);
}

public async Task UpdateStatusAsync(Guid id, string status, CancellationToken cancellationToken, MongoDB.Driver.IClientSessionHandle? session = null)
public async Task UpdateStatusAsync(Guid id, string status, CancellationToken cancellationToken)
{
await _repository.UpdateStatusAsync(id, status, cancellationToken).ConfigureAwait(false);
}

@@ -50,7 +50,7 @@ public sealed class AdvisoryRepository : RepositoryBase<ConcelierDataSource>, IA
|
||||
{
|
||||
const string sql = """
|
||||
SELECT id, advisory_key, primary_vuln_id, source_id, title, summary, description,
|
||||
severity, published_at, modified_at, withdrawn_at, provenance::text, raw_payload::text,
|
||||
severity, published_at, modified_at, withdrawn_at, provenance::text, raw_Payload::text,
|
||||
created_at, updated_at
|
||||
FROM vuln.advisories
|
||||
WHERE id = @id
|
||||
@@ -69,7 +69,7 @@ public sealed class AdvisoryRepository : RepositoryBase<ConcelierDataSource>, IA
|
||||
{
|
||||
const string sql = """
|
||||
SELECT id, advisory_key, primary_vuln_id, source_id, title, summary, description,
|
||||
severity, published_at, modified_at, withdrawn_at, provenance::text, raw_payload::text,
|
||||
severity, published_at, modified_at, withdrawn_at, provenance::text, raw_Payload::text,
|
||||
created_at, updated_at
|
||||
FROM vuln.advisories
|
||||
WHERE advisory_key = @advisory_key
|
||||
@@ -88,7 +88,7 @@ public sealed class AdvisoryRepository : RepositoryBase<ConcelierDataSource>, IA
|
||||
{
|
||||
const string sql = """
|
||||
SELECT id, advisory_key, primary_vuln_id, source_id, title, summary, description,
|
||||
severity, published_at, modified_at, withdrawn_at, provenance::text, raw_payload::text,
|
||||
severity, published_at, modified_at, withdrawn_at, provenance::text, raw_Payload::text,
|
||||
created_at, updated_at
|
||||
FROM vuln.advisories
|
||||
WHERE primary_vuln_id = @vuln_id
|
||||
@@ -107,7 +107,7 @@ public sealed class AdvisoryRepository : RepositoryBase<ConcelierDataSource>, IA
|
||||
{
|
||||
const string sql = """
|
||||
SELECT a.id, a.advisory_key, a.primary_vuln_id, a.source_id, a.title, a.summary, a.description,
|
||||
a.severity, a.published_at, a.modified_at, a.withdrawn_at, a.provenance::text, a.raw_payload::text,
|
||||
a.severity, a.published_at, a.modified_at, a.withdrawn_at, a.provenance::text, a.raw_Payload::text,
|
||||
a.created_at, a.updated_at
|
||||
FROM vuln.advisories a
|
||||
JOIN vuln.advisory_aliases al ON al.advisory_id = a.id
|
||||
@@ -132,7 +132,7 @@ public sealed class AdvisoryRepository : RepositoryBase<ConcelierDataSource>, IA
|
||||
{
|
||||
const string sql = """
|
||||
SELECT a.id, a.advisory_key, a.primary_vuln_id, a.source_id, a.title, a.summary, a.description,
|
||||
a.severity, a.published_at, a.modified_at, a.withdrawn_at, a.provenance::text, a.raw_payload::text,
|
||||
a.severity, a.published_at, a.modified_at, a.withdrawn_at, a.provenance::text, a.raw_Payload::text,
|
||||
a.created_at, a.updated_at
|
||||
FROM vuln.advisories a
|
||||
JOIN vuln.advisory_affected af ON af.advisory_id = a.id
|
||||
@@ -164,7 +164,7 @@ public sealed class AdvisoryRepository : RepositoryBase<ConcelierDataSource>, IA
|
||||
{
|
||||
const string sql = """
|
||||
SELECT a.id, a.advisory_key, a.primary_vuln_id, a.source_id, a.title, a.summary, a.description,
|
||||
a.severity, a.published_at, a.modified_at, a.withdrawn_at, a.provenance::text, a.raw_payload::text,
|
||||
a.severity, a.published_at, a.modified_at, a.withdrawn_at, a.provenance::text, a.raw_Payload::text,
|
||||
a.created_at, a.updated_at
|
||||
FROM vuln.advisories a
|
||||
JOIN vuln.advisory_affected af ON af.advisory_id = a.id
|
||||
@@ -196,7 +196,7 @@ public sealed class AdvisoryRepository : RepositoryBase<ConcelierDataSource>, IA
|
||||
{
|
||||
var sql = """
|
||||
SELECT id, advisory_key, primary_vuln_id, source_id, title, summary, description,
|
||||
severity, published_at, modified_at, withdrawn_at, provenance::text, raw_payload::text,
|
||||
severity, published_at, modified_at, withdrawn_at, provenance::text, raw_Payload::text,
|
||||
created_at, updated_at,
|
||||
ts_rank(search_vector, websearch_to_tsquery('english', @query)) as rank
|
||||
FROM vuln.advisories
|
||||
@@ -236,7 +236,7 @@ public sealed class AdvisoryRepository : RepositoryBase<ConcelierDataSource>, IA
|
||||
{
|
||||
const string sql = """
|
||||
SELECT id, advisory_key, primary_vuln_id, source_id, title, summary, description,
|
||||
severity, published_at, modified_at, withdrawn_at, provenance::text, raw_payload::text,
|
||||
severity, published_at, modified_at, withdrawn_at, provenance::text, raw_Payload::text,
|
||||
created_at, updated_at
|
||||
FROM vuln.advisories
|
||||
WHERE severity = @severity
|
||||
@@ -265,7 +265,7 @@ public sealed class AdvisoryRepository : RepositoryBase<ConcelierDataSource>, IA
|
||||
{
|
||||
const string sql = """
|
||||
SELECT id, advisory_key, primary_vuln_id, source_id, title, summary, description,
|
||||
severity, published_at, modified_at, withdrawn_at, provenance::text, raw_payload::text,
|
||||
severity, published_at, modified_at, withdrawn_at, provenance::text, raw_Payload::text,
|
||||
created_at, updated_at
|
||||
FROM vuln.advisories
|
||||
WHERE modified_at > @since
|
||||
@@ -294,7 +294,7 @@ public sealed class AdvisoryRepository : RepositoryBase<ConcelierDataSource>, IA
|
||||
{
|
||||
const string sql = """
|
||||
SELECT id, advisory_key, primary_vuln_id, source_id, title, summary, description,
|
||||
severity, published_at, modified_at, withdrawn_at, provenance::text, raw_payload::text,
|
||||
severity, published_at, modified_at, withdrawn_at, provenance::text, raw_Payload::text,
|
||||
created_at, updated_at
|
||||
FROM vuln.advisories
|
||||
WHERE source_id = @source_id
|
||||
@@ -370,7 +370,7 @@ public sealed class AdvisoryRepository : RepositoryBase<ConcelierDataSource>, IA
|
||||
)
|
||||
VALUES (
|
||||
@id, @advisory_key, @primary_vuln_id, @source_id, @title, @summary, @description,
|
||||
@severity, @published_at, @modified_at, @withdrawn_at, @provenance::jsonb, @raw_payload::jsonb
|
||||
@severity, @published_at, @modified_at, @withdrawn_at, @provenance::jsonb, @raw_Payload::jsonb
|
||||
)
|
||||
ON CONFLICT (advisory_key) DO UPDATE SET
|
||||
primary_vuln_id = EXCLUDED.primary_vuln_id,
|
||||
@@ -386,7 +386,7 @@ public sealed class AdvisoryRepository : RepositoryBase<ConcelierDataSource>, IA
|
||||
raw_payload = EXCLUDED.raw_payload,
|
||||
updated_at = NOW()
|
||||
RETURNING id, advisory_key, primary_vuln_id, source_id, title, summary, description,
|
||||
severity, published_at, modified_at, withdrawn_at, provenance::text, raw_payload::text,
|
||||
severity, published_at, modified_at, withdrawn_at, provenance::text, raw_Payload::text,
|
||||
created_at, updated_at
|
||||
""";
|
||||
|
||||
|
||||
@@ -44,6 +44,7 @@ public static class ServiceCollectionExtensions
services.AddScoped<ISourceStateRepository, SourceStateRepository>();
services.AddScoped<MongoAdvisories.IAdvisoryStore, PostgresAdvisoryStore>();
services.AddScoped<IDocumentRepository, DocumentRepository>();
services.AddScoped<MongoContracts.ISourceStateRepository, PostgresSourceStateAdapter>();
services.AddScoped<IFeedSnapshotRepository, FeedSnapshotRepository>();
services.AddScoped<IAdvisorySnapshotRepository, AdvisorySnapshotRepository>();
services.AddScoped<IMergeEventRepository, MergeEventRepository>();
@@ -81,6 +82,7 @@ public static class ServiceCollectionExtensions
services.AddScoped<ISourceStateRepository, SourceStateRepository>();
services.AddScoped<MongoAdvisories.IAdvisoryStore, PostgresAdvisoryStore>();
services.AddScoped<IDocumentRepository, DocumentRepository>();
services.AddScoped<MongoContracts.ISourceStateRepository, PostgresSourceStateAdapter>();
services.AddScoped<IFeedSnapshotRepository, FeedSnapshotRepository>();
services.AddScoped<IAdvisorySnapshotRepository, AdvisorySnapshotRepository>();
services.AddScoped<IMergeEventRepository, MergeEventRepository>();

@@ -0,0 +1,173 @@
|
||||
using System;
|
||||
using System.Text.Json;
|
||||
using System.Collections.Generic;
|
||||
using MongoDB.Bson;
|
||||
using StellaOps.Concelier.Storage.Postgres.Models;
|
||||
using StellaOps.Concelier.Storage.Postgres.Repositories;
|
||||
using MongoContracts = StellaOps.Concelier.Storage.Mongo;
|
||||
|
||||
namespace StellaOps.Concelier.Storage.Postgres;
|
||||
|
||||
/// <summary>
|
||||
/// Adapter that satisfies the legacy source state contract using PostgreSQL storage.
|
||||
/// </summary>
|
||||
public sealed class PostgresSourceStateAdapter : MongoContracts.ISourceStateRepository
|
||||
{
|
||||
private readonly ISourceRepository _sourceRepository;
|
||||
private readonly Repositories.ISourceStateRepository _stateRepository;
|
||||
private readonly TimeProvider _timeProvider;
|
||||
|
||||
public PostgresSourceStateAdapter(
|
||||
ISourceRepository sourceRepository,
|
||||
Repositories.ISourceStateRepository stateRepository,
|
||||
TimeProvider? timeProvider = null)
|
||||
{
|
||||
_sourceRepository = sourceRepository ?? throw new ArgumentNullException(nameof(sourceRepository));
|
||||
_stateRepository = stateRepository ?? throw new ArgumentNullException(nameof(stateRepository));
|
||||
_timeProvider = timeProvider ?? TimeProvider.System;
|
||||
}
|
||||
|
||||
public async Task<MongoContracts.SourceStateRecord?> TryGetAsync(string sourceName, CancellationToken cancellationToken)
|
||||
{
|
||||
ArgumentException.ThrowIfNullOrEmpty(sourceName);
|
||||
|
||||
var source = await _sourceRepository.GetByKeyAsync(sourceName, cancellationToken).ConfigureAwait(false);
|
||||
if (source is null)
|
||||
{
|
||||
return null;
|
||||
}
|
||||
|
||||
var state = await _stateRepository.GetBySourceIdAsync(source.Id, cancellationToken).ConfigureAwait(false);
|
||||
if (state is null)
|
||||
{
|
||||
return null;
|
||||
}
|
||||
|
||||
var cursor = string.IsNullOrWhiteSpace(state.Cursor) ? null : BsonDocument.Parse(state.Cursor);
|
||||
return new MongoContracts.SourceStateRecord(
|
||||
sourceName,
|
||||
Enabled: true,
|
||||
Paused: false,
|
||||
Cursor: cursor,
|
||||
LastSuccess: state.LastSuccessAt,
|
||||
LastFailure: state.LastError is null ? null : state.LastSyncAt,
|
||||
FailCount: state.ErrorCount,
|
||||
BackoffUntil: null,
|
||||
UpdatedAt: state.UpdatedAt,
|
||||
LastFailureReason: state.LastError);
|
||||
}
|
||||
|
||||
public async Task UpdateCursorAsync(string sourceName, BsonDocument cursor, DateTimeOffset completedAt, CancellationToken cancellationToken)
|
||||
{
|
||||
ArgumentException.ThrowIfNullOrEmpty(sourceName);
|
||||
ArgumentNullException.ThrowIfNull(cursor);
|
||||
|
||||
var source = await EnsureSourceAsync(sourceName, cancellationToken).ConfigureAwait(false);
|
||||
var existing = await _stateRepository.GetBySourceIdAsync(source.Id, cancellationToken).ConfigureAwait(false);
|
||||
|
||||
var entity = new SourceStateEntity
|
||||
{
|
||||
Id = existing?.Id ?? Guid.NewGuid(),
|
||||
SourceId = source.Id,
|
||||
Cursor = cursor.ToJson(),
|
||||
LastSyncAt = completedAt,
|
||||
LastSuccessAt = completedAt,
|
||||
LastError = null,
|
||||
SyncCount = (existing?.SyncCount ?? 0) + 1,
|
||||
ErrorCount = existing?.ErrorCount ?? 0,
|
||||
Metadata = existing?.Metadata ?? "{}",
|
||||
UpdatedAt = completedAt
|
||||
};
|
||||
|
||||
_ = await _stateRepository.UpsertAsync(entity, cancellationToken).ConfigureAwait(false);
|
||||
}
|
||||
|
||||
public async Task MarkFailureAsync(string sourceName, DateTimeOffset now, TimeSpan backoff, string reason, CancellationToken cancellationToken)
|
||||
{
|
||||
ArgumentException.ThrowIfNullOrEmpty(sourceName);
|
||||
|
||||
var source = await EnsureSourceAsync(sourceName, cancellationToken).ConfigureAwait(false);
|
||||
var existing = await _stateRepository.GetBySourceIdAsync(source.Id, cancellationToken).ConfigureAwait(false);
|
||||
var backoffUntil = SafeAdd(now, backoff);
|
||||
|
||||
var metadata = new Dictionary<string, object?>(StringComparer.Ordinal)
|
||||
{
|
||||
["backoffUntil"] = backoffUntil.ToString("O"),
|
||||
["reason"] = reason
|
||||
};
|
||||
|
||||
var entity = new SourceStateEntity
|
||||
{
|
||||
Id = existing?.Id ?? Guid.NewGuid(),
|
||||
SourceId = source.Id,
|
||||
Cursor = existing?.Cursor,
|
||||
LastSyncAt = now,
|
||||
LastSuccessAt = existing?.LastSuccessAt,
|
||||
LastError = reason,
|
||||
SyncCount = existing?.SyncCount ?? 0,
|
||||
ErrorCount = (existing?.ErrorCount ?? 0) + 1,
|
||||
Metadata = JsonSerializer.Serialize(metadata, new JsonSerializerOptions(JsonSerializerDefaults.Web)),
|
||||
UpdatedAt = now
|
||||
};
|
||||
|
||||
_ = await _stateRepository.UpsertAsync(entity, cancellationToken).ConfigureAwait(false);
|
||||
}
|
||||
|
||||
public async Task UpsertAsync(MongoContracts.SourceStateRecord record, CancellationToken cancellationToken)
|
||||
{
|
||||
ArgumentNullException.ThrowIfNull(record);
|
||||
var source = await EnsureSourceAsync(record.SourceName, cancellationToken).ConfigureAwait(false);
|
||||
var entity = new SourceStateEntity
|
||||
{
|
||||
Id = Guid.NewGuid(),
|
||||
SourceId = source.Id,
|
||||
Cursor = record.Cursor?.ToJson(),
|
||||
LastSyncAt = record.UpdatedAt,
|
||||
LastSuccessAt = record.LastSuccess,
|
||||
LastError = record.LastFailureReason,
|
||||
SyncCount = record.FailCount,
|
||||
ErrorCount = record.FailCount,
|
||||
Metadata = "{}",
|
||||
UpdatedAt = record.UpdatedAt
|
||||
};
|
||||
|
||||
_ = await _stateRepository.UpsertAsync(entity, cancellationToken).ConfigureAwait(false);
|
||||
}
|
||||
|
||||
private async Task<SourceEntity> EnsureSourceAsync(string sourceName, CancellationToken cancellationToken)
|
||||
{
|
||||
var existing = await _sourceRepository.GetByKeyAsync(sourceName, cancellationToken).ConfigureAwait(false);
|
||||
if (existing is not null)
|
||||
{
|
||||
return existing;
|
||||
}
|
||||
|
||||
var now = _timeProvider.GetUtcNow();
|
||||
return await _sourceRepository.UpsertAsync(new SourceEntity
|
||||
{
|
||||
Id = Guid.NewGuid(),
|
||||
Key = sourceName,
|
||||
Name = sourceName,
|
||||
SourceType = sourceName,
|
||||
Url = null,
|
||||
Priority = 0,
|
||||
Enabled = true,
|
||||
Config = "{}",
|
||||
Metadata = "{}",
|
||||
CreatedAt = now,
|
||||
UpdatedAt = now
|
||||
}, cancellationToken).ConfigureAwait(false);
|
||||
}
|
||||
|
||||
private static DateTimeOffset SafeAdd(DateTimeOffset value, TimeSpan delta)
|
||||
{
|
||||
try
|
||||
{
|
||||
return value.Add(delta);
|
||||
}
|
||||
catch (ArgumentOutOfRangeException)
|
||||
{
|
||||
return delta < TimeSpan.Zero ? DateTimeOffset.MinValue : DateTimeOffset.MaxValue;
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -7,7 +7,6 @@
<IsTestProject>false</IsTestProject>
</PropertyGroup>
<ItemGroup>
<PackageReference Include="Mongo2Go" Version="3.1.3" />
<PackageReference Include="Microsoft.Extensions.TimeProvider.Testing" Version="9.10.0" />
<PackageReference Include="xunit" Version="2.9.2">
<PrivateAssets>all</PrivateAssets>
@@ -16,4 +15,4 @@
<ItemGroup>
<ProjectReference Include="../StellaOps.Concelier.Connector.Common/StellaOps.Concelier.Connector.Common.csproj" />
</ItemGroup>
</Project>
</Project>

@@ -16,7 +16,7 @@ using StellaOps.Concelier.Connector.Common;
|
||||
using StellaOps.Concelier.Connector.Common.Http;
|
||||
using StellaOps.Concelier.Connector.Common.Testing;
|
||||
using StellaOps.Concelier.Storage.Mongo;
|
||||
using StellaOps.Concelier.Storage.Mongo.Documents;
|
||||
using StellaOps.Concelier.Storage.Mongo;
|
||||
using StellaOps.Concelier.Testing;
|
||||
using Xunit;
|
||||
|
||||
|
||||
@@ -17,8 +17,8 @@ using StellaOps.Concelier.Connector.Common.Http;
|
||||
using StellaOps.Concelier.Connector.Common.Testing;
|
||||
using StellaOps.Concelier.Storage.Mongo;
|
||||
using StellaOps.Concelier.Storage.Mongo.Advisories;
|
||||
using StellaOps.Concelier.Storage.Mongo.Documents;
|
||||
using StellaOps.Concelier.Storage.Mongo.Dtos;
|
||||
using StellaOps.Concelier.Storage.Mongo;
|
||||
using StellaOps.Concelier.Storage.Mongo;
|
||||
using StellaOps.Concelier.Testing;
|
||||
using Xunit;
|
||||
|
||||
|
||||
@@ -10,8 +10,8 @@ using Microsoft.Extensions.Http;
|
||||
using Microsoft.Extensions.Logging;
|
||||
using Microsoft.Extensions.Logging.Abstractions;
|
||||
using Microsoft.Extensions.Options;
|
||||
using MongoDB.Bson;
|
||||
using MongoDB.Driver;
|
||||
using MongoDB.Bson;
|
||||
using MongoDB.Driver;
|
||||
using StellaOps.Concelier.Connector.Cccs;
|
||||
using StellaOps.Concelier.Connector.Cccs.Configuration;
|
||||
using StellaOps.Concelier.Connector.Common;
|
||||
@@ -19,7 +19,7 @@ using StellaOps.Concelier.Connector.Common.Http;
|
||||
using StellaOps.Concelier.Connector.Common.Testing;
|
||||
using StellaOps.Concelier.Storage.Mongo;
|
||||
using StellaOps.Concelier.Storage.Mongo.Advisories;
|
||||
using StellaOps.Concelier.Storage.Mongo.Documents;
|
||||
using StellaOps.Concelier.Storage.Mongo;
|
||||
using StellaOps.Concelier.Testing;
|
||||
using Xunit;
|
||||
|
||||
@@ -80,19 +80,19 @@ public sealed class CccsConnectorTests : IAsyncLifetime
|
||||
await using var provider = await BuildServiceProviderAsync();
|
||||
SeedFeedResponses();
|
||||
|
||||
var connector = provider.GetRequiredService<CccsConnector>();
|
||||
await connector.FetchAsync(provider, CancellationToken.None);
|
||||
|
||||
var mongo = provider.GetRequiredService<IMongoDatabase>();
|
||||
var docCollection = mongo.GetCollection<BsonDocument>("document");
|
||||
var documentsSnapshot = await docCollection.Find(FilterDefinition<BsonDocument>.Empty).ToListAsync();
|
||||
|
||||
System.IO.Directory.CreateDirectory(System.IO.Path.Combine(AppContext.BaseDirectory, "tmp"));
|
||||
var debugPath = System.IO.Path.Combine(AppContext.BaseDirectory, "tmp", "cccs-documents.json");
|
||||
await System.IO.File.WriteAllTextAsync(debugPath, documentsSnapshot.ToJson(new MongoDB.Bson.IO.JsonWriterSettings { Indent = true }));
|
||||
|
||||
var documentStore = provider.GetRequiredService<IDocumentStore>();
|
||||
var document = await documentStore.FindBySourceAndUriAsync(CccsConnectorPlugin.SourceName, "https://www.cyber.gc.ca/en/alerts-advisories/test-advisory", CancellationToken.None);
|
||||
var connector = provider.GetRequiredService<CccsConnector>();
|
||||
await connector.FetchAsync(provider, CancellationToken.None);
|
||||
|
||||
var mongo = provider.GetRequiredService<IMongoDatabase>();
|
||||
var docCollection = mongo.GetCollection<BsonDocument>("document");
|
||||
var documentsSnapshot = await docCollection.Find(FilterDefinition<BsonDocument>.Empty).ToListAsync();
|
||||
|
||||
System.IO.Directory.CreateDirectory(System.IO.Path.Combine(AppContext.BaseDirectory, "tmp"));
|
||||
var debugPath = System.IO.Path.Combine(AppContext.BaseDirectory, "tmp", "cccs-documents.json");
|
||||
await System.IO.File.WriteAllTextAsync(debugPath, documentsSnapshot.ToJson(new MongoDB.Bson.IO.JsonWriterSettings { Indent = true }));
|
||||
|
||||
var documentStore = provider.GetRequiredService<IDocumentStore>();
|
||||
var document = await documentStore.FindBySourceAndUriAsync(CccsConnectorPlugin.SourceName, "https://www.cyber.gc.ca/en/alerts-advisories/test-advisory", CancellationToken.None);
|
||||
document.Should().NotBeNull();
|
||||
document!.Status.Should().Be(DocumentStatuses.PendingParse);
|
||||
document.Metadata.Should().ContainKey("cccs.language").WhoseValue.Should().Be("en");
|
||||
|
||||
@@ -4,7 +4,7 @@ using StellaOps.Concelier.Connector.Cccs.Internal;
|
||||
using StellaOps.Concelier.Connector.Common;
|
||||
using StellaOps.Concelier.Connector.Common.Html;
|
||||
using StellaOps.Concelier.Models;
|
||||
using StellaOps.Concelier.Storage.Mongo.Documents;
|
||||
using StellaOps.Concelier.Storage.Mongo;
|
||||
using Xunit;
|
||||
|
||||
namespace StellaOps.Concelier.Connector.Cccs.Tests.Internal;
|
||||
|
||||
@@ -12,15 +12,15 @@ using Microsoft.Extensions.Logging.Abstractions;
|
||||
using Microsoft.Extensions.Options;
|
||||
using MongoDB.Bson;
|
||||
using StellaOps.Concelier.Connector.CertBund.Configuration;
|
||||
using StellaOps.Concelier.Connector.Common.Http;
|
||||
using StellaOps.Concelier.Connector.Common;
|
||||
using StellaOps.Concelier.Connector.Common.Fetch;
|
||||
using StellaOps.Concelier.Connector.Common.Testing;
|
||||
using StellaOps.Concelier.Models;
|
||||
using StellaOps.Concelier.Storage.Mongo;
|
||||
using StellaOps.Concelier.Storage.Mongo.Advisories;
|
||||
using StellaOps.Concelier.Storage.Mongo.Documents;
|
||||
using StellaOps.Concelier.Storage.Mongo.Dtos;
|
||||
using StellaOps.Concelier.Connector.Common.Http;
|
||||
using StellaOps.Concelier.Connector.Common;
|
||||
using StellaOps.Concelier.Connector.Common.Fetch;
|
||||
using StellaOps.Concelier.Connector.Common.Testing;
|
||||
using StellaOps.Concelier.Models;
|
||||
using StellaOps.Concelier.Storage.Mongo;
|
||||
using StellaOps.Concelier.Storage.Mongo.Advisories;
|
||||
using StellaOps.Concelier.Storage.Mongo;
|
||||
using StellaOps.Concelier.Storage.Mongo;
|
||||
using StellaOps.Concelier.Testing;
|
||||
using Xunit;
|
||||
|
||||
@@ -58,27 +58,27 @@ public sealed class CertBundConnectorTests : IAsyncLifetime
|
||||
advisories.Should().HaveCount(1);
|
||||
|
||||
var advisory = advisories[0];
|
||||
advisory.AdvisoryKey.Should().Be("WID-SEC-2025-2264");
|
||||
advisory.Aliases.Should().Contain("CVE-2025-1234");
|
||||
advisory.AffectedPackages.Should().Contain(package => package.Identifier.Contains("Ivanti"));
|
||||
advisory.References.Should().Contain(reference => reference.Url == DetailUri.ToString());
|
||||
advisory.Language.Should().Be("de");
|
||||
|
||||
var endpoint = advisory.AffectedPackages.Should().ContainSingle(p => p.Identifier.Contains("Endpoint Manager") && !p.Identifier.Contains("Cloud"))
|
||||
.Subject;
|
||||
endpoint.VersionRanges.Should().ContainSingle(range =>
|
||||
range.RangeKind == NormalizedVersionSchemes.SemVer &&
|
||||
range.IntroducedVersion == "2023.1" &&
|
||||
range.FixedVersion == "2024.2");
|
||||
endpoint.NormalizedVersions.Should().ContainSingle(rule =>
|
||||
rule.Min == "2023.1" &&
|
||||
rule.Max == "2024.2" &&
|
||||
rule.Notes == "certbund:WID-SEC-2025-2264:ivanti");
|
||||
|
||||
var stateRepository = provider.GetRequiredService<ISourceStateRepository>();
|
||||
var state = await stateRepository.TryGetAsync(CertBundConnectorPlugin.SourceName, CancellationToken.None);
|
||||
state.Should().NotBeNull();
|
||||
state!.Cursor.Should().NotBeNull();
|
||||
advisory.AdvisoryKey.Should().Be("WID-SEC-2025-2264");
|
||||
advisory.Aliases.Should().Contain("CVE-2025-1234");
|
||||
advisory.AffectedPackages.Should().Contain(package => package.Identifier.Contains("Ivanti"));
|
||||
advisory.References.Should().Contain(reference => reference.Url == DetailUri.ToString());
|
||||
advisory.Language.Should().Be("de");
|
||||
|
||||
var endpoint = advisory.AffectedPackages.Should().ContainSingle(p => p.Identifier.Contains("Endpoint Manager") && !p.Identifier.Contains("Cloud"))
|
||||
.Subject;
|
||||
endpoint.VersionRanges.Should().ContainSingle(range =>
|
||||
range.RangeKind == NormalizedVersionSchemes.SemVer &&
|
||||
range.IntroducedVersion == "2023.1" &&
|
||||
range.FixedVersion == "2024.2");
|
||||
endpoint.NormalizedVersions.Should().ContainSingle(rule =>
|
||||
rule.Min == "2023.1" &&
|
||||
rule.Max == "2024.2" &&
|
||||
rule.Notes == "certbund:WID-SEC-2025-2264:ivanti");
|
||||
|
||||
var stateRepository = provider.GetRequiredService<ISourceStateRepository>();
|
||||
var state = await stateRepository.TryGetAsync(CertBundConnectorPlugin.SourceName, CancellationToken.None);
|
||||
state.Should().NotBeNull();
|
||||
state!.Cursor.Should().NotBeNull();
|
||||
state.Cursor.TryGetValue("pendingDocuments", out var pendingDocs).Should().BeTrue();
|
||||
pendingDocs!.AsBsonArray.Should().BeEmpty();
|
||||
state.Cursor.TryGetValue("pendingMappings", out var pendingMappings).Should().BeTrue();
|
||||
|
||||
@@ -18,7 +18,7 @@ using StellaOps.Concelier.Connector.Common.Http;
|
||||
using StellaOps.Concelier.Connector.Common.Cursors;
|
||||
using StellaOps.Concelier.Connector.Common.Testing;
|
||||
using StellaOps.Concelier.Storage.Mongo;
|
||||
using StellaOps.Concelier.Storage.Mongo.Documents;
|
||||
using StellaOps.Concelier.Storage.Mongo;
|
||||
using StellaOps.Concelier.Testing;
|
||||
using Xunit;
|
||||
|
||||
|
||||
@@ -23,7 +23,7 @@ using StellaOps.Concelier.Connector.Common.Cursors;
|
||||
using StellaOps.Concelier.Connector.Common.Http;
|
||||
using StellaOps.Concelier.Connector.Common.Testing;
|
||||
using StellaOps.Concelier.Storage.Mongo;
|
||||
using StellaOps.Concelier.Storage.Mongo.Documents;
|
||||
using StellaOps.Concelier.Storage.Mongo;
|
||||
using StellaOps.Concelier.Storage.Mongo.Advisories;
|
||||
using StellaOps.Concelier.Testing;
|
||||
using Xunit;
|
||||
|
||||
@@ -22,7 +22,7 @@ using StellaOps.Concelier.Connector.Common.Cursors;
|
||||
using StellaOps.Concelier.Connector.Common.Http;
|
||||
using StellaOps.Concelier.Connector.Common.Testing;
|
||||
using StellaOps.Concelier.Storage.Mongo;
|
||||
using StellaOps.Concelier.Storage.Mongo.Documents;
|
||||
using StellaOps.Concelier.Storage.Mongo;
|
||||
using StellaOps.Concelier.Storage.Mongo.Advisories;
|
||||
using StellaOps.Concelier.Testing;
|
||||
using Xunit;
|
||||
|
||||
@@ -3,8 +3,8 @@ using System.Globalization;
|
||||
using MongoDB.Bson;
|
||||
using StellaOps.Concelier.Models;
|
||||
using StellaOps.Concelier.Connector.CertCc.Internal;
|
||||
using StellaOps.Concelier.Storage.Mongo.Documents;
|
||||
using StellaOps.Concelier.Storage.Mongo.Dtos;
|
||||
using StellaOps.Concelier.Storage.Mongo;
|
||||
using StellaOps.Concelier.Storage.Mongo;
|
||||
using Xunit;
|
||||
|
||||
namespace StellaOps.Concelier.Connector.CertCc.Tests.Internal;
|
||||
|
||||
@@ -22,8 +22,8 @@ using StellaOps.Concelier.Connector.Common.Http;
|
||||
using StellaOps.Concelier.Connector.Common.Testing;
|
||||
using StellaOps.Concelier.Storage.Mongo;
|
||||
using StellaOps.Concelier.Storage.Mongo.Advisories;
|
||||
using StellaOps.Concelier.Storage.Mongo.Documents;
|
||||
using StellaOps.Concelier.Storage.Mongo.Dtos;
|
||||
using StellaOps.Concelier.Storage.Mongo;
|
||||
using StellaOps.Concelier.Storage.Mongo;
|
||||
using StellaOps.Concelier.Testing;
|
||||
using StellaOps.Concelier.Models;
|
||||
|
||||
@@ -67,13 +67,13 @@ public sealed class CertFrConnectorTests : IAsyncLifetime
|
||||
var snapshot = SnapshotSerializer.ToSnapshot(advisories.OrderBy(static a => a.AdvisoryKey, StringComparer.Ordinal).ToArray());
|
||||
var expected = ReadFixture("certfr-advisories.snapshot.json");
|
||||
var normalizedSnapshot = Normalize(snapshot);
|
||||
var normalizedExpected = Normalize(expected);
|
||||
if (!string.Equals(normalizedExpected, normalizedSnapshot, StringComparison.Ordinal))
|
||||
{
|
||||
var actualPath = Path.Combine(AppContext.BaseDirectory, "Source", "CertFr", "Fixtures", "certfr-advisories.actual.json");
|
||||
Directory.CreateDirectory(Path.GetDirectoryName(actualPath)!);
|
||||
File.WriteAllText(actualPath, snapshot);
|
||||
}
|
||||
var normalizedExpected = Normalize(expected);
|
||||
if (!string.Equals(normalizedExpected, normalizedSnapshot, StringComparison.Ordinal))
|
||||
{
|
||||
var actualPath = Path.Combine(AppContext.BaseDirectory, "Source", "CertFr", "Fixtures", "certfr-advisories.actual.json");
|
||||
Directory.CreateDirectory(Path.GetDirectoryName(actualPath)!);
|
||||
File.WriteAllText(actualPath, snapshot);
|
||||
}
|
||||
|
||||
Assert.Equal(normalizedExpected, normalizedSnapshot);
|
||||
|
||||
|
||||
@@ -24,8 +24,8 @@ using StellaOps.Concelier.Connector.Common.Http;
|
||||
using StellaOps.Concelier.Connector.Common.Testing;
|
||||
using StellaOps.Concelier.Storage.Mongo;
|
||||
using StellaOps.Concelier.Storage.Mongo.Advisories;
|
||||
using StellaOps.Concelier.Storage.Mongo.Documents;
|
||||
using StellaOps.Concelier.Storage.Mongo.Dtos;
|
||||
using StellaOps.Concelier.Storage.Mongo;
|
||||
using StellaOps.Concelier.Storage.Mongo;
|
||||
using StellaOps.Concelier.Testing;
|
||||
|
||||
namespace StellaOps.Concelier.Connector.CertIn.Tests;
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
using System.Net;
|
||||
using System.Net.Http;
|
||||
using System.Net.Http.Headers;
|
||||
using System.Text;
|
||||
using System.Text;
|
||||
using Microsoft.Extensions.Logging.Abstractions;
|
||||
using Microsoft.Extensions.Options;
|
||||
using Mongo2Go;
|
||||
@@ -12,10 +12,10 @@ using StellaOps.Concelier.Connector.Common.Fetch;
|
||||
using StellaOps.Concelier.Connector.Common.Http;
|
||||
using StellaOps.Concelier.Core.Aoc;
|
||||
using StellaOps.Concelier.Core.Linksets;
|
||||
using StellaOps.Concelier.RawModels;
|
||||
using StellaOps.Concelier.Storage.Mongo;
|
||||
using StellaOps.Concelier.Storage.Mongo.Documents;
|
||||
using StellaOps.Cryptography;
|
||||
using StellaOps.Concelier.RawModels;
|
||||
using StellaOps.Concelier.Storage.Mongo;
|
||||
using StellaOps.Concelier.Storage.Mongo;
|
||||
using StellaOps.Cryptography;
|
||||
|
||||
namespace StellaOps.Concelier.Connector.Common.Tests;
|
||||
|
||||
@@ -23,16 +23,16 @@ public sealed class SourceFetchServiceGuardTests : IAsyncLifetime
|
||||
{
|
||||
private readonly MongoDbRunner _runner;
|
||||
private readonly IMongoDatabase _database;
|
||||
private readonly RawDocumentStorage _rawStorage;
|
||||
private readonly ICryptoHash _hash;
|
||||
private readonly RawDocumentStorage _rawStorage;
|
||||
private readonly ICryptoHash _hash;
|
||||
|
||||
public SourceFetchServiceGuardTests()
|
||||
{
|
||||
_runner = MongoDbRunner.Start(singleNodeReplSet: true);
|
||||
var client = new MongoClient(_runner.ConnectionString);
|
||||
_database = client.GetDatabase($"source-fetch-guard-{Guid.NewGuid():N}");
|
||||
_rawStorage = new RawDocumentStorage(_database);
|
||||
_hash = CryptoHashFactory.CreateDefault();
|
||||
_rawStorage = new RawDocumentStorage();
|
||||
_hash = CryptoHashFactory.CreateDefault();
|
||||
}
|
||||
|
||||
[Fact]
|
||||
@@ -55,18 +55,18 @@ public sealed class SourceFetchServiceGuardTests : IAsyncLifetime
|
||||
|
||||
var linksetMapper = new NoopAdvisoryLinksetMapper();
|
||||
|
||||
var service = new SourceFetchService(
|
||||
httpClientFactory,
|
||||
_rawStorage,
|
||||
documentStore,
|
||||
NullLogger<SourceFetchService>.Instance,
|
||||
jitter,
|
||||
guard,
|
||||
linksetMapper,
|
||||
_hash,
|
||||
TimeProvider.System,
|
||||
httpOptions,
|
||||
storageOptions);
|
||||
var service = new SourceFetchService(
|
||||
httpClientFactory,
|
||||
_rawStorage,
|
||||
documentStore,
|
||||
NullLogger<SourceFetchService>.Instance,
|
||||
jitter,
|
||||
guard,
|
||||
linksetMapper,
|
||||
_hash,
|
||||
TimeProvider.System,
|
||||
httpOptions,
|
||||
storageOptions);
|
||||
|
||||
var request = new SourceFetchRequest("client", "vndr.msrc", new Uri("https://example.test/advisories/ADV-1234"))
|
||||
{
|
||||
@@ -85,7 +85,7 @@ public sealed class SourceFetchServiceGuardTests : IAsyncLifetime
|
||||
Assert.Equal("tenant-default", guard.LastDocument!.Tenant);
|
||||
Assert.Equal("msrc", guard.LastDocument.Source.Vendor);
|
||||
Assert.Equal("ADV-1234", guard.LastDocument.Upstream.UpstreamId);
|
||||
var expectedHash = _hash.ComputeHashHex(Encoding.UTF8.GetBytes(responsePayload), HashAlgorithms.Sha256);
|
||||
var expectedHash = _hash.ComputeHashHex(Encoding.UTF8.GetBytes(responsePayload), HashAlgorithms.Sha256);
|
||||
Assert.Equal(expectedHash, guard.LastDocument.Upstream.ContentHash);
|
||||
Assert.NotNull(documentStore.LastRecord);
|
||||
Assert.True(documentStore.UpsertCount > 0);
|
||||
@@ -117,18 +117,18 @@ public sealed class SourceFetchServiceGuardTests : IAsyncLifetime
|
||||
|
||||
var linksetMapper = new NoopAdvisoryLinksetMapper();
|
||||
|
||||
var service = new SourceFetchService(
|
||||
httpClientFactory,
|
||||
_rawStorage,
|
||||
documentStore,
|
||||
NullLogger<SourceFetchService>.Instance,
|
||||
jitter,
|
||||
guard,
|
||||
linksetMapper,
|
||||
_hash,
|
||||
TimeProvider.System,
|
||||
httpOptions,
|
||||
storageOptions);
|
||||
var service = new SourceFetchService(
|
||||
httpClientFactory,
|
||||
_rawStorage,
|
||||
documentStore,
|
||||
NullLogger<SourceFetchService>.Instance,
|
||||
jitter,
|
||||
guard,
|
||||
linksetMapper,
|
||||
_hash,
|
||||
TimeProvider.System,
|
||||
httpOptions,
|
||||
storageOptions);
|
||||
|
||||
var request = new SourceFetchRequest("client", "nvd", new Uri("https://example.test/data/XYZ"))
|
||||
{
|
||||
@@ -191,21 +191,21 @@ public sealed class SourceFetchServiceGuardTests : IAsyncLifetime
|
||||
|
||||
public int UpsertCount { get; private set; }
|
||||
|
||||
public Task<DocumentRecord> UpsertAsync(DocumentRecord record, CancellationToken cancellationToken, IClientSessionHandle? session = null)
|
||||
public Task<DocumentRecord> UpsertAsync(DocumentRecord record, CancellationToken cancellationToken)
|
||||
{
|
||||
UpsertCount++;
|
||||
LastRecord = record;
|
||||
return Task.FromResult(record);
|
||||
}
|
||||
|
||||
public Task<DocumentRecord?> FindBySourceAndUriAsync(string sourceName, string uri, CancellationToken cancellationToken, IClientSessionHandle? session = null)
|
||||
public Task<DocumentRecord?> FindBySourceAndUriAsync(string sourceName, string uri, CancellationToken cancellationToken)
|
||||
=> Task.FromResult<DocumentRecord?>(null);
|
||||
|
||||
public Task<DocumentRecord?> FindAsync(Guid id, CancellationToken cancellationToken, IClientSessionHandle? session = null)
|
||||
public Task<DocumentRecord?> FindAsync(Guid id, CancellationToken cancellationToken)
|
||||
=> Task.FromResult<DocumentRecord?>(null);
|
||||
|
||||
public Task<bool> UpdateStatusAsync(Guid id, string status, CancellationToken cancellationToken, IClientSessionHandle? session = null)
|
||||
=> Task.FromResult(false);
|
||||
public Task UpdateStatusAsync(Guid id, string status, CancellationToken cancellationToken)
|
||||
=> Task.CompletedTask;
|
||||
}
|
||||
|
||||
private sealed class RecordingAdvisoryRawWriteGuard : IAdvisoryRawWriteGuard
|
||||
|
||||
@@ -10,7 +10,7 @@ using StellaOps.Concelier.Connector.Common;
|
||||
using StellaOps.Concelier.Connector.Common.Fetch;
|
||||
using StellaOps.Concelier.Connector.Common.State;
|
||||
using StellaOps.Concelier.Storage.Mongo;
|
||||
using StellaOps.Concelier.Storage.Mongo.Documents;
|
||||
using StellaOps.Concelier.Storage.Mongo;
|
||||
using StellaOps.Cryptography;
|
||||
|
||||
namespace StellaOps.Concelier.Connector.Common.Tests;
|
||||
@@ -32,7 +32,7 @@ public sealed class SourceStateSeedProcessorTests : IAsyncLifetime
|
||||
_client = new MongoClient(_runner.ConnectionString);
|
||||
_database = _client.GetDatabase($"source-state-seed-{Guid.NewGuid():N}");
|
||||
_documentStore = new DocumentStore(_database, NullLogger<DocumentStore>.Instance);
|
||||
_rawStorage = new RawDocumentStorage(_database);
|
||||
_rawStorage = new RawDocumentStorage();
|
||||
_stateRepository = new MongoSourceStateRepository(_database, NullLogger<MongoSourceStateRepository>.Instance);
|
||||
_timeProvider = new FakeTimeProvider(new DateTimeOffset(2025, 10, 28, 12, 0, 0, TimeSpan.Zero));
|
||||
_hash = CryptoHashFactory.CreateDefault();
|
||||
|
||||
@@ -10,7 +10,6 @@
<PackageReference Include="coverlet.collector" Version="6.0.4" />
<PackageReference Include="Microsoft.Extensions.TimeProvider.Testing" Version="9.10.0" />
<PackageReference Include="Microsoft.NET.Test.Sdk" Version="17.14.1" />
<PackageReference Include="Mongo2Go" Version="4.1.0" />
<PackageReference Include="xunit" Version="2.9.2" />
<PackageReference Include="xunit.runner.visualstudio" Version="2.8.2" />
</ItemGroup>
@@ -21,4 +20,4 @@
<ProjectReference Include="../../__Libraries/StellaOps.Concelier.Connector.Common/StellaOps.Concelier.Connector.Common.csproj" />
<ProjectReference Include="../../../__Libraries/StellaOps.Cryptography/StellaOps.Cryptography.csproj" />
</ItemGroup>
</Project>
</Project>

@@ -17,8 +17,8 @@ using StellaOps.Concelier.Connector.Cve.Internal;
|
||||
using StellaOps.Concelier.Testing;
|
||||
using StellaOps.Concelier.Storage.Mongo;
|
||||
using StellaOps.Concelier.Storage.Mongo.Advisories;
|
||||
using StellaOps.Concelier.Storage.Mongo.Documents;
|
||||
using StellaOps.Concelier.Storage.Mongo.Dtos;
|
||||
using StellaOps.Concelier.Storage.Mongo;
|
||||
using StellaOps.Concelier.Storage.Mongo;
|
||||
using Xunit.Abstractions;
|
||||
|
||||
namespace StellaOps.Concelier.Connector.Cve.Tests;
|
||||
|
||||
@@ -22,8 +22,8 @@ using StellaOps.Concelier.Connector.Common.Testing;
|
||||
using StellaOps.Concelier.Connector.Distro.Debian.Configuration;
|
||||
using StellaOps.Concelier.Storage.Mongo;
|
||||
using StellaOps.Concelier.Storage.Mongo.Advisories;
|
||||
using StellaOps.Concelier.Storage.Mongo.Documents;
|
||||
using StellaOps.Concelier.Storage.Mongo.Dtos;
|
||||
using StellaOps.Concelier.Storage.Mongo;
|
||||
using StellaOps.Concelier.Storage.Mongo;
|
||||
using StellaOps.Concelier.Testing;
|
||||
using Xunit;
|
||||
using Xunit.Abstractions;
|
||||
|
||||
@@ -3,7 +3,7 @@ using Xunit;
|
||||
using StellaOps.Concelier.Models;
|
||||
using StellaOps.Concelier.Connector.Distro.Debian;
|
||||
using StellaOps.Concelier.Connector.Distro.Debian.Internal;
|
||||
using StellaOps.Concelier.Storage.Mongo.Documents;
|
||||
using StellaOps.Concelier.Storage.Mongo;
|
||||
|
||||
namespace StellaOps.Concelier.Connector.Distro.Debian.Tests;
|
||||
|
||||
|
||||
@@ -25,8 +25,8 @@ using StellaOps.Concelier.Connector.Distro.RedHat.Internal;
|
||||
using StellaOps.Concelier.Models;
|
||||
using StellaOps.Concelier.Storage.Mongo;
|
||||
using StellaOps.Concelier.Storage.Mongo.Advisories;
|
||||
using StellaOps.Concelier.Storage.Mongo.Documents;
|
||||
using StellaOps.Concelier.Storage.Mongo.Dtos;
|
||||
using StellaOps.Concelier.Storage.Mongo;
|
||||
using StellaOps.Concelier.Storage.Mongo;
|
||||
using StellaOps.Concelier.Testing;
|
||||
using StellaOps.Plugin;
|
||||
using Xunit;
|
||||
|
||||
@@ -8,20 +8,20 @@ using System.Text;
|
||||
using System.Threading;
|
||||
using System.Threading.Tasks;
|
||||
using Microsoft.Extensions.DependencyInjection;
|
||||
using Microsoft.Extensions.Http;
|
||||
using Microsoft.Extensions.Logging;
|
||||
using Microsoft.Extensions.Logging.Abstractions;
|
||||
using Microsoft.Extensions.Time.Testing;
|
||||
using StellaOps.Concelier.Models;
|
||||
using StellaOps.Concelier.Connector.Common;
|
||||
using StellaOps.Concelier.Connector.Common.Http;
|
||||
using Microsoft.Extensions.Http;
|
||||
using Microsoft.Extensions.Logging;
|
||||
using Microsoft.Extensions.Logging.Abstractions;
|
||||
using Microsoft.Extensions.Time.Testing;
|
||||
using StellaOps.Concelier.Models;
|
||||
using StellaOps.Concelier.Connector.Common;
|
||||
using StellaOps.Concelier.Connector.Common.Http;
|
||||
using StellaOps.Concelier.Connector.Common.Testing;
|
||||
using StellaOps.Concelier.Connector.Distro.Suse;
|
||||
using StellaOps.Concelier.Connector.Distro.Suse.Configuration;
|
||||
using StellaOps.Concelier.Storage.Mongo;
|
||||
using StellaOps.Concelier.Storage.Mongo.Advisories;
|
||||
using StellaOps.Concelier.Storage.Mongo.Documents;
|
||||
using StellaOps.Concelier.Storage.Mongo.Dtos;
|
||||
using StellaOps.Concelier.Storage.Mongo;
|
||||
using StellaOps.Concelier.Storage.Mongo;
|
||||
using StellaOps.Concelier.Testing;
|
||||
using Xunit;
|
||||
using Xunit.Abstractions;
|
||||
@@ -72,7 +72,7 @@ public sealed class SuseConnectorTests : IAsyncLifetime
|
||||
|
||||
var open = advisories.Single(a => a.AdvisoryKey == "SUSE-SU-2025:0002-1");
|
||||
var openPackage = Assert.Single(open.AffectedPackages);
|
||||
Assert.Equal(AffectedPackageStatusCatalog.UnderInvestigation, openPackage.Statuses.Single().Status);
|
||||
Assert.Equal(AffectedPackageStatusCatalog.UnderInvestigation, openPackage.Statuses.Single().Status);
|
||||
|
||||
SeedNotModifiedResponses();
|
||||
|
||||
@@ -133,10 +133,10 @@ public sealed class SuseConnectorTests : IAsyncLifetime
|
||||
_handler.AddResponse(AdvisoryOpenUri, () => BuildResponse(HttpStatusCode.OK, "suse-su-2025_0002-1.json", "\"adv-2\""));
|
||||
}
|
||||
|
||||
private void SeedNotModifiedResponses()
|
||||
{
|
||||
_handler.AddResponse(ChangesUri, () => BuildResponse(HttpStatusCode.NotModified, "suse-changes.csv", "\"changes-v1\""));
|
||||
}
|
||||
private void SeedNotModifiedResponses()
|
||||
{
|
||||
_handler.AddResponse(ChangesUri, () => BuildResponse(HttpStatusCode.NotModified, "suse-changes.csv", "\"changes-v1\""));
|
||||
}
|
||||
|
||||
private HttpResponseMessage BuildResponse(HttpStatusCode statusCode, string fixture, string etag)
|
||||
{