feat: Add CVSS receipt management endpoints and related functionality
- Introduced new API endpoints for creating, retrieving, amending, and listing CVSS receipts.
- Updated the IPolicyEngineClient interface to include methods for CVSS receipt operations (sketched below).
- Implemented PolicyEngineClient to handle CVSS receipt requests.
- Enhanced Program.cs to map the new CVSS receipt routes with appropriate authorization.
- Added the models and contracts needed for CVSS receipt requests and responses.
- Integrated a Postgres document store for managing CVSS receipts and related data.
- Updated the database schema with new migrations for source documents and payload storage.
- Refactored existing components to support the new CVSS functionality.
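The diff excerpt below only touches the Red Hat connector, so the CVSS receipt surface itself is not visible here. As orientation only, a minimal sketch of what the create/retrieve/amend/list operations on IPolicyEngineClient could look like follows; every type and member name in it (CvssReceiptRequest, CvssReceipt, the *Async methods and their parameters) is a hypothetical assumption, not taken from this commit.

// Hypothetical sketch: names and shapes are assumptions, not the commit's actual contracts.
using System;
using System.Collections.Generic;
using System.Threading;
using System.Threading.Tasks;

public sealed record CvssReceiptRequest(string VulnerabilityId, string Vector, double BaseScore);

public sealed record CvssReceipt(Guid Id, string VulnerabilityId, string Vector, double BaseScore, DateTimeOffset CreatedAt);

public interface IPolicyEngineClient
{
    // Create a new receipt from a submitted CVSS vector.
    Task<CvssReceipt> CreateCvssReceiptAsync(CvssReceiptRequest request, CancellationToken cancellationToken);

    // Retrieve a single receipt by its identifier.
    Task<CvssReceipt?> GetCvssReceiptAsync(Guid receiptId, CancellationToken cancellationToken);

    // Amend an existing receipt, for example after a rescore.
    Task<CvssReceipt> AmendCvssReceiptAsync(Guid receiptId, CvssReceiptRequest amendment, CancellationToken cancellationToken);

    // List receipts, presumably backed by the Postgres document store mentioned above.
    Task<IReadOnlyList<CvssReceipt>> ListCvssReceiptsAsync(CancellationToken cancellationToken);
}

The commit message only states that such operations exist; the actual parameter and return types would come from the request/response models and contracts it mentions.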
@@ -1,434 +1,434 @@
using System;
using System.Collections.Generic;
using System.Globalization;
using System.Linq;
using System.Text.Json;
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Options;
using MongoDB.Bson;
using MongoDB.Bson.IO;
using StellaOps.Concelier.Models;
using StellaOps.Concelier.Connector.Common;
using StellaOps.Concelier.Connector.Common.Fetch;
using StellaOps.Concelier.Connector.Distro.RedHat.Configuration;
using StellaOps.Concelier.Connector.Distro.RedHat.Internal;
using StellaOps.Concelier.Storage.Mongo;
using StellaOps.Concelier.Storage.Mongo.Advisories;
using StellaOps.Concelier.Storage.Mongo.Documents;
using StellaOps.Concelier.Storage.Mongo.Dtos;
using StellaOps.Plugin;

namespace StellaOps.Concelier.Connector.Distro.RedHat;

public sealed class RedHatConnector : IFeedConnector
{
    private readonly SourceFetchService _fetchService;
    private readonly RawDocumentStorage _rawDocumentStorage;
    private readonly IDocumentStore _documentStore;
    private readonly IDtoStore _dtoStore;
    private readonly IAdvisoryStore _advisoryStore;
    private readonly ISourceStateRepository _stateRepository;
    private readonly ILogger<RedHatConnector> _logger;
    private readonly RedHatOptions _options;
    private readonly TimeProvider _timeProvider;

    public RedHatConnector(
        SourceFetchService fetchService,
        RawDocumentStorage rawDocumentStorage,
        IDocumentStore documentStore,
        IDtoStore dtoStore,
        IAdvisoryStore advisoryStore,
        ISourceStateRepository stateRepository,
        IOptions<RedHatOptions> options,
        TimeProvider? timeProvider,
        ILogger<RedHatConnector> logger)
    {
        _fetchService = fetchService ?? throw new ArgumentNullException(nameof(fetchService));
        _rawDocumentStorage = rawDocumentStorage ?? throw new ArgumentNullException(nameof(rawDocumentStorage));
        _documentStore = documentStore ?? throw new ArgumentNullException(nameof(documentStore));
        _dtoStore = dtoStore ?? throw new ArgumentNullException(nameof(dtoStore));
        _advisoryStore = advisoryStore ?? throw new ArgumentNullException(nameof(advisoryStore));
        _stateRepository = stateRepository ?? throw new ArgumentNullException(nameof(stateRepository));
        _options = options?.Value ?? throw new ArgumentNullException(nameof(options));
        _options.Validate();
        _timeProvider = timeProvider ?? TimeProvider.System;
        _logger = logger ?? throw new ArgumentNullException(nameof(logger));
    }

    public string SourceName => RedHatConnectorPlugin.SourceName;

    public async Task FetchAsync(IServiceProvider services, CancellationToken cancellationToken)
    {
        ArgumentNullException.ThrowIfNull(services);

        var cursor = await GetCursorAsync(cancellationToken).ConfigureAwait(false);
        var now = _timeProvider.GetUtcNow();

        var baseline = cursor.LastReleasedOn ?? now - _options.InitialBackfill;
        var overlap = _options.Overlap > TimeSpan.Zero ? _options.Overlap : TimeSpan.Zero;
        var afterThreshold = baseline - overlap;
        if (afterThreshold < DateTimeOffset.UnixEpoch)
        {
            afterThreshold = DateTimeOffset.UnixEpoch;
        }

        ProvenanceDiagnostics.ReportResumeWindow(SourceName, afterThreshold, _logger);

        var processedSet = new HashSet<string>(cursor.ProcessedAdvisoryIds, StringComparer.OrdinalIgnoreCase);
        var newSummaries = new List<RedHatSummaryItem>();
        var stopDueToOlderData = false;
        var touchedResources = new HashSet<string>(StringComparer.OrdinalIgnoreCase);

        for (var page = 1; page <= _options.MaxPagesPerFetch; page++)
        {
            var summaryUri = BuildSummaryUri(afterThreshold, page);
            var summaryKey = summaryUri.ToString();
            touchedResources.Add(summaryKey);

            var cachedSummary = cursor.TryGetFetchCache(summaryKey);
            var summaryMetadata = new Dictionary<string, string>(StringComparer.Ordinal)
            {
                ["page"] = page.ToString(CultureInfo.InvariantCulture),
                ["type"] = "summary"
            };

            var summaryRequest = new SourceFetchRequest(RedHatOptions.HttpClientName, SourceName, summaryUri)
            {
                Metadata = summaryMetadata,
                ETag = cachedSummary?.ETag,
                LastModified = cachedSummary?.LastModified,
                TimeoutOverride = _options.FetchTimeout,
            };

            SourceFetchContentResult summaryResult;
            try
            {
                summaryResult = await _fetchService.FetchContentAsync(summaryRequest, cancellationToken).ConfigureAwait(false);
            }
            catch (Exception ex)
            {
                _logger.LogError(ex, "Red Hat Hydra summary fetch failed for {Uri}", summaryUri);
                throw;
            }

            if (summaryResult.IsNotModified)
            {
                if (page == 1)
                {
                    break;
                }

                continue;
            }

            if (!summaryResult.IsSuccess || summaryResult.Content is null)
            {
                continue;
            }

            cursor = cursor.WithFetchCache(summaryKey, summaryResult.ETag, summaryResult.LastModified);

            using var document = JsonDocument.Parse(summaryResult.Content);

            if (document.RootElement.ValueKind != JsonValueKind.Array)
            {
                _logger.LogWarning(
                    "Red Hat Hydra summary response had unexpected payload kind {Kind} for {Uri}",
                    document.RootElement.ValueKind,
                    summaryUri);
                break;
            }

            var pageCount = 0;
            foreach (var element in document.RootElement.EnumerateArray())
            {
                if (!RedHatSummaryItem.TryParse(element, out var summary))
                {
                    continue;
                }

                pageCount++;

                if (cursor.LastReleasedOn.HasValue)
                {
                    if (summary.ReleasedOn < cursor.LastReleasedOn.Value - overlap)
                    {
                        stopDueToOlderData = true;
                        break;
                    }

                    if (summary.ReleasedOn < cursor.LastReleasedOn.Value)
                    {
                        stopDueToOlderData = true;
                        break;
                    }

                    if (summary.ReleasedOn == cursor.LastReleasedOn.Value && processedSet.Contains(summary.AdvisoryId))
                    {
                        continue;
                    }
                }

                newSummaries.Add(summary);
                processedSet.Add(summary.AdvisoryId);

                if (newSummaries.Count >= _options.MaxAdvisoriesPerFetch)
                {
                    break;
                }
            }

            if (newSummaries.Count >= _options.MaxAdvisoriesPerFetch || stopDueToOlderData)
            {
                break;
            }

            if (pageCount < _options.PageSize)
            {
                break;
            }
        }

        if (newSummaries.Count == 0)
        {
            return;
        }

        newSummaries.Sort(static (left, right) =>
        {
            var compare = left.ReleasedOn.CompareTo(right.ReleasedOn);
            return compare != 0
                ? compare
                : string.CompareOrdinal(left.AdvisoryId, right.AdvisoryId);
        });

        var pendingDocuments = new HashSet<Guid>(cursor.PendingDocuments);

        foreach (var summary in newSummaries)
        {
            var resourceUri = summary.ResourceUri;
            var resourceKey = resourceUri.ToString();
            touchedResources.Add(resourceKey);

            var cached = cursor.TryGetFetchCache(resourceKey);
            var metadata = new Dictionary<string, string>(StringComparer.Ordinal)
            {
                ["advisoryId"] = summary.AdvisoryId,
                ["releasedOn"] = summary.ReleasedOn.ToString("O", CultureInfo.InvariantCulture)
            };

            var request = new SourceFetchRequest(RedHatOptions.HttpClientName, SourceName, resourceUri)
            {
                Metadata = metadata,
                ETag = cached?.ETag,
                LastModified = cached?.LastModified,
                TimeoutOverride = _options.FetchTimeout,
            };

            try
            {
                var result = await _fetchService.FetchAsync(request, cancellationToken).ConfigureAwait(false);
                if (result.IsNotModified)
                {
                    continue;
                }

                if (!result.IsSuccess || result.Document is null)
                {
                    continue;
                }

                pendingDocuments.Add(result.Document.Id);
                cursor = cursor.WithFetchCache(resourceKey, result.Document.Etag, result.Document.LastModified);
            }
            catch (Exception ex)
            {
                _logger.LogError(ex, "Red Hat Hydra advisory fetch failed for {Uri}", resourceUri);
                throw;
            }
        }

        var maxRelease = newSummaries.Max(static item => item.ReleasedOn);
        var idsForMaxRelease = newSummaries
            .Where(item => item.ReleasedOn == maxRelease)
            .Select(item => item.AdvisoryId)
            .Distinct(StringComparer.OrdinalIgnoreCase)
            .ToArray();

        RedHatCursor updated;
        if (cursor.LastReleasedOn.HasValue && maxRelease == cursor.LastReleasedOn.Value)
        {
            updated = cursor
                .WithPendingDocuments(pendingDocuments)
                .AddProcessedAdvisories(idsForMaxRelease)
                .PruneFetchCache(touchedResources);
        }
        else
        {
            updated = cursor
                .WithPendingDocuments(pendingDocuments)
                .WithLastReleased(maxRelease, idsForMaxRelease)
                .PruneFetchCache(touchedResources);
        }

        await UpdateCursorAsync(updated, cancellationToken).ConfigureAwait(false);
    }

    public async Task ParseAsync(IServiceProvider services, CancellationToken cancellationToken)
    {
        ArgumentNullException.ThrowIfNull(services);

        var cursor = await GetCursorAsync(cancellationToken).ConfigureAwait(false);
        if (cursor.PendingDocuments.Count == 0)
        {
            return;
        }

        var remainingFetch = cursor.PendingDocuments.ToList();
        var pendingMappings = cursor.PendingMappings.ToList();

        foreach (var documentId in cursor.PendingDocuments)
        {
            DocumentRecord? document = null;

            try
            {
                document = await _documentStore.FindAsync(documentId, cancellationToken).ConfigureAwait(false);
                if (document is null)
                {
                    remainingFetch.Remove(documentId);
                    pendingMappings.Remove(documentId);
                    continue;
                }

-               if (!document.GridFsId.HasValue)
+               if (!document.PayloadId.HasValue)
                {
                    _logger.LogWarning("Red Hat document {DocumentId} missing GridFS content; skipping", document.Id);
                    remainingFetch.Remove(documentId);
                    pendingMappings.Remove(documentId);
                    continue;
                }

-               var rawBytes = await _rawDocumentStorage.DownloadAsync(document.GridFsId.Value, cancellationToken).ConfigureAwait(false);
+               var rawBytes = await _rawDocumentStorage.DownloadAsync(document.PayloadId.Value, cancellationToken).ConfigureAwait(false);
                using var jsonDocument = JsonDocument.Parse(rawBytes);
                var sanitized = JsonSerializer.Serialize(jsonDocument.RootElement);
                var payload = BsonDocument.Parse(sanitized);

                var dtoRecord = new DtoRecord(
                    Guid.NewGuid(),
                    document.Id,
                    SourceName,
                    "redhat.csaf.v2",
                    payload,
                    _timeProvider.GetUtcNow());

                await _dtoStore.UpsertAsync(dtoRecord, cancellationToken).ConfigureAwait(false);
                await _documentStore.UpdateStatusAsync(document.Id, DocumentStatuses.PendingMap, cancellationToken).ConfigureAwait(false);

                remainingFetch.Remove(documentId);
                if (!pendingMappings.Contains(documentId))
                {
                    pendingMappings.Add(documentId);
                }
            }
            catch (Exception ex)
            {
                var uri = document?.Uri ?? documentId.ToString();
                _logger.LogError(ex, "Red Hat CSAF parse failed for {Uri}", uri);
                remainingFetch.Remove(documentId);
                pendingMappings.Remove(documentId);
            }
        }

        var updatedCursor = cursor
            .WithPendingDocuments(remainingFetch)
            .WithPendingMappings(pendingMappings);

        await UpdateCursorAsync(updatedCursor, cancellationToken).ConfigureAwait(false);
    }

    public async Task MapAsync(IServiceProvider services, CancellationToken cancellationToken)
    {
        ArgumentNullException.ThrowIfNull(services);

        var cursor = await GetCursorAsync(cancellationToken).ConfigureAwait(false);
        if (cursor.PendingMappings.Count == 0)
        {
            return;
        }

        var pendingMappings = cursor.PendingMappings.ToList();

        foreach (var documentId in cursor.PendingMappings)
        {
            try
            {
                var dto = await _dtoStore.FindByDocumentIdAsync(documentId, cancellationToken).ConfigureAwait(false);
                var document = await _documentStore.FindAsync(documentId, cancellationToken).ConfigureAwait(false);

                if (dto is null || document is null)
                {
                    pendingMappings.Remove(documentId);
                    continue;
                }

                var json = dto.Payload.ToJson(new JsonWriterSettings
                {
                    OutputMode = JsonOutputMode.RelaxedExtendedJson,
                });

                using var jsonDocument = JsonDocument.Parse(json);
                var advisory = RedHatMapper.Map(SourceName, dto, document, jsonDocument);
                if (advisory is null)
                {
                    pendingMappings.Remove(documentId);
                    continue;
                }

                await _advisoryStore.UpsertAsync(advisory, cancellationToken).ConfigureAwait(false);
                await _documentStore.UpdateStatusAsync(documentId, DocumentStatuses.Mapped, cancellationToken).ConfigureAwait(false);
                pendingMappings.Remove(documentId);
            }
            catch (Exception ex)
            {
                _logger.LogError(ex, "Red Hat map failed for document {DocumentId}", documentId);
            }
        }

        var updatedCursor = cursor.WithPendingMappings(pendingMappings);
        await UpdateCursorAsync(updatedCursor, cancellationToken).ConfigureAwait(false);
    }

    private async Task<RedHatCursor> GetCursorAsync(CancellationToken cancellationToken)
    {
        var record = await _stateRepository.TryGetAsync(SourceName, cancellationToken).ConfigureAwait(false);
        return RedHatCursor.FromBsonDocument(record?.Cursor);
    }

    private async Task UpdateCursorAsync(RedHatCursor cursor, CancellationToken cancellationToken)
    {
        var completedAt = _timeProvider.GetUtcNow();
        await _stateRepository.UpdateCursorAsync(SourceName, cursor.ToBsonDocument(), completedAt, cancellationToken).ConfigureAwait(false);
    }

    private Uri BuildSummaryUri(DateTimeOffset after, int page)
    {
        var builder = new UriBuilder(_options.BaseEndpoint);
        var basePath = builder.Path?.TrimEnd('/') ?? string.Empty;
        var summaryPath = _options.SummaryPath.TrimStart('/');
        builder.Path = string.IsNullOrEmpty(basePath)
            ? $"/{summaryPath}"
            : $"{basePath}/{summaryPath}";

        var parameters = new Dictionary<string, string>(StringComparer.Ordinal)
        {
            ["after"] = after.ToString("yyyy-MM-dd", CultureInfo.InvariantCulture),
            ["per_page"] = _options.PageSize.ToString(CultureInfo.InvariantCulture),
            ["page"] = page.ToString(CultureInfo.InvariantCulture)
        };

        builder.Query = string.Join('&', parameters.Select(static kvp =>
            $"{Uri.EscapeDataString(kvp.Key)}={Uri.EscapeDataString(kvp.Value)}"));
        return builder.Uri;
    }
}