Add unit tests for VexLens normalizer, CPE parser, product mapper, and PURL parser
- Implemented comprehensive tests for VexLensNormalizer including format detection and normalization scenarios.
- Added tests for CpeParser covering CPE 2.3 and 2.2 formats, invalid inputs, and canonical key generation.
- Created tests for ProductMapper to validate parsing and matching logic across different strictness levels.
- Developed tests for PurlParser to ensure correct parsing of various PURL formats and validation of identifiers.
- Introduced stubs for the Monaco editor and worker to facilitate testing in the web application.
- Updated the test project file to include the necessary dependencies.
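For flavor, a minimal sketch of the kind of PURL test described here, assuming a hypothetical PurlParser.TryParse surface (the parser API itself is not shown in this diff):

using Xunit;

public sealed class PurlParserSketchTests
{
    [Fact]
    public void TryParse_NpmPurl_ExtractsCoordinates()
    {
        // Hypothetical API for illustration only; the real signature may differ.
        var ok = PurlParser.TryParse("pkg:npm/lodash@4.17.21", out var purl);

        Assert.True(ok);
        Assert.Equal("npm", purl.Type);
        Assert.Equal("lodash", purl.Name);
        Assert.Equal("4.17.21", purl.Version);
    }
}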
@@ -21,8 +21,6 @@ Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.DependencyInjecti
EndProject
Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Aoc", "..\Aoc\__Libraries\StellaOps.Aoc\StellaOps.Aoc.csproj", "{A6802486-A8D3-4623-8D81-04ED23F9D312}"
EndProject
Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Concelier.Storage.Mongo", "__Libraries\StellaOps.Concelier.Storage.Mongo\StellaOps.Concelier.Storage.Mongo.csproj", "{C926373D-5ACB-4E62-96D5-264EF4C61BE5}"
EndProject
Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Concelier.Connector.Common", "__Libraries\StellaOps.Concelier.Connector.Common\StellaOps.Concelier.Connector.Common.csproj", "{2D68125A-0ACD-4015-A8FA-B54284B8A3CB}"
EndProject
Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Concelier.Merge", "__Libraries\StellaOps.Concelier.Merge\StellaOps.Concelier.Merge.csproj", "{7760219F-6C19-4B61-9015-73BB02005C0B}"
@@ -179,8 +177,6 @@ Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Concelier.Normali
EndProject
Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Concelier.RawModels.Tests", "__Tests\StellaOps.Concelier.RawModels.Tests\StellaOps.Concelier.RawModels.Tests.csproj", "{7B995CBB-3D20-4509-9300-EC012C18C4B4}"
EndProject
Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Concelier.Storage.Mongo.Tests", "__Tests\StellaOps.Concelier.Storage.Mongo.Tests\StellaOps.Concelier.Storage.Mongo.Tests.csproj", "{9006A5A2-01D8-4A70-AEA7-B7B1987C4A62}"
EndProject
Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Concelier.WebService.Tests", "__Tests\StellaOps.Concelier.WebService.Tests\StellaOps.Concelier.WebService.Tests.csproj", "{664A2577-6DA1-42DA-A213-3253017FA4BF}"
EndProject
Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "__Analyzers", "__Analyzers", "{176B5A8A-7857-3ECD-1128-3C721BC7F5C6}"
@@ -1,11 +0,0 @@
namespace StellaOps.Concelier.Storage.Mongo.Documents;

/// <summary>
/// Stub record for document storage. (Placeholder for full implementation)
/// </summary>
public sealed record DocumentRecord
{
    public string Id { get; init; } = string.Empty;
    public string TenantId { get; init; } = string.Empty;
    public string Source { get; init; } = string.Empty;
}
@@ -1,8 +0,0 @@
namespace StellaOps.Concelier.Storage.Mongo;

/// <summary>
/// Stub interface for document storage. (Placeholder for full implementation)
/// </summary>
public interface IDocumentStore
{
}
@@ -1,8 +0,0 @@
namespace StellaOps.Concelier.Storage.Mongo;

/// <summary>
/// Stub interface for source state repository. (Placeholder for full implementation)
/// </summary>
public interface ISourceStateRepository
{
}
@@ -1,10 +0,0 @@
namespace StellaOps.Concelier.Storage.Mongo;

/// <summary>
/// Stub options for MongoDB storage. (Placeholder for full implementation)
/// </summary>
public sealed class MongoStorageOptions
{
    public string ConnectionString { get; set; } = string.Empty;
    public string DatabaseName { get; set; } = string.Empty;
}
@@ -1,313 +0,0 @@
using System.Security.Cryptography;
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Options;
using MongoDB.Bson;
using MongoDB.Driver;
using MongoDB.Driver.GridFS;

namespace StellaOps.Concelier.Storage.Mongo.ObjectStorage;

/// <summary>
/// Service for migrating raw payloads from GridFS to S3-compatible object storage.
/// </summary>
public sealed class GridFsMigrationService
{
    private readonly IGridFSBucket _gridFs;
    private readonly IObjectStore _objectStore;
    private readonly IMigrationTracker _migrationTracker;
    private readonly ObjectStorageOptions _options;
    private readonly TimeProvider _timeProvider;
    private readonly ILogger<GridFsMigrationService> _logger;

    public GridFsMigrationService(
        IGridFSBucket gridFs,
        IObjectStore objectStore,
        IMigrationTracker migrationTracker,
        IOptions<ObjectStorageOptions> options,
        TimeProvider timeProvider,
        ILogger<GridFsMigrationService> logger)
    {
        _gridFs = gridFs ?? throw new ArgumentNullException(nameof(gridFs));
        _objectStore = objectStore ?? throw new ArgumentNullException(nameof(objectStore));
        _migrationTracker = migrationTracker ?? throw new ArgumentNullException(nameof(migrationTracker));
        _options = options?.Value ?? throw new ArgumentNullException(nameof(options));
        _timeProvider = timeProvider ?? TimeProvider.System;
        _logger = logger ?? throw new ArgumentNullException(nameof(logger));
    }

    /// <summary>
    /// Migrates a single GridFS document to object storage.
    /// </summary>
    public async Task<MigrationResult> MigrateAsync(
        string gridFsId,
        string tenantId,
        string sourceId,
        CancellationToken cancellationToken = default)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(gridFsId);
        ArgumentException.ThrowIfNullOrWhiteSpace(tenantId);
        ArgumentException.ThrowIfNullOrWhiteSpace(sourceId);

        // Check if already migrated
        if (await _migrationTracker.IsMigratedAsync(gridFsId, cancellationToken).ConfigureAwait(false))
        {
            _logger.LogDebug("GridFS {GridFsId} already migrated, skipping", gridFsId);
            return MigrationResult.AlreadyMigrated(gridFsId);
        }

        try
        {
            // Download from GridFS
            var objectId = ObjectId.Parse(gridFsId);
            using var downloadStream = new MemoryStream();
            await _gridFs.DownloadToStreamAsync(objectId, downloadStream, cancellationToken: cancellationToken)
                .ConfigureAwait(false);

            var data = downloadStream.ToArray();
            var sha256 = ComputeSha256(data);

            // Get GridFS file info
            var filter = Builders<GridFSFileInfo>.Filter.Eq("_id", objectId);
            var fileInfo = await _gridFs.Find(filter)
                .FirstOrDefaultAsync(cancellationToken)
                .ConfigureAwait(false);

            var ingestedAt = fileInfo?.UploadDateTime ?? _timeProvider.GetUtcNow().UtcDateTime;

            // Create provenance metadata
            var provenance = new ProvenanceMetadata
            {
                SourceId = sourceId,
                IngestedAt = new DateTimeOffset(ingestedAt, TimeSpan.Zero),
                TenantId = tenantId,
                OriginalFormat = DetectFormat(fileInfo?.Filename),
                OriginalSize = data.Length,
                GridFsLegacyId = gridFsId,
                Transformations =
                [
                    new TransformationRecord
                    {
                        Type = TransformationType.Migration,
                        Timestamp = _timeProvider.GetUtcNow(),
                        Agent = "concelier-gridfs-migration-v1"
                    }
                ]
            };

            // Store in object storage
            var reference = await _objectStore.StoreAsync(
                tenantId,
                data,
                provenance,
                GetContentType(fileInfo?.Filename),
                cancellationToken).ConfigureAwait(false);

            // Record migration
            await _migrationTracker.RecordMigrationAsync(
                gridFsId,
                reference.Pointer,
                MigrationStatus.Migrated,
                cancellationToken).ConfigureAwait(false);

            _logger.LogInformation(
                "Migrated GridFS {GridFsId} to {Bucket}/{Key}, size {Size} bytes",
                gridFsId, reference.Pointer.Bucket, reference.Pointer.Key, data.Length);

            return MigrationResult.Success(gridFsId, reference);
        }
        catch (GridFSFileNotFoundException)
        {
            _logger.LogWarning("GridFS file not found: {GridFsId}", gridFsId);
            return MigrationResult.NotFound(gridFsId);
        }
        catch (Exception ex)
        {
            _logger.LogError(ex, "Failed to migrate GridFS {GridFsId}", gridFsId);
            return MigrationResult.Failed(gridFsId, ex.Message);
        }
    }

    /// <summary>
    /// Verifies a migrated document by comparing hashes.
    /// </summary>
    public async Task<bool> VerifyMigrationAsync(
        string gridFsId,
        CancellationToken cancellationToken = default)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(gridFsId);

        var record = await _migrationTracker.GetByGridFsIdAsync(gridFsId, cancellationToken)
            .ConfigureAwait(false);

        if (record is null)
        {
            _logger.LogWarning("No migration record found for {GridFsId}", gridFsId);
            return false;
        }

        // Download original from GridFS
        var objectId = ObjectId.Parse(gridFsId);
        using var downloadStream = new MemoryStream();

        try
        {
            await _gridFs.DownloadToStreamAsync(objectId, downloadStream, cancellationToken: cancellationToken)
                .ConfigureAwait(false);
        }
        catch (GridFSFileNotFoundException)
        {
            _logger.LogWarning("Original GridFS file not found for verification: {GridFsId}", gridFsId);
            return false;
        }

        var originalHash = ComputeSha256(downloadStream.ToArray());

        // Verify the migrated object
        var reference = PayloadReference.CreateObjectStorage(record.Pointer, new ProvenanceMetadata
        {
            SourceId = string.Empty,
            IngestedAt = record.MigratedAt,
            TenantId = string.Empty,
        });

        var verified = await _objectStore.VerifyIntegrityAsync(reference, cancellationToken)
            .ConfigureAwait(false);

        if (verified && string.Equals(originalHash, record.Pointer.Sha256, StringComparison.OrdinalIgnoreCase))
        {
            await _migrationTracker.MarkVerifiedAsync(gridFsId, cancellationToken).ConfigureAwait(false);
            _logger.LogInformation("Verified migration for {GridFsId}", gridFsId);
            return true;
        }

        _logger.LogWarning(
            "Verification failed for {GridFsId}: original hash {Original}, stored hash {Stored}",
            gridFsId, originalHash, record.Pointer.Sha256);

        return false;
    }

    /// <summary>
    /// Batches migration of multiple GridFS documents.
    /// </summary>
    public async Task<BatchMigrationResult> MigrateBatchAsync(
        IEnumerable<GridFsMigrationRequest> requests,
        CancellationToken cancellationToken = default)
    {
        var results = new List<MigrationResult>();

        foreach (var request in requests)
        {
            if (cancellationToken.IsCancellationRequested)
            {
                break;
            }

            var result = await MigrateAsync(
                request.GridFsId,
                request.TenantId,
                request.SourceId,
                cancellationToken).ConfigureAwait(false);

            results.Add(result);
        }

        return new BatchMigrationResult(results);
    }

    private static string ComputeSha256(byte[] data)
    {
        var hash = SHA256.HashData(data);
        return Convert.ToHexStringLower(hash);
    }

    private static OriginalFormat? DetectFormat(string? filename)
    {
        if (string.IsNullOrEmpty(filename))
        {
            return null;
        }

        return Path.GetExtension(filename).ToLowerInvariant() switch
        {
            ".json" => OriginalFormat.Json,
            ".xml" => OriginalFormat.Xml,
            ".csv" => OriginalFormat.Csv,
            ".ndjson" => OriginalFormat.Ndjson,
            ".yaml" or ".yml" => OriginalFormat.Yaml,
            _ => null
        };
    }

    private static string GetContentType(string? filename)
    {
        if (string.IsNullOrEmpty(filename))
        {
            return "application/octet-stream";
        }

        return Path.GetExtension(filename).ToLowerInvariant() switch
        {
            ".json" => "application/json",
            ".xml" => "application/xml",
            ".csv" => "text/csv",
            ".ndjson" => "application/x-ndjson",
            ".yaml" or ".yml" => "application/x-yaml",
            _ => "application/octet-stream"
        };
    }
}

/// <summary>
/// Request to migrate a GridFS document.
/// </summary>
public sealed record GridFsMigrationRequest(
    string GridFsId,
    string TenantId,
    string SourceId);

/// <summary>
/// Result of a single migration.
/// </summary>
public sealed record MigrationResult
{
    public required string GridFsId { get; init; }
    public required MigrationResultStatus Status { get; init; }
    public PayloadReference? Reference { get; init; }
    public string? ErrorMessage { get; init; }

    public static MigrationResult Success(string gridFsId, PayloadReference reference)
        => new() { GridFsId = gridFsId, Status = MigrationResultStatus.Success, Reference = reference };

    public static MigrationResult AlreadyMigrated(string gridFsId)
        => new() { GridFsId = gridFsId, Status = MigrationResultStatus.AlreadyMigrated };

    public static MigrationResult NotFound(string gridFsId)
        => new() { GridFsId = gridFsId, Status = MigrationResultStatus.NotFound };

    public static MigrationResult Failed(string gridFsId, string errorMessage)
        => new() { GridFsId = gridFsId, Status = MigrationResultStatus.Failed, ErrorMessage = errorMessage };
}

/// <summary>
/// Status of a migration result.
/// </summary>
public enum MigrationResultStatus
{
    Success,
    AlreadyMigrated,
    NotFound,
    Failed
}

/// <summary>
/// Result of a batch migration.
/// </summary>
public sealed record BatchMigrationResult(IReadOnlyList<MigrationResult> Results)
{
    public int TotalCount => Results.Count;
    public int SuccessCount => Results.Count(r => r.Status == MigrationResultStatus.Success);
    public int AlreadyMigratedCount => Results.Count(r => r.Status == MigrationResultStatus.AlreadyMigrated);
    public int NotFoundCount => Results.Count(r => r.Status == MigrationResultStatus.NotFound);
    public int FailedCount => Results.Count(r => r.Status == MigrationResultStatus.Failed);
}
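A minimal usage sketch for the migration service removed above; migrationService is assumed to come from DI, and the id/tenant/source values are placeholders:

// Sketch only: values are placeholders and the service instance is assumed injected.
var requests = new[]
{
    new GridFsMigrationRequest("5f1a2b3c4d5e6f7a8b9c0d1e", "tenant-a", "https://feeds.example/nvd"),
};

var batch = await migrationService.MigrateBatchAsync(requests, CancellationToken.None);
Console.WriteLine($"total={batch.TotalCount} ok={batch.SuccessCount} failed={batch.FailedCount}");

// Optionally hash-compare each migrated copy against its GridFS original.
foreach (var result in batch.Results.Where(r => r.Status == MigrationResultStatus.Success))
{
    await migrationService.VerifyMigrationAsync(result.GridFsId, CancellationToken.None);
}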
@@ -1,60 +0,0 @@
namespace StellaOps.Concelier.Storage.Mongo.ObjectStorage;

/// <summary>
/// Tracks GridFS to S3 migrations.
/// </summary>
public interface IMigrationTracker
{
    /// <summary>
    /// Records a migration attempt.
    /// </summary>
    Task<MigrationRecord> RecordMigrationAsync(
        string gridFsId,
        ObjectPointer pointer,
        MigrationStatus status,
        CancellationToken cancellationToken = default);

    /// <summary>
    /// Updates a migration record status.
    /// </summary>
    Task UpdateStatusAsync(
        string gridFsId,
        MigrationStatus status,
        string? errorMessage = null,
        CancellationToken cancellationToken = default);

    /// <summary>
    /// Marks a migration as verified.
    /// </summary>
    Task MarkVerifiedAsync(
        string gridFsId,
        CancellationToken cancellationToken = default);

    /// <summary>
    /// Gets a migration record by GridFS ID.
    /// </summary>
    Task<MigrationRecord?> GetByGridFsIdAsync(
        string gridFsId,
        CancellationToken cancellationToken = default);

    /// <summary>
    /// Lists pending migrations.
    /// </summary>
    Task<IReadOnlyList<MigrationRecord>> ListPendingAsync(
        int limit = 100,
        CancellationToken cancellationToken = default);

    /// <summary>
    /// Lists migrations needing verification.
    /// </summary>
    Task<IReadOnlyList<MigrationRecord>> ListNeedingVerificationAsync(
        int limit = 100,
        CancellationToken cancellationToken = default);

    /// <summary>
    /// Checks if a GridFS ID has been migrated.
    /// </summary>
    Task<bool> IsMigratedAsync(
        string gridFsId,
        CancellationToken cancellationToken = default);
}
@@ -1,98 +0,0 @@
namespace StellaOps.Concelier.Storage.Mongo.ObjectStorage;

/// <summary>
/// Abstraction for S3-compatible object storage operations.
/// </summary>
public interface IObjectStore
{
    /// <summary>
    /// Stores a payload, returning a reference (either inline or object storage).
    /// Automatically decides based on size thresholds.
    /// </summary>
    /// <param name="tenantId">Tenant identifier for bucket selection.</param>
    /// <param name="data">Payload data to store.</param>
    /// <param name="provenance">Provenance metadata for the payload.</param>
    /// <param name="contentType">MIME type of the content.</param>
    /// <param name="cancellationToken">Cancellation token.</param>
    /// <returns>Reference to the stored payload.</returns>
    Task<PayloadReference> StoreAsync(
        string tenantId,
        ReadOnlyMemory<byte> data,
        ProvenanceMetadata provenance,
        string contentType = "application/json",
        CancellationToken cancellationToken = default);

    /// <summary>
    /// Stores a payload from a stream.
    /// </summary>
    /// <param name="tenantId">Tenant identifier for bucket selection.</param>
    /// <param name="stream">Stream containing payload data.</param>
    /// <param name="provenance">Provenance metadata for the payload.</param>
    /// <param name="contentType">MIME type of the content.</param>
    /// <param name="cancellationToken">Cancellation token.</param>
    /// <returns>Reference to the stored payload.</returns>
    Task<PayloadReference> StoreStreamAsync(
        string tenantId,
        Stream stream,
        ProvenanceMetadata provenance,
        string contentType = "application/json",
        CancellationToken cancellationToken = default);

    /// <summary>
    /// Retrieves a payload by its reference.
    /// </summary>
    /// <param name="reference">Reference to the payload.</param>
    /// <param name="cancellationToken">Cancellation token.</param>
    /// <returns>Payload data, or null if not found.</returns>
    Task<byte[]?> RetrieveAsync(
        PayloadReference reference,
        CancellationToken cancellationToken = default);

    /// <summary>
    /// Retrieves a payload as a stream.
    /// </summary>
    /// <param name="reference">Reference to the payload.</param>
    /// <param name="cancellationToken">Cancellation token.</param>
    /// <returns>Stream containing payload data, or null if not found.</returns>
    Task<Stream?> RetrieveStreamAsync(
        PayloadReference reference,
        CancellationToken cancellationToken = default);

    /// <summary>
    /// Checks if an object exists.
    /// </summary>
    /// <param name="pointer">Object pointer to check.</param>
    /// <param name="cancellationToken">Cancellation token.</param>
    /// <returns>True if object exists.</returns>
    Task<bool> ExistsAsync(
        ObjectPointer pointer,
        CancellationToken cancellationToken = default);

    /// <summary>
    /// Deletes an object.
    /// </summary>
    /// <param name="pointer">Object pointer to delete.</param>
    /// <param name="cancellationToken">Cancellation token.</param>
    Task DeleteAsync(
        ObjectPointer pointer,
        CancellationToken cancellationToken = default);

    /// <summary>
    /// Ensures the tenant bucket exists.
    /// </summary>
    /// <param name="tenantId">Tenant identifier.</param>
    /// <param name="cancellationToken">Cancellation token.</param>
    Task EnsureBucketExistsAsync(
        string tenantId,
        CancellationToken cancellationToken = default);

    /// <summary>
    /// Verifies a payload's integrity by comparing its hash.
    /// </summary>
    /// <param name="reference">Reference to verify.</param>
    /// <param name="cancellationToken">Cancellation token.</param>
    /// <returns>True if hash matches.</returns>
    Task<bool> VerifyIntegrityAsync(
        PayloadReference reference,
        CancellationToken cancellationToken = default);
}
@@ -1,63 +0,0 @@
namespace StellaOps.Concelier.Storage.Mongo.ObjectStorage;

/// <summary>
/// Record of a migration from GridFS to S3.
/// </summary>
public sealed record MigrationRecord
{
    /// <summary>
    /// Original GridFS ObjectId.
    /// </summary>
    public required string GridFsId { get; init; }

    /// <summary>
    /// Pointer to the migrated object.
    /// </summary>
    public required ObjectPointer Pointer { get; init; }

    /// <summary>
    /// Timestamp when migration was performed.
    /// </summary>
    public required DateTimeOffset MigratedAt { get; init; }

    /// <summary>
    /// Current status of the migration.
    /// </summary>
    public required MigrationStatus Status { get; init; }

    /// <summary>
    /// Timestamp when content hash was verified post-migration.
    /// </summary>
    public DateTimeOffset? VerifiedAt { get; init; }

    /// <summary>
    /// Whether GridFS tombstone still exists for rollback.
    /// </summary>
    public bool RollbackAvailable { get; init; } = true;

    /// <summary>
    /// Error message if migration failed.
    /// </summary>
    public string? ErrorMessage { get; init; }
}

/// <summary>
/// Status of a GridFS to S3 migration.
/// </summary>
public enum MigrationStatus
{
    /// <summary>Migration pending.</summary>
    Pending,

    /// <summary>Migration completed.</summary>
    Migrated,

    /// <summary>Migration verified via hash comparison.</summary>
    Verified,

    /// <summary>Migration failed.</summary>
    Failed,

    /// <summary>Original GridFS tombstoned.</summary>
    Tombstoned
}
@@ -1,232 +0,0 @@
using Microsoft.Extensions.Logging;
using MongoDB.Bson;
using MongoDB.Bson.Serialization.Attributes;
using MongoDB.Driver;

namespace StellaOps.Concelier.Storage.Mongo.ObjectStorage;

/// <summary>
/// MongoDB-backed migration tracker for GridFS to S3 migrations.
/// </summary>
public sealed class MongoMigrationTracker : IMigrationTracker
{
    private const string CollectionName = "object_storage_migrations";

    private readonly IMongoCollection<MigrationDocument> _collection;
    private readonly TimeProvider _timeProvider;
    private readonly ILogger<MongoMigrationTracker> _logger;

    public MongoMigrationTracker(
        IMongoDatabase database,
        TimeProvider timeProvider,
        ILogger<MongoMigrationTracker> logger)
    {
        ArgumentNullException.ThrowIfNull(database);
        _collection = database.GetCollection<MigrationDocument>(CollectionName);
        _timeProvider = timeProvider ?? TimeProvider.System;
        _logger = logger ?? throw new ArgumentNullException(nameof(logger));
    }

    public async Task<MigrationRecord> RecordMigrationAsync(
        string gridFsId,
        ObjectPointer pointer,
        MigrationStatus status,
        CancellationToken cancellationToken = default)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(gridFsId);
        ArgumentNullException.ThrowIfNull(pointer);

        var now = _timeProvider.GetUtcNow();
        var document = new MigrationDocument
        {
            GridFsId = gridFsId,
            Bucket = pointer.Bucket,
            Key = pointer.Key,
            Sha256 = pointer.Sha256,
            Size = pointer.Size,
            ContentType = pointer.ContentType,
            Encoding = pointer.Encoding.ToString().ToLowerInvariant(),
            MigratedAt = now.UtcDateTime,
            Status = status.ToString().ToLowerInvariant(),
            RollbackAvailable = true,
        };

        await _collection.InsertOneAsync(document, cancellationToken: cancellationToken)
            .ConfigureAwait(false);

        _logger.LogInformation(
            "Recorded migration for GridFS {GridFsId} to {Bucket}/{Key}",
            gridFsId, pointer.Bucket, pointer.Key);

        return ToRecord(document);
    }

    public async Task UpdateStatusAsync(
        string gridFsId,
        MigrationStatus status,
        string? errorMessage = null,
        CancellationToken cancellationToken = default)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(gridFsId);

        var filter = Builders<MigrationDocument>.Filter.Eq(d => d.GridFsId, gridFsId);
        var update = Builders<MigrationDocument>.Update
            .Set(d => d.Status, status.ToString().ToLowerInvariant())
            .Set(d => d.ErrorMessage, errorMessage);

        await _collection.UpdateOneAsync(filter, update, cancellationToken: cancellationToken)
            .ConfigureAwait(false);

        _logger.LogDebug("Updated migration status for {GridFsId} to {Status}", gridFsId, status);
    }

    public async Task MarkVerifiedAsync(
        string gridFsId,
        CancellationToken cancellationToken = default)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(gridFsId);

        var now = _timeProvider.GetUtcNow();
        var filter = Builders<MigrationDocument>.Filter.Eq(d => d.GridFsId, gridFsId);
        var update = Builders<MigrationDocument>.Update
            .Set(d => d.Status, MigrationStatus.Verified.ToString().ToLowerInvariant())
            .Set(d => d.VerifiedAt, now.UtcDateTime);

        await _collection.UpdateOneAsync(filter, update, cancellationToken: cancellationToken)
            .ConfigureAwait(false);

        _logger.LogDebug("Marked migration as verified for {GridFsId}", gridFsId);
    }

    public async Task<MigrationRecord?> GetByGridFsIdAsync(
        string gridFsId,
        CancellationToken cancellationToken = default)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(gridFsId);

        var filter = Builders<MigrationDocument>.Filter.Eq(d => d.GridFsId, gridFsId);
        var document = await _collection.Find(filter)
            .FirstOrDefaultAsync(cancellationToken)
            .ConfigureAwait(false);

        return document is null ? null : ToRecord(document);
    }

    public async Task<IReadOnlyList<MigrationRecord>> ListPendingAsync(
        int limit = 100,
        CancellationToken cancellationToken = default)
    {
        var filter = Builders<MigrationDocument>.Filter.Eq(
            d => d.Status, MigrationStatus.Pending.ToString().ToLowerInvariant());

        var documents = await _collection.Find(filter)
            .Limit(limit)
            .ToListAsync(cancellationToken)
            .ConfigureAwait(false);

        return documents.Select(ToRecord).ToList();
    }

    public async Task<IReadOnlyList<MigrationRecord>> ListNeedingVerificationAsync(
        int limit = 100,
        CancellationToken cancellationToken = default)
    {
        var filter = Builders<MigrationDocument>.Filter.Eq(
            d => d.Status, MigrationStatus.Migrated.ToString().ToLowerInvariant());

        var documents = await _collection.Find(filter)
            .Limit(limit)
            .ToListAsync(cancellationToken)
            .ConfigureAwait(false);

        return documents.Select(ToRecord).ToList();
    }

    public async Task<bool> IsMigratedAsync(
        string gridFsId,
        CancellationToken cancellationToken = default)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(gridFsId);

        var filter = Builders<MigrationDocument>.Filter.And(
            Builders<MigrationDocument>.Filter.Eq(d => d.GridFsId, gridFsId),
            Builders<MigrationDocument>.Filter.In(d => d.Status, new[]
            {
                MigrationStatus.Migrated.ToString().ToLowerInvariant(),
                MigrationStatus.Verified.ToString().ToLowerInvariant()
            }));

        var count = await _collection.CountDocumentsAsync(filter, cancellationToken: cancellationToken)
            .ConfigureAwait(false);

        return count > 0;
    }

    private static MigrationRecord ToRecord(MigrationDocument document)
    {
        return new MigrationRecord
        {
            GridFsId = document.GridFsId,
            Pointer = new ObjectPointer
            {
                Bucket = document.Bucket,
                Key = document.Key,
                Sha256 = document.Sha256,
                Size = document.Size,
                ContentType = document.ContentType,
                Encoding = Enum.Parse<ContentEncoding>(document.Encoding, ignoreCase: true),
            },
            MigratedAt = new DateTimeOffset(document.MigratedAt, TimeSpan.Zero),
            Status = Enum.Parse<MigrationStatus>(document.Status, ignoreCase: true),
            VerifiedAt = document.VerifiedAt.HasValue
                ? new DateTimeOffset(document.VerifiedAt.Value, TimeSpan.Zero)
                : null,
            RollbackAvailable = document.RollbackAvailable,
            ErrorMessage = document.ErrorMessage,
        };
    }

    [BsonIgnoreExtraElements]
    private sealed class MigrationDocument
    {
        [BsonId]
        [BsonRepresentation(BsonType.ObjectId)]
        public string? Id { get; set; }

        [BsonElement("gridFsId")]
        public required string GridFsId { get; set; }

        [BsonElement("bucket")]
        public required string Bucket { get; set; }

        [BsonElement("key")]
        public required string Key { get; set; }

        [BsonElement("sha256")]
        public required string Sha256 { get; set; }

        [BsonElement("size")]
        public required long Size { get; set; }

        [BsonElement("contentType")]
        public required string ContentType { get; set; }

        [BsonElement("encoding")]
        public required string Encoding { get; set; }

        [BsonElement("migratedAt")]
        public required DateTime MigratedAt { get; set; }

        [BsonElement("status")]
        public required string Status { get; set; }

        [BsonElement("verifiedAt")]
        public DateTime? VerifiedAt { get; set; }

        [BsonElement("rollbackAvailable")]
        public bool RollbackAvailable { get; set; }

        [BsonElement("errorMessage")]
        public string? ErrorMessage { get; set; }
    }
}
@@ -1,52 +0,0 @@
namespace StellaOps.Concelier.Storage.Mongo.ObjectStorage;

/// <summary>
/// Deterministic pointer to an object in S3-compatible storage.
/// </summary>
public sealed record ObjectPointer
{
    /// <summary>
    /// S3 bucket name (tenant-prefixed).
    /// </summary>
    public required string Bucket { get; init; }

    /// <summary>
    /// Object key (deterministic, content-addressed).
    /// </summary>
    public required string Key { get; init; }

    /// <summary>
    /// SHA-256 hash of object content (hex encoded).
    /// </summary>
    public required string Sha256 { get; init; }

    /// <summary>
    /// Object size in bytes.
    /// </summary>
    public required long Size { get; init; }

    /// <summary>
    /// MIME type of the object.
    /// </summary>
    public string ContentType { get; init; } = "application/octet-stream";

    /// <summary>
    /// Content encoding if compressed.
    /// </summary>
    public ContentEncoding Encoding { get; init; } = ContentEncoding.Identity;
}

/// <summary>
/// Content encoding for stored objects.
/// </summary>
public enum ContentEncoding
{
    /// <summary>No compression.</summary>
    Identity,

    /// <summary>Gzip compression.</summary>
    Gzip,

    /// <summary>Zstandard compression.</summary>
    Zstd
}
@@ -1,75 +0,0 @@
namespace StellaOps.Concelier.Storage.Mongo.ObjectStorage;

/// <summary>
/// Configuration options for S3-compatible object storage.
/// </summary>
public sealed class ObjectStorageOptions
{
    /// <summary>
    /// Configuration section name.
    /// </summary>
    public const string SectionName = "Concelier:ObjectStorage";

    /// <summary>
    /// S3-compatible endpoint URL (MinIO, AWS S3, etc.).
    /// </summary>
    public string Endpoint { get; set; } = "http://localhost:9000";

    /// <summary>
    /// Storage region (use 'us-east-1' for MinIO).
    /// </summary>
    public string Region { get; set; } = "us-east-1";

    /// <summary>
    /// Use path-style addressing (required for MinIO).
    /// </summary>
    public bool UsePathStyle { get; set; } = true;

    /// <summary>
    /// Prefix for tenant bucket names.
    /// </summary>
    public string BucketPrefix { get; set; } = "stellaops-concelier-";

    /// <summary>
    /// Maximum object size in bytes (default 5GB).
    /// </summary>
    public long MaxObjectSize { get; set; } = 5L * 1024 * 1024 * 1024;

    /// <summary>
    /// Objects larger than this (bytes) will be compressed.
    /// Default: 1MB.
    /// </summary>
    public int CompressionThreshold { get; set; } = 1024 * 1024;

    /// <summary>
    /// Objects smaller than this (bytes) will be stored inline.
    /// Default: 64KB.
    /// </summary>
    public int InlineThreshold { get; set; } = 64 * 1024;

    /// <summary>
    /// Whether object storage is enabled. When false, uses GridFS fallback.
    /// </summary>
    public bool Enabled { get; set; } = false;

    /// <summary>
    /// AWS access key ID (or MinIO access key).
    /// </summary>
    public string? AccessKeyId { get; set; }

    /// <summary>
    /// AWS secret access key (or MinIO secret key).
    /// </summary>
    public string? SecretAccessKey { get; set; }

    /// <summary>
    /// Gets the bucket name for a tenant.
    /// </summary>
    public string GetBucketName(string tenantId)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(tenantId);
        // Normalize tenant ID to lowercase and replace invalid characters
        var normalized = tenantId.ToLowerInvariant().Replace('_', '-');
        return $"{BucketPrefix}{normalized}";
    }
}
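As a quick illustration of the bucket naming above, GetBucketName lowercases the tenant id and swaps underscores for hyphens before prepending the prefix (values illustrative):

var options = new ObjectStorageOptions { BucketPrefix = "stellaops-concelier-" };
var bucket = options.GetBucketName("Tenant_A"); // yields "stellaops-concelier-tenant-a"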
@@ -1,128 +0,0 @@
using Amazon;
using Amazon.Runtime;
using Amazon.S3;
using Microsoft.Extensions.Configuration;
using Microsoft.Extensions.DependencyInjection;
using Microsoft.Extensions.DependencyInjection.Extensions;
using Microsoft.Extensions.Options;

namespace StellaOps.Concelier.Storage.Mongo.ObjectStorage;

/// <summary>
/// Extension methods for registering object storage services.
/// </summary>
public static class ObjectStorageServiceCollectionExtensions
{
    /// <summary>
    /// Adds object storage services for Concelier raw payload storage.
    /// </summary>
    public static IServiceCollection AddConcelierObjectStorage(
        this IServiceCollection services,
        IConfiguration configuration)
    {
        ArgumentNullException.ThrowIfNull(services);
        ArgumentNullException.ThrowIfNull(configuration);

        // Bind options
        services.Configure<ObjectStorageOptions>(
            configuration.GetSection(ObjectStorageOptions.SectionName));

        // Register TimeProvider if not already registered
        services.TryAddSingleton(TimeProvider.System);

        // Register S3 client
        services.TryAddSingleton<IAmazonS3>(sp =>
        {
            var options = sp.GetRequiredService<IOptions<ObjectStorageOptions>>().Value;

            var config = new AmazonS3Config
            {
                RegionEndpoint = RegionEndpoint.GetBySystemName(options.Region),
                ForcePathStyle = options.UsePathStyle,
            };

            if (!string.IsNullOrEmpty(options.Endpoint))
            {
                config.ServiceURL = options.Endpoint;
            }

            if (!string.IsNullOrEmpty(options.AccessKeyId) &&
                !string.IsNullOrEmpty(options.SecretAccessKey))
            {
                var credentials = new BasicAWSCredentials(
                    options.AccessKeyId,
                    options.SecretAccessKey);
                return new AmazonS3Client(credentials, config);
            }

            // Use default credentials chain (env vars, IAM role, etc.)
            return new AmazonS3Client(config);
        });

        // Register object store
        services.TryAddSingleton<IObjectStore, S3ObjectStore>();

        // Register migration tracker
        services.TryAddSingleton<IMigrationTracker, MongoMigrationTracker>();

        // Register migration service
        services.TryAddSingleton<GridFsMigrationService>();

        return services;
    }

    /// <summary>
    /// Adds object storage services with explicit options.
    /// </summary>
    public static IServiceCollection AddConcelierObjectStorage(
        this IServiceCollection services,
        Action<ObjectStorageOptions> configureOptions)
    {
        ArgumentNullException.ThrowIfNull(services);
        ArgumentNullException.ThrowIfNull(configureOptions);

        services.Configure(configureOptions);

        // Register TimeProvider if not already registered
        services.TryAddSingleton(TimeProvider.System);

        // Register S3 client
        services.TryAddSingleton<IAmazonS3>(sp =>
        {
            var options = sp.GetRequiredService<IOptions<ObjectStorageOptions>>().Value;

            var config = new AmazonS3Config
            {
                RegionEndpoint = RegionEndpoint.GetBySystemName(options.Region),
                ForcePathStyle = options.UsePathStyle,
            };

            if (!string.IsNullOrEmpty(options.Endpoint))
            {
                config.ServiceURL = options.Endpoint;
            }

            if (!string.IsNullOrEmpty(options.AccessKeyId) &&
                !string.IsNullOrEmpty(options.SecretAccessKey))
            {
                var credentials = new BasicAWSCredentials(
                    options.AccessKeyId,
                    options.SecretAccessKey);
                return new AmazonS3Client(credentials, config);
            }

            return new AmazonS3Client(config);
        });

        // Register object store
        services.TryAddSingleton<IObjectStore, S3ObjectStore>();

        // Register migration tracker
        services.TryAddSingleton<IMigrationTracker, MongoMigrationTracker>();

        // Register migration service
        services.TryAddSingleton<GridFsMigrationService>();

        return services;
    }
}
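Composition-root wiring for the extensions removed above might look like this sketch (assumes a generic host; the configuration-driven overload binds the Concelier:ObjectStorage section per ObjectStorageOptions.SectionName):

var builder = Host.CreateApplicationBuilder(args);

// Configuration-driven registration.
builder.Services.AddConcelierObjectStorage(builder.Configuration);

// Or explicit options, e.g. for a local MinIO instance (illustrative values):
builder.Services.AddConcelierObjectStorage(o =>
{
    o.Enabled = true;
    o.Endpoint = "http://localhost:9000";
    o.UsePathStyle = true;
});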
@@ -1,79 +0,0 @@
namespace StellaOps.Concelier.Storage.Mongo.ObjectStorage;

/// <summary>
/// Reference to a large payload stored in object storage (used in advisory_observations).
/// </summary>
public sealed record PayloadReference
{
    /// <summary>
    /// Discriminator for payload type.
    /// </summary>
    public const string TypeDiscriminator = "object-storage-ref";

    /// <summary>
    /// Type discriminator value.
    /// </summary>
    public string Type { get; init; } = TypeDiscriminator;

    /// <summary>
    /// Pointer to the object in storage.
    /// </summary>
    public required ObjectPointer Pointer { get; init; }

    /// <summary>
    /// Provenance metadata for the payload.
    /// </summary>
    public required ProvenanceMetadata Provenance { get; init; }

    /// <summary>
    /// If true, payload is small enough to be inline (not in object storage).
    /// </summary>
    public bool Inline { get; init; }

    /// <summary>
    /// Base64-encoded inline data (only if Inline=true and size less than threshold).
    /// </summary>
    public string? InlineData { get; init; }

    /// <summary>
    /// Creates a reference for inline data.
    /// </summary>
    public static PayloadReference CreateInline(
        byte[] data,
        string sha256,
        ProvenanceMetadata provenance,
        string contentType = "application/octet-stream")
    {
        return new PayloadReference
        {
            Pointer = new ObjectPointer
            {
                Bucket = string.Empty,
                Key = string.Empty,
                Sha256 = sha256,
                Size = data.Length,
                ContentType = contentType,
                Encoding = ContentEncoding.Identity,
            },
            Provenance = provenance,
            Inline = true,
            InlineData = Convert.ToBase64String(data),
        };
    }

    /// <summary>
    /// Creates a reference for object storage data.
    /// </summary>
    public static PayloadReference CreateObjectStorage(
        ObjectPointer pointer,
        ProvenanceMetadata provenance)
    {
        return new PayloadReference
        {
            Pointer = pointer,
            Provenance = provenance,
            Inline = false,
            InlineData = null,
        };
    }
}
@@ -1,86 +0,0 @@
namespace StellaOps.Concelier.Storage.Mongo.ObjectStorage;

/// <summary>
/// Provenance metadata preserved from original ingestion.
/// </summary>
public sealed record ProvenanceMetadata
{
    /// <summary>
    /// Identifier of the original data source (URI).
    /// </summary>
    public required string SourceId { get; init; }

    /// <summary>
    /// UTC timestamp of original ingestion.
    /// </summary>
    public required DateTimeOffset IngestedAt { get; init; }

    /// <summary>
    /// Tenant identifier for multi-tenant isolation.
    /// </summary>
    public required string TenantId { get; init; }

    /// <summary>
    /// Original format before normalization.
    /// </summary>
    public OriginalFormat? OriginalFormat { get; init; }

    /// <summary>
    /// Original size before any transformation.
    /// </summary>
    public long? OriginalSize { get; init; }

    /// <summary>
    /// List of transformations applied.
    /// </summary>
    public IReadOnlyList<TransformationRecord> Transformations { get; init; } = [];

    /// <summary>
    /// Original GridFS ObjectId for migration tracking.
    /// </summary>
    public string? GridFsLegacyId { get; init; }
}

/// <summary>
/// Original format of ingested data.
/// </summary>
public enum OriginalFormat
{
    Json,
    Xml,
    Csv,
    Ndjson,
    Yaml
}

/// <summary>
/// Record of a transformation applied to the payload.
/// </summary>
public sealed record TransformationRecord
{
    /// <summary>
    /// Type of transformation.
    /// </summary>
    public required TransformationType Type { get; init; }

    /// <summary>
    /// Timestamp when transformation was applied.
    /// </summary>
    public required DateTimeOffset Timestamp { get; init; }

    /// <summary>
    /// Agent/service that performed the transformation.
    /// </summary>
    public required string Agent { get; init; }
}

/// <summary>
/// Types of transformations that can be applied.
/// </summary>
public enum TransformationType
{
    Compression,
    Normalization,
    Redaction,
    Migration
}
@@ -1,320 +0,0 @@
using System.IO.Compression;
using System.Security.Cryptography;
using Amazon.S3;
using Amazon.S3.Model;
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Options;

namespace StellaOps.Concelier.Storage.Mongo.ObjectStorage;

/// <summary>
/// S3-compatible object store implementation for raw advisory payloads.
/// </summary>
public sealed class S3ObjectStore : IObjectStore
{
    private readonly IAmazonS3 _s3;
    private readonly ObjectStorageOptions _options;
    private readonly TimeProvider _timeProvider;
    private readonly ILogger<S3ObjectStore> _logger;

    public S3ObjectStore(
        IAmazonS3 s3,
        IOptions<ObjectStorageOptions> options,
        TimeProvider timeProvider,
        ILogger<S3ObjectStore> logger)
    {
        _s3 = s3 ?? throw new ArgumentNullException(nameof(s3));
        _options = options?.Value ?? throw new ArgumentNullException(nameof(options));
        _timeProvider = timeProvider ?? TimeProvider.System;
        _logger = logger ?? throw new ArgumentNullException(nameof(logger));
    }

    public async Task<PayloadReference> StoreAsync(
        string tenantId,
        ReadOnlyMemory<byte> data,
        ProvenanceMetadata provenance,
        string contentType = "application/json",
        CancellationToken cancellationToken = default)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(tenantId);
        ArgumentNullException.ThrowIfNull(provenance);

        var dataArray = data.ToArray();
        var sha256 = ComputeSha256(dataArray);

        // Use inline storage for small payloads
        if (dataArray.Length < _options.InlineThreshold)
        {
            _logger.LogDebug(
                "Storing inline payload for tenant {TenantId}, size {Size} bytes",
                tenantId, dataArray.Length);

            return PayloadReference.CreateInline(dataArray, sha256, provenance, contentType);
        }

        // Store in S3
        var bucket = _options.GetBucketName(tenantId);
        await EnsureBucketExistsAsync(tenantId, cancellationToken).ConfigureAwait(false);

        var shouldCompress = dataArray.Length >= _options.CompressionThreshold;
        var encoding = ContentEncoding.Identity;
        byte[] payloadToStore = dataArray;

        if (shouldCompress)
        {
            payloadToStore = CompressGzip(dataArray);
            encoding = ContentEncoding.Gzip;
            _logger.LogDebug(
                "Compressed payload from {OriginalSize} to {CompressedSize} bytes",
                dataArray.Length, payloadToStore.Length);
        }

        var key = GenerateKey(sha256, provenance.IngestedAt, contentType, encoding);

        var request = new PutObjectRequest
        {
            BucketName = bucket,
            Key = key,
            InputStream = new MemoryStream(payloadToStore),
            ContentType = encoding == ContentEncoding.Gzip ? "application/gzip" : contentType,
            AutoCloseStream = true,
        };

        // Add metadata
        request.Metadata["x-stellaops-sha256"] = sha256;
        request.Metadata["x-stellaops-original-size"] = dataArray.Length.ToString();
        request.Metadata["x-stellaops-encoding"] = encoding.ToString().ToLowerInvariant();
        request.Metadata["x-stellaops-source-id"] = provenance.SourceId;
        request.Metadata["x-stellaops-ingested-at"] = provenance.IngestedAt.ToString("O");

        await _s3.PutObjectAsync(request, cancellationToken).ConfigureAwait(false);

        _logger.LogDebug(
            "Stored object {Bucket}/{Key}, size {Size} bytes, encoding {Encoding}",
            bucket, key, payloadToStore.Length, encoding);

        var pointer = new ObjectPointer
        {
            Bucket = bucket,
            Key = key,
            Sha256 = sha256,
            Size = payloadToStore.Length,
            ContentType = contentType,
            Encoding = encoding,
        };

        return PayloadReference.CreateObjectStorage(pointer, provenance);
    }

    public async Task<PayloadReference> StoreStreamAsync(
        string tenantId,
        Stream stream,
        ProvenanceMetadata provenance,
        string contentType = "application/json",
        CancellationToken cancellationToken = default)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(tenantId);
        ArgumentNullException.ThrowIfNull(stream);
        ArgumentNullException.ThrowIfNull(provenance);

        // Read stream to memory for hash computation
        using var memoryStream = new MemoryStream();
        await stream.CopyToAsync(memoryStream, cancellationToken).ConfigureAwait(false);
        var data = memoryStream.ToArray();

        return await StoreAsync(tenantId, data, provenance, contentType, cancellationToken)
            .ConfigureAwait(false);
    }

    public async Task<byte[]?> RetrieveAsync(
        PayloadReference reference,
        CancellationToken cancellationToken = default)
    {
        ArgumentNullException.ThrowIfNull(reference);

        // Handle inline data
        if (reference.Inline && reference.InlineData is not null)
        {
            return Convert.FromBase64String(reference.InlineData);
        }

        var stream = await RetrieveStreamAsync(reference, cancellationToken).ConfigureAwait(false);
        if (stream is null)
        {
            return null;
        }

        using (stream)
        {
            using var memoryStream = new MemoryStream();
            await stream.CopyToAsync(memoryStream, cancellationToken).ConfigureAwait(false);
            return memoryStream.ToArray();
        }
    }

    public async Task<Stream?> RetrieveStreamAsync(
        PayloadReference reference,
        CancellationToken cancellationToken = default)
    {
        ArgumentNullException.ThrowIfNull(reference);

        // Handle inline data
        if (reference.Inline && reference.InlineData is not null)
        {
            return new MemoryStream(Convert.FromBase64String(reference.InlineData));
        }

        var pointer = reference.Pointer;
        try
        {
            var response = await _s3.GetObjectAsync(pointer.Bucket, pointer.Key, cancellationToken)
                .ConfigureAwait(false);

            Stream resultStream = response.ResponseStream;

            // Decompress if needed
            if (pointer.Encoding == ContentEncoding.Gzip)
            {
                var decompressed = new MemoryStream();
                using (var gzip = new GZipStream(response.ResponseStream, CompressionMode.Decompress))
                {
                    await gzip.CopyToAsync(decompressed, cancellationToken).ConfigureAwait(false);
                }
                decompressed.Position = 0;
                resultStream = decompressed;
            }

            return resultStream;
        }
        catch (AmazonS3Exception ex) when (ex.StatusCode == System.Net.HttpStatusCode.NotFound)
        {
            _logger.LogWarning("Object not found: {Bucket}/{Key}", pointer.Bucket, pointer.Key);
            return null;
        }
    }

    public async Task<bool> ExistsAsync(
        ObjectPointer pointer,
        CancellationToken cancellationToken = default)
    {
        ArgumentNullException.ThrowIfNull(pointer);

        try
        {
            var metadata = await _s3.GetObjectMetadataAsync(pointer.Bucket, pointer.Key, cancellationToken)
                .ConfigureAwait(false);
            return metadata.HttpStatusCode == System.Net.HttpStatusCode.OK;
        }
        catch (AmazonS3Exception ex) when (ex.StatusCode == System.Net.HttpStatusCode.NotFound)
        {
            return false;
        }
    }

    public async Task DeleteAsync(
        ObjectPointer pointer,
        CancellationToken cancellationToken = default)
    {
        ArgumentNullException.ThrowIfNull(pointer);

        await _s3.DeleteObjectAsync(pointer.Bucket, pointer.Key, cancellationToken)
            .ConfigureAwait(false);

        _logger.LogDebug("Deleted object {Bucket}/{Key}", pointer.Bucket, pointer.Key);
    }

    public async Task EnsureBucketExistsAsync(
        string tenantId,
        CancellationToken cancellationToken = default)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(tenantId);

        var bucket = _options.GetBucketName(tenantId);

        try
        {
            await _s3.EnsureBucketExistsAsync(bucket).ConfigureAwait(false);
            _logger.LogDebug("Ensured bucket exists: {Bucket}", bucket);
        }
        catch (AmazonS3Exception ex)
        {
            _logger.LogError(ex, "Failed to ensure bucket exists: {Bucket}", bucket);
            throw;
        }
    }

    public async Task<bool> VerifyIntegrityAsync(
        PayloadReference reference,
        CancellationToken cancellationToken = default)
    {
        ArgumentNullException.ThrowIfNull(reference);

        var data = await RetrieveAsync(reference, cancellationToken).ConfigureAwait(false);
        if (data is null)
        {
            return false;
        }

        var computedHash = ComputeSha256(data);
        var matches = string.Equals(computedHash, reference.Pointer.Sha256, StringComparison.OrdinalIgnoreCase);

        if (!matches)
        {
            _logger.LogWarning(
                "Integrity check failed for {Bucket}/{Key}: expected {Expected}, got {Actual}",
                reference.Pointer.Bucket, reference.Pointer.Key,
                reference.Pointer.Sha256, computedHash);
        }

        return matches;
    }

    private static string ComputeSha256(byte[] data)
    {
        var hash = SHA256.HashData(data);
        return Convert.ToHexStringLower(hash);
    }

    private static byte[] CompressGzip(byte[] data)
    {
        using var output = new MemoryStream();
        using (var gzip = new GZipStream(output, CompressionLevel.Optimal, leaveOpen: true))
        {
            gzip.Write(data);
        }
        return output.ToArray();
    }

    private static string GenerateKey(
        string sha256,
        DateTimeOffset ingestedAt,
        string contentType,
        ContentEncoding encoding)
    {
        var date = ingestedAt.UtcDateTime;
        var extension = GetExtension(contentType, encoding);

        // Format: advisories/raw/YYYY/MM/DD/sha256-{hash}.{extension}
        return $"advisories/raw/{date:yyyy}/{date:MM}/{date:dd}/sha256-{sha256[..16]}{extension}";
    }

    private static string GetExtension(string contentType, ContentEncoding encoding)
    {
        var baseExt = contentType switch
        {
            "application/json" => ".json",
            "application/xml" or "text/xml" => ".xml",
            "text/csv" => ".csv",
            "application/x-ndjson" => ".ndjson",
            "application/x-yaml" or "text/yaml" => ".yaml",
            _ => ".bin"
        };

        return encoding switch
        {
            ContentEncoding.Gzip => baseExt + ".gz",
            ContentEncoding.Zstd => baseExt + ".zst",
            _ => baseExt
        };
    }
}
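A store/retrieve round trip over the IObjectStore surface, as a sketch (payload, tenant, and source values are placeholders; objectStore is assumed injected):

// Sketch only: illustrative values throughout.
var payload = "{\"id\":\"CVE-2024-0001\"}"u8.ToArray();
var provenance = new ProvenanceMetadata
{
    SourceId = "https://feeds.example/nvd",
    IngestedAt = DateTimeOffset.UtcNow,
    TenantId = "tenant-a",
};

// Below InlineThreshold this returns an inline reference; larger payloads land in S3,
// and anything at or above CompressionThreshold is gzip-compressed first.
var reference = await objectStore.StoreAsync("tenant-a", payload, provenance);
var roundTripped = await objectStore.RetrieveAsync(reference);
var intact = await objectStore.VerifyIntegrityAsync(reference);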
@@ -1,23 +0,0 @@
<Project Sdk="Microsoft.NET.Sdk">
  <PropertyGroup>
    <TargetFramework>net10.0</TargetFramework>
    <LangVersion>preview</LangVersion>
    <ImplicitUsings>enable</ImplicitUsings>
    <Nullable>enable</Nullable>
    <TreatWarningsAsErrors>true</TreatWarningsAsErrors>
  </PropertyGroup>
  <ItemGroup>
    <PackageReference Include="AWSSDK.S3" Version="3.7.305.6" />
    <PackageReference Include="MongoDB.Driver" Version="3.5.0" />
    <PackageReference Include="Microsoft.Extensions.Configuration.Abstractions" Version="10.0.0" />
    <PackageReference Include="Microsoft.Extensions.Configuration.Binder" Version="10.0.0" />
    <PackageReference Include="Microsoft.Extensions.DependencyInjection.Abstractions" Version="10.0.0" />
    <PackageReference Include="Microsoft.Extensions.Logging.Abstractions" Version="10.0.0" />
    <PackageReference Include="Microsoft.Extensions.Options" Version="10.0.0" />
    <PackageReference Include="Microsoft.Extensions.Options.ConfigurationExtensions" Version="10.0.0" />
  </ItemGroup>
  <ItemGroup>
    <ProjectReference Include="../StellaOps.Concelier.RawModels/StellaOps.Concelier.RawModels.csproj" />
    <ProjectReference Include="../StellaOps.Concelier.Models/StellaOps.Concelier.Models.csproj" />
  </ItemGroup>
</Project>
@@ -1,82 +0,0 @@
using System;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using MongoDB.Bson;
using MongoDB.Driver;
using StellaOps.Concelier.Storage.Mongo;
using StellaOps.Concelier.Storage.Mongo.Conflicts;
using StellaOps.Concelier.Testing;
using Xunit;

namespace StellaOps.Concelier.Storage.Mongo.Tests;

[Collection("mongo-fixture")]
public sealed class AdvisoryConflictStoreTests
{
    private readonly IMongoDatabase _database;

    public AdvisoryConflictStoreTests(MongoIntegrationFixture fixture)
    {
        _database = fixture.Database ?? throw new ArgumentNullException(nameof(fixture.Database));
    }

    [Fact]
    public async Task InsertAndRetrieve_PersistsConflicts()
    {
        var store = new AdvisoryConflictStore(_database);
        var vulnerabilityKey = $"CVE-{Guid.NewGuid():N}";
        var baseTime = DateTimeOffset.UtcNow;
        var statementIds = new[] { Guid.NewGuid(), Guid.NewGuid() };

        var conflict = new AdvisoryConflictRecord(
            Guid.NewGuid(),
            vulnerabilityKey,
            new byte[] { 0x10, 0x20 },
            baseTime,
            baseTime.AddSeconds(30),
            statementIds,
            new BsonDocument("explanation", "first-pass"));

        await store.InsertAsync(new[] { conflict }, CancellationToken.None);

        var results = await store.GetConflictsAsync(vulnerabilityKey, null, CancellationToken.None);

        Assert.Single(results);
        Assert.Equal(conflict.Id, results[0].Id);
        Assert.Equal(statementIds, results[0].StatementIds);
    }

    [Fact]
    public async Task GetConflicts_AsOfFilters()
    {
        var store = new AdvisoryConflictStore(_database);
        var vulnerabilityKey = $"CVE-{Guid.NewGuid():N}";
        var baseTime = DateTimeOffset.UtcNow;

        var earlyConflict = new AdvisoryConflictRecord(
            Guid.NewGuid(),
            vulnerabilityKey,
            new byte[] { 0x01 },
            baseTime,
            baseTime.AddSeconds(10),
            new[] { Guid.NewGuid() },
            new BsonDocument("stage", "early"));

        var lateConflict = new AdvisoryConflictRecord(
            Guid.NewGuid(),
            vulnerabilityKey,
            new byte[] { 0x02 },
            baseTime.AddMinutes(10),
            baseTime.AddMinutes(10).AddSeconds(15),
            new[] { Guid.NewGuid() },
            new BsonDocument("stage", "late"));

        await store.InsertAsync(new[] { earlyConflict, lateConflict }, CancellationToken.None);

        var results = await store.GetConflictsAsync(vulnerabilityKey, baseTime.AddMinutes(1), CancellationToken.None);

        Assert.Single(results);
        Assert.Equal("early", results[0].Details["stage"].AsString);
    }
}
@@ -1,96 +0,0 @@
using System;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using MongoDB.Bson;
using MongoDB.Driver;
using StellaOps.Concelier.Storage.Mongo;
using StellaOps.Concelier.Storage.Mongo.Statements;
using StellaOps.Concelier.Testing;
using Xunit;

namespace StellaOps.Concelier.Storage.Mongo.Tests;

[Collection("mongo-fixture")]
public sealed class AdvisoryStatementStoreTests
{
    private readonly IMongoDatabase _database;

    public AdvisoryStatementStoreTests(MongoIntegrationFixture fixture)
    {
        _database = fixture.Database ?? throw new ArgumentNullException(nameof(fixture.Database));
    }

    [Fact]
    public async Task InsertAndRetrieve_WritesImmutableStatements()
    {
        var store = new AdvisoryStatementStore(_database);
        var vulnerabilityKey = $"CVE-{Guid.NewGuid():N}";
        var baseTime = DateTimeOffset.UtcNow;

        var statements = new[]
        {
            new AdvisoryStatementRecord(
                Guid.NewGuid(),
                vulnerabilityKey,
                vulnerabilityKey,
                new byte[] { 0x01 },
                baseTime,
                baseTime.AddSeconds(5),
                new BsonDocument("version", "A"),
                new[] { Guid.NewGuid() }),
            new AdvisoryStatementRecord(
                Guid.NewGuid(),
                vulnerabilityKey,
                vulnerabilityKey,
                new byte[] { 0x02 },
                baseTime.AddMinutes(1),
                baseTime.AddMinutes(1).AddSeconds(5),
                new BsonDocument("version", "B"),
                Array.Empty<Guid>()),
        };

        await store.InsertAsync(statements, CancellationToken.None);

        var results = await store.GetStatementsAsync(vulnerabilityKey, null, CancellationToken.None);

        Assert.Equal(2, results.Count);
        Assert.Equal(statements[1].Id, results[0].Id); // sorted by AsOf desc
        Assert.True(results.All(record => record.Payload.Contains("version")));
    }

    [Fact]
    public async Task GetStatements_AsOfFiltersResults()
    {
        var store = new AdvisoryStatementStore(_database);
        var vulnerabilityKey = $"CVE-{Guid.NewGuid():N}";
        var baseTime = DateTimeOffset.UtcNow;

        var early = new AdvisoryStatementRecord(
            Guid.NewGuid(),
            vulnerabilityKey,
            vulnerabilityKey,
            new byte[] { 0xAA },
            baseTime,
            baseTime.AddSeconds(10),
            new BsonDocument("state", "early"),
            Array.Empty<Guid>());

        var late = new AdvisoryStatementRecord(
            Guid.NewGuid(),
            vulnerabilityKey,
            vulnerabilityKey,
            new byte[] { 0xBB },
            baseTime.AddMinutes(5),
            baseTime.AddMinutes(5).AddSeconds(10),
            new BsonDocument("state", "late"),
            Array.Empty<Guid>());

        await store.InsertAsync(new[] { early, late }, CancellationToken.None);

        var results = await store.GetStatementsAsync(vulnerabilityKey, baseTime.AddMinutes(1), CancellationToken.None);

        Assert.Single(results);
        Assert.Equal("early", results[0].Payload["state"].AsString);
    }
}
@@ -1,200 +0,0 @@
using System.Diagnostics;
using System.Linq;
using System.Threading;
using Microsoft.Extensions.Logging.Abstractions;
using Microsoft.Extensions.Options;
using StellaOps.Concelier.Models;
using StellaOps.Concelier.Storage.Mongo;
using StellaOps.Concelier.Storage.Mongo.Advisories;
using StellaOps.Concelier.Storage.Mongo.Aliases;
using StellaOps.Concelier.Storage.Mongo.Migrations;
using Xunit;
using Xunit.Abstractions;

namespace StellaOps.Concelier.Storage.Mongo.Tests;

[Collection("mongo-fixture")]
public sealed class AdvisoryStorePerformanceTests : IClassFixture<MongoIntegrationFixture>
{
    private const int LargeAdvisoryCount = 30;
    private const int AliasesPerAdvisory = 24;
    private const int ReferencesPerAdvisory = 180;
    private const int AffectedPackagesPerAdvisory = 140;
    private const int VersionRangesPerPackage = 4;
    private const int CvssMetricsPerAdvisory = 24;
    private const int ProvenanceEntriesPerAdvisory = 16;
    private static readonly string LargeSummary = new('A', 128 * 1024);
    private static readonly DateTimeOffset BasePublished = new(2024, 1, 1, 0, 0, 0, TimeSpan.Zero);
    private static readonly DateTimeOffset BaseRecorded = new(2024, 1, 1, 0, 0, 0, TimeSpan.Zero);
    private static readonly TimeSpan TotalBudget = TimeSpan.FromSeconds(28);
    private const double UpsertBudgetPerAdvisoryMs = 500;
    private const double FetchBudgetPerAdvisoryMs = 200;
    private const double FindBudgetPerAdvisoryMs = 200;

    private readonly MongoIntegrationFixture _fixture;
    private readonly ITestOutputHelper _output;

    public AdvisoryStorePerformanceTests(MongoIntegrationFixture fixture, ITestOutputHelper output)
    {
        _fixture = fixture;
        _output = output;
    }

    [Fact]
    public async Task UpsertAndQueryLargeAdvisories_CompletesWithinBudget()
    {
        var databaseName = $"concelier-performance-{Guid.NewGuid():N}";
        var database = _fixture.Client.GetDatabase(databaseName);

        try
        {
            var migrationRunner = new MongoMigrationRunner(
                database,
                Array.Empty<IMongoMigration>(),
                NullLogger<MongoMigrationRunner>.Instance,
                TimeProvider.System);

            var bootstrapper = new MongoBootstrapper(
                database,
                Options.Create(new MongoStorageOptions()),
                NullLogger<MongoBootstrapper>.Instance,
                migrationRunner);
            await bootstrapper.InitializeAsync(CancellationToken.None);

            var aliasStore = new AliasStore(database, NullLogger<AliasStore>.Instance);
            var store = new AdvisoryStore(
                database,
                aliasStore,
                NullLogger<AdvisoryStore>.Instance,
                Options.Create(new MongoStorageOptions()),
                TimeProvider.System);
            using var cts = new CancellationTokenSource(TimeSpan.FromSeconds(45));

            // Warm up collections (indexes, serialization caches) so perf timings exclude one-time setup work.
            var warmup = CreateLargeAdvisory(-1);
            await store.UpsertAsync(warmup, cts.Token);
            _ = await store.FindAsync(warmup.AdvisoryKey, cts.Token);
            _ = await store.GetRecentAsync(1, cts.Token);

            var advisories = Enumerable.Range(0, LargeAdvisoryCount)
                .Select(CreateLargeAdvisory)
                .ToArray();

            var upsertWatch = Stopwatch.StartNew();
            foreach (var advisory in advisories)
            {
                await store.UpsertAsync(advisory, cts.Token);
            }

            upsertWatch.Stop();
            var upsertPerAdvisory = upsertWatch.Elapsed.TotalMilliseconds / LargeAdvisoryCount;

            var fetchWatch = Stopwatch.StartNew();
            var recent = await store.GetRecentAsync(LargeAdvisoryCount, cts.Token);
            fetchWatch.Stop();
            var fetchPerAdvisory = fetchWatch.Elapsed.TotalMilliseconds / LargeAdvisoryCount;

            Assert.Equal(LargeAdvisoryCount, recent.Count);

            var findWatch = Stopwatch.StartNew();
            foreach (var advisory in advisories)
            {
                var fetched = await store.FindAsync(advisory.AdvisoryKey, cts.Token);
                Assert.NotNull(fetched);
            }

            findWatch.Stop();
            var findPerAdvisory = findWatch.Elapsed.TotalMilliseconds / LargeAdvisoryCount;

            var totalElapsed = upsertWatch.Elapsed + fetchWatch.Elapsed + findWatch.Elapsed;

            _output.WriteLine($"Upserted {LargeAdvisoryCount} large advisories in {upsertWatch.Elapsed} ({upsertPerAdvisory:F2} ms/doc).");
            _output.WriteLine($"Fetched recent advisories in {fetchWatch.Elapsed} ({fetchPerAdvisory:F2} ms/doc).");
            _output.WriteLine($"Looked up advisories individually in {findWatch.Elapsed} ({findPerAdvisory:F2} ms/doc).");
            _output.WriteLine($"Total elapsed {totalElapsed}.");

            Assert.True(upsertPerAdvisory <= UpsertBudgetPerAdvisoryMs, $"Upsert exceeded {UpsertBudgetPerAdvisoryMs} ms per advisory: {upsertPerAdvisory:F2} ms.");
            Assert.True(fetchPerAdvisory <= FetchBudgetPerAdvisoryMs, $"GetRecent exceeded {FetchBudgetPerAdvisoryMs} ms per advisory: {fetchPerAdvisory:F2} ms.");
            Assert.True(findPerAdvisory <= FindBudgetPerAdvisoryMs, $"Find exceeded {FindBudgetPerAdvisoryMs} ms per advisory: {findPerAdvisory:F2} ms.");
            Assert.True(totalElapsed <= TotalBudget, $"Mongo advisory operations exceeded total budget {TotalBudget}: {totalElapsed}.");
        }
        finally
        {
            await _fixture.Client.DropDatabaseAsync(databaseName);
        }
    }

    private static Advisory CreateLargeAdvisory(int index)
    {
        var baseKey = $"ADV-LARGE-{index:D4}";
        var published = BasePublished.AddDays(index);
        var modified = published.AddHours(6);

        var aliases = Enumerable.Range(0, AliasesPerAdvisory)
            .Select(i => $"ALIAS-{baseKey}-{i:D4}")
            .ToArray();

        var provenance = Enumerable.Range(0, ProvenanceEntriesPerAdvisory)
            .Select(i => new AdvisoryProvenance(
                source: i % 2 == 0 ? "nvd" : "vendor",
                kind: i % 3 == 0 ? "normalized" : "enriched",
                value: $"prov-{baseKey}-{i:D3}",
                recordedAt: BaseRecorded.AddDays(i)))
            .ToArray();

        var references = Enumerable.Range(0, ReferencesPerAdvisory)
            .Select(i => new AdvisoryReference(
                url: $"https://vuln.example.com/{baseKey}/ref/{i:D4}",
                kind: i % 2 == 0 ? "advisory" : "article",
                sourceTag: $"tag-{i % 7}",
                summary: $"Reference {baseKey} #{i}",
                provenance: provenance[i % provenance.Length]))
            .ToArray();

        var affectedPackages = Enumerable.Range(0, AffectedPackagesPerAdvisory)
            .Select(i => new AffectedPackage(
                type: i % 3 == 0 ? AffectedPackageTypes.Rpm : AffectedPackageTypes.Deb,
                identifier: $"pkg/{baseKey}/{i:D4}",
                platform: i % 4 == 0 ? "linux/x86_64" : "linux/aarch64",
                versionRanges: Enumerable.Range(0, VersionRangesPerPackage)
                    .Select(r => new AffectedVersionRange(
                        rangeKind: r % 2 == 0 ? "semver" : "evr",
                        introducedVersion: $"1.{index}.{i}.{r}",
                        fixedVersion: $"2.{index}.{i}.{r}",
                        lastAffectedVersion: $"1.{index}.{i}.{r}",
                        rangeExpression: $">=1.{index}.{i}.{r} <2.{index}.{i}.{r}",
                        provenance: provenance[(i + r) % provenance.Length]))
                    .ToArray(),
                statuses: Array.Empty<AffectedPackageStatus>(),
                provenance: new[]
                {
                    provenance[i % provenance.Length],
                    provenance[(i + 3) % provenance.Length],
                }))
            .ToArray();

        var cvssMetrics = Enumerable.Range(0, CvssMetricsPerAdvisory)
            .Select(i => new CvssMetric(
                version: i % 2 == 0 ? "3.1" : "2.0",
                vector: $"CVSS:3.1/AV:N/AC:L/PR:N/UI:N/S:U/C:H/I:H/A:{(i % 3 == 0 ? "H" : "L")}",
                baseScore: Math.Max(0, 9.8 - i * 0.2),
                baseSeverity: i % 3 == 0 ? "critical" : "high",
                provenance: provenance[i % provenance.Length]))
            .ToArray();

        return new Advisory(
            advisoryKey: baseKey,
            title: $"Large advisory {baseKey}",
            summary: LargeSummary,
            language: "en",
            published: published,
            modified: modified,
            severity: "critical",
            exploitKnown: index % 2 == 0,
            aliases: aliases,
            references: references,
            affectedPackages: affectedPackages,
            cvssMetrics: cvssMetrics,
            provenance: provenance);
    }
}
@@ -1,305 +0,0 @@
using System;
using System.Collections.Generic;
using System.Linq;
using Microsoft.Extensions.Logging.Abstractions;
using Microsoft.Extensions.Options;
using MongoDB.Driver;
using StellaOps.Concelier.Models;
using StellaOps.Concelier.Storage.Mongo.Advisories;
using StellaOps.Concelier.Storage.Mongo.Aliases;

namespace StellaOps.Concelier.Storage.Mongo.Tests;

[Collection("mongo-fixture")]
public sealed class AdvisoryStoreTests : IClassFixture<MongoIntegrationFixture>
{
    private readonly MongoIntegrationFixture _fixture;

    public AdvisoryStoreTests(MongoIntegrationFixture fixture)
    {
        _fixture = fixture;
    }

    [Fact]
    public async Task UpsertAndFetchAdvisory()
    {
        await DropCollectionAsync(MongoStorageDefaults.Collections.Advisory);
        await DropCollectionAsync(MongoStorageDefaults.Collections.Alias);

        var aliasStore = new AliasStore(_fixture.Database, NullLogger<AliasStore>.Instance);
        var store = new AdvisoryStore(
            _fixture.Database,
            aliasStore,
            NullLogger<AdvisoryStore>.Instance,
            Options.Create(new MongoStorageOptions()),
            TimeProvider.System);
        var advisory = new Advisory(
            advisoryKey: "ADV-1",
            title: "Sample Advisory",
            summary: "Demo",
            language: "en",
            published: DateTimeOffset.UtcNow,
            modified: DateTimeOffset.UtcNow,
            severity: "medium",
            exploitKnown: false,
            aliases: new[] { "ALIAS-1" },
            references: Array.Empty<AdvisoryReference>(),
            affectedPackages: Array.Empty<AffectedPackage>(),
            cvssMetrics: Array.Empty<CvssMetric>(),
            provenance: Array.Empty<AdvisoryProvenance>());

        await store.UpsertAsync(advisory, CancellationToken.None);

        var fetched = await store.FindAsync("ADV-1", CancellationToken.None);
        Assert.NotNull(fetched);
        Assert.Equal(advisory.AdvisoryKey, fetched!.AdvisoryKey);

        var recent = await store.GetRecentAsync(5, CancellationToken.None);
        Assert.NotEmpty(recent);

        var aliases = await aliasStore.GetByAdvisoryAsync("ADV-1", CancellationToken.None);
        Assert.Contains(aliases, record => record.Scheme == AliasStoreConstants.PrimaryScheme && record.Value == "ADV-1");
        Assert.Contains(aliases, record => record.Value == "ALIAS-1");
    }

    [Fact]
    public async Task RangePrimitives_RoundTripThroughMongo()
    {
        await DropCollectionAsync(MongoStorageDefaults.Collections.Advisory);
        await DropCollectionAsync(MongoStorageDefaults.Collections.Alias);

        var aliasStore = new AliasStore(_fixture.Database, NullLogger<AliasStore>.Instance);
        var store = new AdvisoryStore(
            _fixture.Database,
            aliasStore,
            NullLogger<AdvisoryStore>.Instance,
            Options.Create(new MongoStorageOptions()),
            TimeProvider.System);

        var recordedAt = new DateTimeOffset(2025, 1, 1, 0, 0, 0, TimeSpan.Zero);
        var provenance = new AdvisoryProvenance("source-x", "mapper", "payload-123", recordedAt);
        var rangePrimitives = new RangePrimitives(
            new SemVerPrimitive(
                Introduced: "1.0.0",
                IntroducedInclusive: true,
                Fixed: "1.2.0",
                FixedInclusive: false,
                LastAffected: "1.1.5",
                LastAffectedInclusive: true,
                ConstraintExpression: ">=1.0.0 <1.2.0"),
            new NevraPrimitive(
                Introduced: new NevraComponent("pkg", 0, "1.0.0", "1", "x86_64"),
                Fixed: new NevraComponent("pkg", 1, "1.2.0", "2", "x86_64"),
                LastAffected: null),
            new EvrPrimitive(
                Introduced: new EvrComponent(1, "1.0.0", "1"),
                Fixed: null,
                LastAffected: new EvrComponent(1, "1.1.5", null)),
            new Dictionary<string, string>(StringComparer.Ordinal)
            {
                ["channel"] = "stable",
                ["notesHash"] = "abc123",
            });

        var versionRange = new AffectedVersionRange(
            rangeKind: "semver",
            introducedVersion: "1.0.0",
            fixedVersion: "1.2.0",
            lastAffectedVersion: "1.1.5",
            rangeExpression: ">=1.0.0 <1.2.0",
            provenance,
            rangePrimitives);

        var affectedPackage = new AffectedPackage(
            type: "semver",
            identifier: "pkg@1.x",
            platform: "linux",
            versionRanges: new[] { versionRange },
            statuses: Array.Empty<AffectedPackageStatus>(),
            provenance: new[] { provenance });

        var advisory = new Advisory(
            advisoryKey: "ADV-RANGE-1",
            title: "Sample Range Primitive",
            summary: "Testing range primitive persistence.",
            language: "en",
            published: recordedAt,
            modified: recordedAt,
            severity: "medium",
            exploitKnown: false,
            aliases: new[] { "CVE-2025-0001" },
            references: Array.Empty<AdvisoryReference>(),
            affectedPackages: new[] { affectedPackage },
            cvssMetrics: Array.Empty<CvssMetric>(),
            provenance: new[] { provenance });

        await store.UpsertAsync(advisory, CancellationToken.None);

        var fetched = await store.FindAsync("ADV-RANGE-1", CancellationToken.None);
        Assert.NotNull(fetched);
        var fetchedPackage = Assert.Single(fetched!.AffectedPackages);
        var fetchedRange = Assert.Single(fetchedPackage.VersionRanges);

        Assert.Equal(versionRange.RangeKind, fetchedRange.RangeKind);
        Assert.Equal(versionRange.IntroducedVersion, fetchedRange.IntroducedVersion);
        Assert.Equal(versionRange.FixedVersion, fetchedRange.FixedVersion);
        Assert.Equal(versionRange.LastAffectedVersion, fetchedRange.LastAffectedVersion);
        Assert.Equal(versionRange.RangeExpression, fetchedRange.RangeExpression);
        Assert.Equal(versionRange.Provenance.Source, fetchedRange.Provenance.Source);
        Assert.Equal(versionRange.Provenance.Kind, fetchedRange.Provenance.Kind);
        Assert.Equal(versionRange.Provenance.Value, fetchedRange.Provenance.Value);
        Assert.Equal(versionRange.Provenance.DecisionReason, fetchedRange.Provenance.DecisionReason);
        Assert.Equal(versionRange.Provenance.RecordedAt, fetchedRange.Provenance.RecordedAt);
        Assert.True(versionRange.Provenance.FieldMask.SequenceEqual(fetchedRange.Provenance.FieldMask));

        Assert.NotNull(fetchedRange.Primitives);
        Assert.Equal(rangePrimitives.SemVer, fetchedRange.Primitives!.SemVer);
        Assert.Equal(rangePrimitives.Nevra, fetchedRange.Primitives.Nevra);
        Assert.Equal(rangePrimitives.Evr, fetchedRange.Primitives.Evr);
        Assert.Equal(rangePrimitives.VendorExtensions, fetchedRange.Primitives.VendorExtensions);
    }

    [Fact]
    public async Task UpsertAsync_SkipsNormalizedVersionsWhenFeatureDisabled()
    {
        await DropCollectionAsync(MongoStorageDefaults.Collections.Advisory);
        await DropCollectionAsync(MongoStorageDefaults.Collections.Alias);

        var aliasStore = new AliasStore(_fixture.Database, NullLogger<AliasStore>.Instance);
        var store = new AdvisoryStore(
            _fixture.Database,
            aliasStore,
            NullLogger<AdvisoryStore>.Instance,
            Options.Create(new MongoStorageOptions { EnableSemVerStyle = false }),
            TimeProvider.System);

        var advisory = CreateNormalizedAdvisory("ADV-NORM-DISABLED");
        await store.UpsertAsync(advisory, CancellationToken.None);

        var document = await _fixture.Database
            .GetCollection<AdvisoryDocument>(MongoStorageDefaults.Collections.Advisory)
            .Find(x => x.AdvisoryKey == advisory.AdvisoryKey)
            .FirstOrDefaultAsync();

        Assert.NotNull(document);
        Assert.True(document!.NormalizedVersions is null || document.NormalizedVersions.Count == 0);
    }

    [Fact]
    public async Task UpsertAsync_PopulatesNormalizedVersionsWhenFeatureEnabled()
    {
        await DropCollectionAsync(MongoStorageDefaults.Collections.Advisory);
        await DropCollectionAsync(MongoStorageDefaults.Collections.Alias);

        var aliasStore = new AliasStore(_fixture.Database, NullLogger<AliasStore>.Instance);
        var store = new AdvisoryStore(
            _fixture.Database,
            aliasStore,
            NullLogger<AdvisoryStore>.Instance,
            Options.Create(new MongoStorageOptions { EnableSemVerStyle = true }),
            TimeProvider.System);

        var advisory = CreateNormalizedAdvisory("ADV-NORM-ENABLED");
        await store.UpsertAsync(advisory, CancellationToken.None);

        var document = await _fixture.Database
            .GetCollection<AdvisoryDocument>(MongoStorageDefaults.Collections.Advisory)
            .Find(x => x.AdvisoryKey == advisory.AdvisoryKey)
            .FirstOrDefaultAsync();

        Assert.NotNull(document);
        var normalizedCollection = document!.NormalizedVersions;
        Assert.NotNull(normalizedCollection);
        var normalized = Assert.Single(normalizedCollection!);
        Assert.Equal("pkg:npm/example", normalized.PackageId);
        Assert.Equal(AffectedPackageTypes.SemVer, normalized.PackageType);
        Assert.Equal(NormalizedVersionSchemes.SemVer, normalized.Scheme);
        Assert.Equal(NormalizedVersionRuleTypes.Range, normalized.Type);
        Assert.Equal("range", normalized.Style);
        Assert.Equal("1.0.0", normalized.Min);
        Assert.True(normalized.MinInclusive);
        Assert.Equal("2.0.0", normalized.Max);
        Assert.False(normalized.MaxInclusive);
        Assert.Null(normalized.Value);
        Assert.Equal("ghsa:pkg:npm/example", normalized.Notes);
        Assert.Equal("range-decision", normalized.DecisionReason);
        Assert.Equal(">= 1.0.0 < 2.0.0", normalized.Constraint);
        Assert.Equal("ghsa", normalized.Source);
        Assert.Equal(new DateTime(2025, 10, 9, 0, 0, 0, DateTimeKind.Utc), normalized.RecordedAtUtc);
    }

    private static Advisory CreateNormalizedAdvisory(string advisoryKey)
    {
        var recordedAt = new DateTimeOffset(2025, 10, 9, 0, 0, 0, TimeSpan.Zero);
        var rangeProvenance = new AdvisoryProvenance(
            source: "ghsa",
            kind: "affected-range",
            value: "pkg:npm/example",
            recordedAt: recordedAt,
            fieldMask: new[] { "affectedpackages[].versionranges[]" },
            decisionReason: "range-decision");

        var semverPrimitive = new SemVerPrimitive(
            Introduced: "1.0.0",
            IntroducedInclusive: true,
            Fixed: "2.0.0",
            FixedInclusive: false,
            LastAffected: null,
            LastAffectedInclusive: false,
            ConstraintExpression: ">= 1.0.0 < 2.0.0");

        var normalizedRule = semverPrimitive.ToNormalizedVersionRule("ghsa:pkg:npm/example")!;
        var versionRange = new AffectedVersionRange(
            rangeKind: "semver",
            introducedVersion: "1.0.0",
            fixedVersion: "2.0.0",
            lastAffectedVersion: null,
            rangeExpression: ">= 1.0.0 < 2.0.0",
            provenance: rangeProvenance,
            primitives: new RangePrimitives(semverPrimitive, null, null, null));

        var package = new AffectedPackage(
            type: AffectedPackageTypes.SemVer,
            identifier: "pkg:npm/example",
            platform: "npm",
            versionRanges: new[] { versionRange },
            statuses: Array.Empty<AffectedPackageStatus>(),
            provenance: new[] { rangeProvenance },
            normalizedVersions: new[] { normalizedRule });

        var advisoryProvenance = new AdvisoryProvenance(
            source: "ghsa",
            kind: "document",
            value: advisoryKey,
            recordedAt: recordedAt,
            fieldMask: new[] { "advisory" },
            decisionReason: "document-decision");

        return new Advisory(
            advisoryKey: advisoryKey,
            title: "Normalized advisory",
            summary: "Contains normalized versions for storage testing.",
            language: "en",
            published: recordedAt,
            modified: recordedAt,
            severity: "medium",
            exploitKnown: false,
            aliases: new[] { $"{advisoryKey}-ALIAS" },
            references: Array.Empty<AdvisoryReference>(),
            affectedPackages: new[] { package },
            cvssMetrics: Array.Empty<CvssMetric>(),
            provenance: new[] { advisoryProvenance });
    }

    private async Task DropCollectionAsync(string collectionName)
    {
        try
        {
            await _fixture.Database.DropCollectionAsync(collectionName);
        }
        catch (MongoDB.Driver.MongoCommandException ex) when (ex.CodeName == "NamespaceNotFound" || ex.Message.Contains("ns not found", StringComparison.OrdinalIgnoreCase))
        {
            // ignore missing collection
        }
    }
}
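Note: the two feature-flag tests above pin down the EnableSemVerStyle contract; a minimal sketch of that contract, assuming only the options shape the tests already show (the store's internal control flow is not part of this diff):

// Hedged sketch: the tests only fix the observable effect of the flag on the
// persisted AdvisoryDocument, not how AdvisoryStore consults it internally.
var options = Options.Create(new MongoStorageOptions { EnableSemVerStyle = true });
// EnableSemVerStyle = true  => document.NormalizedVersions carries the package's
//                              normalized SemVer rules (one per ToNormalizedVersionRule result).
// EnableSemVerStyle = false => document.NormalizedVersions stays null or empty,
//                              even when the advisory supplies normalized rules.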
@@ -1,60 +0,0 @@
using System;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.Extensions.Logging.Abstractions;
using MongoDB.Driver;
using StellaOps.Concelier.Storage.Mongo;
using StellaOps.Concelier.Storage.Mongo.Aliases;

namespace StellaOps.Concelier.Storage.Mongo.Tests;

[Collection("mongo-fixture")]
public sealed class AliasStoreTests : IClassFixture<MongoIntegrationFixture>
{
    private readonly MongoIntegrationFixture _fixture;

    public AliasStoreTests(MongoIntegrationFixture fixture)
    {
        _fixture = fixture;
    }

    [Fact]
    public async Task ReplaceAsync_UpsertsAliases_AndDetectsCollision()
    {
        await DropAliasCollectionAsync();
        var store = new AliasStore(_fixture.Database, NullLogger<AliasStore>.Instance);

        var timestamp = DateTimeOffset.UtcNow;
        await store.ReplaceAsync(
            "ADV-1",
            new[] { new AliasEntry("CVE", "CVE-2025-1234"), new AliasEntry(AliasStoreConstants.PrimaryScheme, "ADV-1") },
            timestamp,
            CancellationToken.None);

        var firstAliases = await store.GetByAdvisoryAsync("ADV-1", CancellationToken.None);
        Assert.Contains(firstAliases, record => record.Scheme == "CVE" && record.Value == "CVE-2025-1234");

        var result = await store.ReplaceAsync(
            "ADV-2",
            new[] { new AliasEntry("CVE", "CVE-2025-1234"), new AliasEntry(AliasStoreConstants.PrimaryScheme, "ADV-2") },
            timestamp.AddMinutes(1),
            CancellationToken.None);

        Assert.NotEmpty(result.Collisions);
        var collision = Assert.Single(result.Collisions);
        Assert.Equal("CVE", collision.Scheme);
        Assert.Contains("ADV-1", collision.AdvisoryKeys);
        Assert.Contains("ADV-2", collision.AdvisoryKeys);
    }

    private async Task DropAliasCollectionAsync()
    {
        try
        {
            await _fixture.Database.DropCollectionAsync(MongoStorageDefaults.Collections.Alias);
        }
        catch (MongoDB.Driver.MongoCommandException ex) when (ex.CodeName == "NamespaceNotFound" || ex.Message.Contains("ns not found", StringComparison.OrdinalIgnoreCase))
        {
        }
    }
}
@@ -1,51 +0,0 @@
using Microsoft.Extensions.Logging.Abstractions;
using StellaOps.Concelier.Storage.Mongo.Documents;

namespace StellaOps.Concelier.Storage.Mongo.Tests;

[Collection("mongo-fixture")]
public sealed class DocumentStoreTests : IClassFixture<MongoIntegrationFixture>
{
    private readonly MongoIntegrationFixture _fixture;

    public DocumentStoreTests(MongoIntegrationFixture fixture)
    {
        _fixture = fixture;
    }

    [Fact]
    public async Task UpsertAndLookupDocument()
    {
        var store = new DocumentStore(_fixture.Database, NullLogger<DocumentStore>.Instance);
        var id = Guid.NewGuid();
        var record = new DocumentRecord(
            id,
            "source",
            "https://example.com/advisory.json",
            DateTimeOffset.UtcNow,
            "sha123",
            "pending",
            "application/json",
            new Dictionary<string, string> { ["etag"] = "abc" },
            new Dictionary<string, string> { ["note"] = "test" },
            "etag-value",
            DateTimeOffset.UtcNow,
            null,
            DateTimeOffset.UtcNow.AddDays(30));

        var upserted = await store.UpsertAsync(record, CancellationToken.None);
        Assert.Equal(id, upserted.Id);

        var fetched = await store.FindBySourceAndUriAsync("source", "https://example.com/advisory.json", CancellationToken.None);
        Assert.NotNull(fetched);
        Assert.Equal("pending", fetched!.Status);
        Assert.Equal("test", fetched.Metadata!["note"]);

        var statusUpdated = await store.UpdateStatusAsync(id, "processed", CancellationToken.None);
        Assert.True(statusUpdated);

        var refreshed = await store.FindAsync(id, CancellationToken.None);
        Assert.NotNull(refreshed);
        Assert.Equal("processed", refreshed!.Status);
    }
}
@@ -1,40 +0,0 @@
using Microsoft.Extensions.Logging.Abstractions;
using MongoDB.Bson;
using StellaOps.Concelier.Storage.Mongo.Dtos;

namespace StellaOps.Concelier.Storage.Mongo.Tests;

[Collection("mongo-fixture")]
public sealed class DtoStoreTests : IClassFixture<MongoIntegrationFixture>
{
    private readonly MongoIntegrationFixture _fixture;

    public DtoStoreTests(MongoIntegrationFixture fixture)
    {
        _fixture = fixture;
    }

    [Fact]
    public async Task UpsertAndLookupDto()
    {
        var store = new DtoStore(_fixture.Database, NullLogger<DtoStore>.Instance);
        var record = new DtoRecord(
            Guid.NewGuid(),
            Guid.NewGuid(),
            "source",
            "1.0",
            new BsonDocument("value", 1),
            DateTimeOffset.UtcNow);

        var upserted = await store.UpsertAsync(record, CancellationToken.None);
        Assert.Equal(record.DocumentId, upserted.DocumentId);

        var fetched = await store.FindByDocumentIdAsync(record.DocumentId, CancellationToken.None);
        Assert.NotNull(fetched);
        Assert.Equal(1, fetched!.Payload["value"].AsInt32);

        var bySource = await store.GetBySourceAsync("source", 10, CancellationToken.None);
        Assert.Single(bySource);
        Assert.Equal(record.DocumentId, bySource[0].DocumentId);
    }
}
@@ -1,208 +0,0 @@
using System;
using System.Collections.Generic;
using System.Threading;
using System.Threading.Tasks;
using StellaOps.Concelier.Storage.Mongo.Exporting;

namespace StellaOps.Concelier.Storage.Mongo.Tests;

public sealed class ExportStateManagerTests
{
    [Fact]
    public async Task StoreFullExportInitializesBaseline()
    {
        var store = new InMemoryExportStateStore();
        var timeProvider = new TestTimeProvider(DateTimeOffset.Parse("2024-07-20T12:00:00Z"));
        var manager = new ExportStateManager(store, timeProvider);

        var record = await manager.StoreFullExportAsync(
            exporterId: "export:json",
            exportId: "20240720T120000Z",
            exportDigest: "sha256:abcd",
            cursor: "cursor-1",
            targetRepository: "registry.local/json",
            exporterVersion: "1.0.0",
            resetBaseline: true,
            manifest: Array.Empty<ExportFileRecord>(),
            cancellationToken: CancellationToken.None);

        Assert.Equal("export:json", record.Id);
        Assert.Equal("20240720T120000Z", record.BaseExportId);
        Assert.Equal("sha256:abcd", record.BaseDigest);
        Assert.Equal("sha256:abcd", record.LastFullDigest);
        Assert.Null(record.LastDeltaDigest);
        Assert.Equal("cursor-1", record.ExportCursor);
        Assert.Equal("registry.local/json", record.TargetRepository);
        Assert.Equal("1.0.0", record.ExporterVersion);
        Assert.Equal(timeProvider.Now, record.UpdatedAt);
    }

    [Fact]
    public async Task StoreFullExport_ResetBaselineOverridesExisting()
    {
        var store = new InMemoryExportStateStore();
        var timeProvider = new TestTimeProvider(DateTimeOffset.Parse("2024-07-20T12:00:00Z"));
        var manager = new ExportStateManager(store, timeProvider);

        await manager.StoreFullExportAsync(
            exporterId: "export:json",
            exportId: "20240720T120000Z",
            exportDigest: "sha256:base",
            cursor: "cursor-base",
            targetRepository: null,
            exporterVersion: "1.0.0",
            resetBaseline: true,
            manifest: Array.Empty<ExportFileRecord>(),
            cancellationToken: CancellationToken.None);

        timeProvider.Advance(TimeSpan.FromMinutes(5));
        var withoutReset = await manager.StoreFullExportAsync(
            exporterId: "export:json",
            exportId: "20240720T120500Z",
            exportDigest: "sha256:new",
            cursor: "cursor-new",
            targetRepository: null,
            exporterVersion: "1.0.1",
            resetBaseline: false,
            manifest: Array.Empty<ExportFileRecord>(),
            cancellationToken: CancellationToken.None);

        Assert.Equal("20240720T120000Z", withoutReset.BaseExportId);
        Assert.Equal("sha256:base", withoutReset.BaseDigest);
        Assert.Equal("sha256:new", withoutReset.LastFullDigest);
        Assert.Equal("cursor-new", withoutReset.ExportCursor);
        Assert.Equal(timeProvider.Now, withoutReset.UpdatedAt);

        timeProvider.Advance(TimeSpan.FromMinutes(5));
        var reset = await manager.StoreFullExportAsync(
            exporterId: "export:json",
            exportId: "20240720T121000Z",
            exportDigest: "sha256:final",
            cursor: "cursor-final",
            targetRepository: null,
            exporterVersion: "1.0.2",
            resetBaseline: true,
            manifest: Array.Empty<ExportFileRecord>(),
            cancellationToken: CancellationToken.None);

        Assert.Equal("20240720T121000Z", reset.BaseExportId);
        Assert.Equal("sha256:final", reset.BaseDigest);
        Assert.Equal("sha256:final", reset.LastFullDigest);
        Assert.Null(reset.LastDeltaDigest);
        Assert.Equal("cursor-final", reset.ExportCursor);
        Assert.Equal(timeProvider.Now, reset.UpdatedAt);
    }

    [Fact]
    public async Task StoreFullExport_ResetsBaselineWhenRepositoryChanges()
    {
        var store = new InMemoryExportStateStore();
        var timeProvider = new TestTimeProvider(DateTimeOffset.Parse("2024-07-21T08:00:00Z"));
        var manager = new ExportStateManager(store, timeProvider);

        await manager.StoreFullExportAsync(
            exporterId: "export:json",
            exportId: "20240721T080000Z",
            exportDigest: "sha256:base",
            cursor: "cursor-base",
            targetRepository: "registry/v1/json",
            exporterVersion: "1.0.0",
            resetBaseline: true,
            manifest: Array.Empty<ExportFileRecord>(),
            cancellationToken: CancellationToken.None);

        timeProvider.Advance(TimeSpan.FromMinutes(10));
        var updated = await manager.StoreFullExportAsync(
            exporterId: "export:json",
            exportId: "20240721T081000Z",
            exportDigest: "sha256:new",
            cursor: "cursor-new",
            targetRepository: "registry/v2/json",
            exporterVersion: "1.1.0",
            resetBaseline: false,
            manifest: Array.Empty<ExportFileRecord>(),
            cancellationToken: CancellationToken.None);

        Assert.Equal("20240721T081000Z", updated.BaseExportId);
        Assert.Equal("sha256:new", updated.BaseDigest);
        Assert.Equal("sha256:new", updated.LastFullDigest);
        Assert.Equal("registry/v2/json", updated.TargetRepository);
    }

    [Fact]
    public async Task StoreDeltaExportRequiresBaseline()
    {
        var store = new InMemoryExportStateStore();
        var manager = new ExportStateManager(store);

        await Assert.ThrowsAsync<InvalidOperationException>(() => manager.StoreDeltaExportAsync(
            exporterId: "export:json",
            deltaDigest: "sha256:def",
            cursor: null,
            exporterVersion: "1.0.1",
            manifest: Array.Empty<ExportFileRecord>(),
            cancellationToken: CancellationToken.None));
    }

    [Fact]
    public async Task StoreDeltaExportUpdatesExistingState()
    {
        var store = new InMemoryExportStateStore();
        var timeProvider = new TestTimeProvider(DateTimeOffset.Parse("2024-07-20T12:00:00Z"));
        var manager = new ExportStateManager(store, timeProvider);

        await manager.StoreFullExportAsync(
            exporterId: "export:json",
            exportId: "20240720T120000Z",
            exportDigest: "sha256:abcd",
            cursor: "cursor-1",
            targetRepository: null,
            exporterVersion: "1.0.0",
            resetBaseline: true,
            manifest: Array.Empty<ExportFileRecord>(),
            cancellationToken: CancellationToken.None);

        timeProvider.Advance(TimeSpan.FromMinutes(10));
        var delta = await manager.StoreDeltaExportAsync(
            exporterId: "export:json",
            deltaDigest: "sha256:ef01",
            cursor: "cursor-2",
            exporterVersion: "1.0.1",
            manifest: Array.Empty<ExportFileRecord>(),
            cancellationToken: CancellationToken.None);

        Assert.Equal("sha256:ef01", delta.LastDeltaDigest);
        Assert.Equal("cursor-2", delta.ExportCursor);
        Assert.Equal("1.0.1", delta.ExporterVersion);
        Assert.Equal(timeProvider.Now, delta.UpdatedAt);
        Assert.Equal("sha256:abcd", delta.LastFullDigest);
    }

    private sealed class InMemoryExportStateStore : IExportStateStore
    {
        private readonly Dictionary<string, ExportStateRecord> _records = new(StringComparer.Ordinal);

        public Task<ExportStateRecord?> FindAsync(string id, CancellationToken cancellationToken)
        {
            _records.TryGetValue(id, out var record);
            return Task.FromResult<ExportStateRecord?>(record);
        }

        public Task<ExportStateRecord> UpsertAsync(ExportStateRecord record, CancellationToken cancellationToken)
        {
            _records[record.Id] = record;
            return Task.FromResult(record);
        }
    }

    private sealed class TestTimeProvider : TimeProvider
    {
        public TestTimeProvider(DateTimeOffset start) => Now = start;

        public DateTimeOffset Now { get; private set; }

        public void Advance(TimeSpan delta) => Now = Now.Add(delta);

        public override DateTimeOffset GetUtcNow() => Now;
    }
}
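Note: the three full-export tests above encode when the baseline may advance; a hedged reconstruction of that rule (the manager's real implementation is not part of this diff, so the helper below is illustrative only):

// Assumed decision rule, inferred from the assertions: the baseline
// (BaseExportId/BaseDigest) moves on an explicit reset or when the target
// repository changes; otherwise only LastFullDigest and the cursor advance.
static bool ShouldResetBaseline(bool resetRequested, string? previousRepo, string? nextRepo)
    => resetRequested || !string.Equals(previousRepo, nextRepo, StringComparison.Ordinal);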
@@ -1,42 +0,0 @@
using System;
using Microsoft.Extensions.Logging.Abstractions;
using StellaOps.Concelier.Storage.Mongo.Exporting;

namespace StellaOps.Concelier.Storage.Mongo.Tests;

[Collection("mongo-fixture")]
public sealed class ExportStateStoreTests : IClassFixture<MongoIntegrationFixture>
{
    private readonly MongoIntegrationFixture _fixture;

    public ExportStateStoreTests(MongoIntegrationFixture fixture)
    {
        _fixture = fixture;
    }

    [Fact]
    public async Task UpsertAndFetchExportState()
    {
        var store = new ExportStateStore(_fixture.Database, NullLogger<ExportStateStore>.Instance);
        var record = new ExportStateRecord(
            Id: "json",
            BaseExportId: "base",
            BaseDigest: "sha-base",
            LastFullDigest: "sha-full",
            LastDeltaDigest: null,
            ExportCursor: "cursor",
            TargetRepository: "repo",
            ExporterVersion: "1.0",
            UpdatedAt: DateTimeOffset.UtcNow,
            Files: Array.Empty<ExportFileRecord>());

        var saved = await store.UpsertAsync(record, CancellationToken.None);
        Assert.Equal("json", saved.Id);
        Assert.Empty(saved.Files);

        var fetched = await store.FindAsync("json", CancellationToken.None);
        Assert.NotNull(fetched);
        Assert.Equal("sha-full", fetched!.LastFullDigest);
        Assert.Empty(fetched.Files);
    }
}
@@ -1,174 +0,0 @@
using System;
using System.Collections.Generic;
using System.Collections.Immutable;
using System.Linq;
using System.Reflection;
using System.Threading;
using System.Threading.Tasks;
using MongoDB.Driver;
using StellaOps.Concelier.Core.Linksets;
using StellaOps.Concelier.Storage.Mongo;
using StellaOps.Concelier.Storage.Mongo.Linksets;
using StellaOps.Concelier.Testing;
using Xunit;

namespace StellaOps.Concelier.Storage.Mongo.Tests.Linksets;

public sealed class ConcelierMongoLinksetStoreTests : IClassFixture<MongoIntegrationFixture>
{
    private readonly MongoIntegrationFixture _fixture;

    public ConcelierMongoLinksetStoreTests(MongoIntegrationFixture fixture)
    {
        _fixture = fixture;
    }

    [Fact]
    public void MapToDocument_StoresConfidenceAndConflicts()
    {
        var linkset = new AdvisoryLinkset(
            "tenant",
            "ghsa",
            "GHSA-1234",
            ImmutableArray.Create("obs-1", "obs-2"),
            null,
            new AdvisoryLinksetProvenance(new[] { "h1", "h2" }, "tool", "policy"),
            0.82,
            new List<AdvisoryLinksetConflict>
            {
                new("severity", "disagree", new[] { "HIGH", "MEDIUM" }, new[] { "source-a", "source-b" })
            },
            DateTimeOffset.UtcNow,
            "job-1");

        var method = typeof(ConcelierMongoLinksetStore).GetMethod(
            "MapToDocument",
            BindingFlags.NonPublic | BindingFlags.Static);

        Assert.NotNull(method);

        var document = (AdvisoryLinksetDocument)method!.Invoke(null, new object?[] { linkset })!;

        Assert.Equal(linkset.Confidence, document.Confidence);
        Assert.NotNull(document.Conflicts);
        Assert.Single(document.Conflicts!);
        Assert.Equal("severity", document.Conflicts![0].Field);
        Assert.Equal("disagree", document.Conflicts![0].Reason);
        Assert.Equal(new[] { "source-a", "source-b" }, document.Conflicts![0].SourceIds);
    }

    [Fact]
    public void FromDocument_RestoresConfidenceAndConflicts()
    {
        var doc = new AdvisoryLinksetDocument
        {
            TenantId = "tenant",
            Source = "ghsa",
            AdvisoryId = "GHSA-1234",
            Observations = new List<string> { "obs-1" },
            Confidence = 0.5,
            Conflicts = new List<AdvisoryLinksetConflictDocument>
            {
                new()
                {
                    Field = "references",
                    Reason = "mismatch",
                    Values = new List<string> { "url1", "url2" },
                    SourceIds = new List<string> { "src-a", "src-b" }
                }
            },
            CreatedAt = DateTime.UtcNow
        };

        var method = typeof(ConcelierMongoLinksetStore).GetMethod(
            "FromDocument",
            BindingFlags.NonPublic | BindingFlags.Static);

        Assert.NotNull(method);

        var model = (AdvisoryLinkset)method!.Invoke(null, new object?[] { doc })!;

        Assert.Equal(0.5, model.Confidence);
        Assert.NotNull(model.Conflicts);
        Assert.Single(model.Conflicts!);
        Assert.Equal("references", model.Conflicts![0].Field);
        Assert.Equal(new[] { "src-a", "src-b" }, model.Conflicts![0].SourceIds);
    }

    [Fact]
    public async Task FindByTenantAsync_OrdersByCreatedAtThenAdvisoryId()
    {
        await _fixture.Database.DropCollectionAsync(MongoStorageDefaults.Collections.AdvisoryLinksets);

        var collection = _fixture.Database.GetCollection<AdvisoryLinksetDocument>(MongoStorageDefaults.Collections.AdvisoryLinksets);
        var store = new ConcelierMongoLinksetStore(collection);

        var now = DateTimeOffset.UtcNow;
        var linksets = new[]
        {
            new AdvisoryLinkset("Tenant-A", "src", "ADV-002", ImmutableArray.Create("obs-1"), null, null, null, null, now, "job-1"),
            new AdvisoryLinkset("Tenant-A", "src", "ADV-001", ImmutableArray.Create("obs-2"), null, null, null, null, now, "job-2"),
            new AdvisoryLinkset("Tenant-A", "src", "ADV-003", ImmutableArray.Create("obs-3"), null, null, null, null, now.AddMinutes(-5), "job-3")
        };

        foreach (var linkset in linksets)
        {
            await store.UpsertAsync(linkset, CancellationToken.None);
        }

        var results = await store.FindByTenantAsync("TENANT-A", null, null, cursor: null, limit: 10, cancellationToken: CancellationToken.None);

        Assert.Equal(new[] { "ADV-001", "ADV-002", "ADV-003" }, results.Select(r => r.AdvisoryId));
    }

    [Fact]
    public async Task FindByTenantAsync_AppliesCursorForDeterministicPaging()
    {
        await _fixture.Database.DropCollectionAsync(MongoStorageDefaults.Collections.AdvisoryLinksets);

        var collection = _fixture.Database.GetCollection<AdvisoryLinksetDocument>(MongoStorageDefaults.Collections.AdvisoryLinksets);
        var store = new ConcelierMongoLinksetStore(collection);

        var now = DateTimeOffset.UtcNow;
        var firstPage = new[]
        {
            new AdvisoryLinkset("tenant-a", "src", "ADV-010", ImmutableArray.Create("obs-1"), null, null, null, null, now, "job-1"),
            new AdvisoryLinkset("tenant-a", "src", "ADV-020", ImmutableArray.Create("obs-2"), null, null, null, null, now, "job-2"),
            new AdvisoryLinkset("tenant-a", "src", "ADV-030", ImmutableArray.Create("obs-3"), null, null, null, null, now.AddMinutes(-10), "job-3")
        };

        foreach (var linkset in firstPage)
        {
            await store.UpsertAsync(linkset, CancellationToken.None);
        }

        var initial = await store.FindByTenantAsync("tenant-a", null, null, cursor: null, limit: 10, cancellationToken: CancellationToken.None);
        var cursor = new AdvisoryLinksetCursor(initial[1].CreatedAt, initial[1].AdvisoryId);

        var paged = await store.FindByTenantAsync("tenant-a", null, null, cursor, limit: 10, cancellationToken: CancellationToken.None);

        Assert.Single(paged);
        Assert.Equal("ADV-030", paged[0].AdvisoryId);
    }

    [Fact]
    public async Task Upsert_NormalizesTenantToLowerInvariant()
    {
        await _fixture.Database.DropCollectionAsync(MongoStorageDefaults.Collections.AdvisoryLinksets);

        var collection = _fixture.Database.GetCollection<AdvisoryLinksetDocument>(MongoStorageDefaults.Collections.AdvisoryLinksets);
        var store = new ConcelierMongoLinksetStore(collection);

        var linkset = new AdvisoryLinkset("Tenant-A", "ghsa", "GHSA-1", ImmutableArray.Create("obs-1"), null, null, null, null, DateTimeOffset.UtcNow, "job-1");
        await store.UpsertAsync(linkset, CancellationToken.None);

        var fetched = await collection.Find(Builders<AdvisoryLinksetDocument>.Filter.Empty).FirstOrDefaultAsync();

        Assert.NotNull(fetched);
        Assert.Equal("tenant-a", fetched!.TenantId);

        var results = await store.FindByTenantAsync("TENANT-A", null, null, cursor: null, limit: 10, cancellationToken: CancellationToken.None);
        Assert.Single(results);
        Assert.Equal("GHSA-1", results[0].AdvisoryId);
    }
}
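Note: the ordering and paging tests above imply a composite sort key; a minimal sketch of the assumed ordering (the property names mirror the model, the LINQ itself is illustrative, not the store's actual Mongo sort definition):

// Inferred from the assertions: newest CreatedAt first, ties broken by
// AdvisoryId ascending; the cursor then excludes everything at or before
// its (CreatedAt, AdvisoryId) position.
var ordered = linksets
    .OrderByDescending(l => l.CreatedAt)
    .ThenBy(l => l.AdvisoryId, StringComparer.Ordinal);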
@@ -1,35 +0,0 @@
using Microsoft.Extensions.Logging.Abstractions;
using StellaOps.Concelier.Storage.Mongo.MergeEvents;

namespace StellaOps.Concelier.Storage.Mongo.Tests;

[Collection("mongo-fixture")]
public sealed class MergeEventStoreTests : IClassFixture<MongoIntegrationFixture>
{
    private readonly MongoIntegrationFixture _fixture;

    public MergeEventStoreTests(MongoIntegrationFixture fixture)
    {
        _fixture = fixture;
    }

    [Fact]
    public async Task AppendAndReadMergeEvents()
    {
        var store = new MergeEventStore(_fixture.Database, NullLogger<MergeEventStore>.Instance);
        var record = new MergeEventRecord(
            Guid.NewGuid(),
            "ADV-1",
            new byte[] { 0x01 },
            new byte[] { 0x02 },
            DateTimeOffset.UtcNow,
            new List<Guid> { Guid.NewGuid() },
            Array.Empty<MergeFieldDecision>());

        await store.AppendAsync(record, CancellationToken.None);

        var recent = await store.GetRecentAsync("ADV-1", 10, CancellationToken.None);
        Assert.Single(recent);
        Assert.Equal(record.AfterHash, recent[0].AfterHash);
    }
}
@@ -1,40 +0,0 @@
using System.Threading.Tasks;
using MongoDB.Bson;
using MongoDB.Driver;
using StellaOps.Concelier.Storage.Mongo.Migrations;
using StellaOps.Concelier.Testing;
using Xunit;

namespace StellaOps.Concelier.Storage.Mongo.Tests.Migrations;

[Collection("mongo-fixture")]
public sealed class EnsureAdvisoryLinksetsTenantLowerMigrationTests : IClassFixture<MongoIntegrationFixture>
{
    private readonly MongoIntegrationFixture _fixture;

    public EnsureAdvisoryLinksetsTenantLowerMigrationTests(MongoIntegrationFixture fixture)
    {
        _fixture = fixture;
    }

    [Fact]
    public async Task ApplyAsync_LowersTenantIds()
    {
        await _fixture.Database.DropCollectionAsync(MongoStorageDefaults.Collections.AdvisoryLinksets);
        var collection = _fixture.Database.GetCollection<BsonDocument>(MongoStorageDefaults.Collections.AdvisoryLinksets);

        await collection.InsertManyAsync(new[]
        {
            new BsonDocument { { "TenantId", "Tenant-A" }, { "Source", "src" }, { "AdvisoryId", "ADV-1" }, { "Observations", new BsonArray() } },
            new BsonDocument { { "TenantId", "tenant-b" }, { "Source", "src" }, { "AdvisoryId", "ADV-2" }, { "Observations", new BsonArray() } },
            new BsonDocument { { "Source", "src" }, { "AdvisoryId", "ADV-3" }, { "Observations", new BsonArray() } } // missing tenant should be ignored
        });

        var migration = new EnsureAdvisoryLinksetsTenantLowerMigration();
        await migration.ApplyAsync(_fixture.Database, default);

        var all = await collection.Find(FilterDefinition<BsonDocument>.Empty).ToListAsync();
        Assert.Contains(all, doc => doc["TenantId"] == "tenant-a");
        Assert.Contains(all, doc => doc["TenantId"] == "tenant-b");
    }
}
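Note: a hedged sketch of the backfill the migration test above expects; the real migration is not shown in this diff and may well use a server-side pipeline update instead of a client loop:

// Lower-case any TenantId containing upper-case characters; documents without
// a TenantId stay untouched, matching the "missing tenant" fixture above.
var filter = Builders<BsonDocument>.Filter.Exists("TenantId");
foreach (var doc in await collection.Find(filter).ToListAsync())
{
    var tenant = doc["TenantId"].AsString;
    var lowered = tenant.ToLowerInvariant();
    if (!string.Equals(tenant, lowered, StringComparison.Ordinal))
    {
        await collection.UpdateOneAsync(
            Builders<BsonDocument>.Filter.Eq("_id", doc["_id"]),
            Builders<BsonDocument>.Update.Set("TenantId", lowered));
    }
}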
@@ -1,346 +0,0 @@
using System;
using System.Collections.Generic;
using System.Collections.Immutable;
using System.Linq;
using System.Text.Json;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.Extensions.Logging.Abstractions;
using MongoDB.Bson;
using MongoDB.Bson.Serialization;
using MongoDB.Driver;
using StellaOps.Concelier.RawModels;
using StellaOps.Concelier.Storage.Mongo;
using StellaOps.Concelier.Storage.Mongo.Migrations;
using StellaOps.Concelier.Storage.Mongo.Observations;
using StellaOps.Concelier.Storage.Mongo.Raw;
using Xunit;

namespace StellaOps.Concelier.Storage.Mongo.Tests.Migrations;

[Collection("mongo-fixture")]
public sealed class EnsureAdvisoryObservationsRawLinksetMigrationTests
{
    private readonly MongoIntegrationFixture _fixture;

    public EnsureAdvisoryObservationsRawLinksetMigrationTests(MongoIntegrationFixture fixture)
    {
        _fixture = fixture;
    }

    [Fact]
    public async Task ApplyAsync_BackfillsRawLinksetFromRawDocument()
    {
        var databaseName = $"concelier-rawlinkset-{Guid.NewGuid():N}";
        var database = _fixture.Client.GetDatabase(databaseName);
        await database.CreateCollectionAsync(MongoStorageDefaults.Collections.Migrations);
        await database.CreateCollectionAsync(MongoStorageDefaults.Collections.AdvisoryObservations);

        try
        {
            var rawRepository = new MongoAdvisoryRawRepository(
                database,
                TimeProvider.System,
                NullLogger<MongoAdvisoryRawRepository>.Instance);

            var rawDocument = RawDocumentFactory.CreateAdvisory(
                tenant: "tenant-a",
                source: new RawSourceMetadata("Vendor-X", "connector-y", "1.0.0", "stable"),
                upstream: new RawUpstreamMetadata(
                    UpstreamId: "GHSA-2025-0001",
                    DocumentVersion: "v1",
                    RetrievedAt: DateTimeOffset.Parse("2025-10-29T12:34:56Z"),
                    ContentHash: "sha256:abc123",
                    Signature: new RawSignatureMetadata(true, "dsse", "key1", "sig1"),
                    Provenance: ImmutableDictionary.CreateRange(new[] { new KeyValuePair<string, string>("api", "https://example.test/api") })),
                content: new RawContent(
                    Format: "OSV",
                    SpecVersion: "1.0.0",
                    Raw: ParseJsonElement("""{"id":"GHSA-2025-0001"}"""),
                    Encoding: null),
                identifiers: new RawIdentifiers(
                    Aliases: ImmutableArray.Create("CVE-2025-0001", "cve-2025-0001"),
                    PrimaryId: "CVE-2025-0001"),
                linkset: new RawLinkset
                {
                    Aliases = ImmutableArray.Create("GHSA-xxxx-yyyy"),
                    PackageUrls = ImmutableArray.Create("pkg:npm/example@1.0.0"),
                    Cpes = ImmutableArray.Create("cpe:/a:example:product:1.0"),
                    References = ImmutableArray.Create(new RawReference("advisory", "https://example.test/advisory", "vendor")),
                    ReconciledFrom = ImmutableArray.Create("connector-y"),
                    Notes = ImmutableDictionary.CreateRange(new[] { new KeyValuePair<string, string>("range-fixed", "1.0.1") })
                },
                advisoryKey: "CVE-2025-0001",
                links: ImmutableArray.Create(
                    new RawLink("CVE", "CVE-2025-0001"),
                    new RawLink("GHSA", "GHSA-2025-0001"),
                    new RawLink("PRIMARY", "CVE-2025-0001")));

            await rawRepository.UpsertAsync(rawDocument, CancellationToken.None);

            var expectedRawLinkset = BuildRawLinkset(rawDocument.Identifiers, rawDocument.Linkset);
            var canonicalAliases = ImmutableArray.Create("cve-2025-0001", "ghsa-xxxx-yyyy");
            var canonicalPurls = rawDocument.Linkset.PackageUrls;
            var canonicalCpes = rawDocument.Linkset.Cpes;
            var canonicalReferences = rawDocument.Linkset.References;

            var observationId = "tenant-a:vendor-x:ghsa-2025-0001:sha256-abc123";
            var observationBson = BuildObservationDocument(
                observationId,
                rawDocument,
                canonicalAliases,
                canonicalPurls,
                canonicalCpes,
                canonicalReferences,
                rawDocument.Upstream.RetrievedAt,
                includeRawLinkset: false);
            await database
                .GetCollection<BsonDocument>(MongoStorageDefaults.Collections.AdvisoryObservations)
                .InsertOneAsync(observationBson);

            var migration = new EnsureAdvisoryObservationsRawLinksetMigration();
            await migration.ApplyAsync(database, CancellationToken.None);

            var storedBson = await database
                .GetCollection<BsonDocument>(MongoStorageDefaults.Collections.AdvisoryObservations)
                .Find(Builders<BsonDocument>.Filter.Eq("_id", observationId))
                .FirstOrDefaultAsync();

            Assert.NotNull(storedBson);
            Assert.True(storedBson.TryGetValue("rawLinkset", out var rawLinksetValue));

            var storedDocument = BsonSerializer.Deserialize<AdvisoryObservationDocument>(storedBson);
            var storedObservation = AdvisoryObservationDocumentFactory.ToModel(storedDocument);

            Assert.True(expectedRawLinkset.Aliases.SequenceEqual(storedObservation.RawLinkset.Aliases, StringComparer.Ordinal));
            Assert.True(expectedRawLinkset.PackageUrls.SequenceEqual(storedObservation.RawLinkset.PackageUrls, StringComparer.Ordinal));
            Assert.True(expectedRawLinkset.Cpes.SequenceEqual(storedObservation.RawLinkset.Cpes, StringComparer.Ordinal));
            Assert.True(expectedRawLinkset.References.SequenceEqual(storedObservation.RawLinkset.References));
            Assert.Equal(expectedRawLinkset.Notes, storedObservation.RawLinkset.Notes);
        }
        finally
        {
            await _fixture.Client.DropDatabaseAsync(databaseName);
        }
    }

    [Fact]
    public async Task ApplyAsync_ThrowsWhenRawDocumentMissing()
    {
        var databaseName = $"concelier-rawlinkset-missing-{Guid.NewGuid():N}";
        var database = _fixture.Client.GetDatabase(databaseName);
        await database.CreateCollectionAsync(MongoStorageDefaults.Collections.Migrations);
        await database.CreateCollectionAsync(MongoStorageDefaults.Collections.AdvisoryObservations);

        try
        {
            var rawDocument = RawDocumentFactory.CreateAdvisory(
                tenant: "tenant-b",
                source: new RawSourceMetadata("Vendor-Y", "connector-z", "2.0.0", "stable"),
                upstream: new RawUpstreamMetadata(
                    UpstreamId: "GHSA-9999-0001",
                    DocumentVersion: "v2",
                    RetrievedAt: DateTimeOffset.Parse("2025-10-30T00:00:00Z"),
                    ContentHash: "sha256:def456",
                    Signature: new RawSignatureMetadata(false),
                    Provenance: ImmutableDictionary<string, string>.Empty),
                content: new RawContent(
                    Format: "OSV",
                    SpecVersion: "1.0.0",
                    Raw: ParseJsonElement("""{"id":"GHSA-9999-0001"}"""),
                    Encoding: null),
                identifiers: new RawIdentifiers(
                    Aliases: ImmutableArray<string>.Empty,
                    PrimaryId: "GHSA-9999-0001"),
                linkset: new RawLinkset(),
                advisoryKey: "GHSA-9999-0001",
                links: ImmutableArray.Create(
                    new RawLink("GHSA", "GHSA-9999-0001"),
                    new RawLink("PRIMARY", "GHSA-9999-0001")));

            var observationId = "tenant-b:vendor-y:ghsa-9999-0001:sha256-def456";
            var document = BuildObservationDocument(
                observationId,
                rawDocument,
                ImmutableArray<string>.Empty,
                ImmutableArray<string>.Empty,
                ImmutableArray<string>.Empty,
                ImmutableArray<RawReference>.Empty,
                rawDocument.Upstream.RetrievedAt,
                includeRawLinkset: false);

            await database
                .GetCollection<BsonDocument>(MongoStorageDefaults.Collections.AdvisoryObservations)
                .InsertOneAsync(document);

            var migration = new EnsureAdvisoryObservationsRawLinksetMigration();

            await Assert.ThrowsAsync<InvalidOperationException>(
                () => migration.ApplyAsync(database, CancellationToken.None));
        }
        finally
        {
            await _fixture.Client.DropDatabaseAsync(databaseName);
        }
    }

    private static BsonDocument BuildObservationDocument(
        string observationId,
        AdvisoryRawDocument rawDocument,
        ImmutableArray<string> canonicalAliases,
        ImmutableArray<string> canonicalPurls,
        ImmutableArray<string> canonicalCpes,
        ImmutableArray<RawReference> canonicalReferences,
        DateTimeOffset createdAt,
        bool includeRawLinkset,
        RawLinkset? rawLinkset = null)
    {
        var sourceDocument = new BsonDocument
        {
            { "vendor", rawDocument.Source.Vendor },
            { "stream", string.IsNullOrWhiteSpace(rawDocument.Source.Stream) ? rawDocument.Source.Connector : rawDocument.Source.Stream! },
            { "api", rawDocument.Upstream.Provenance.TryGetValue("api", out var api) ? api : rawDocument.Source.Connector }
        };
        if (!string.IsNullOrWhiteSpace(rawDocument.Source.ConnectorVersion))
        {
            sourceDocument["collectorVersion"] = rawDocument.Source.ConnectorVersion;
        }

        var signatureDocument = new BsonDocument
        {
            { "present", rawDocument.Upstream.Signature.Present }
        };
        if (!string.IsNullOrWhiteSpace(rawDocument.Upstream.Signature.Format))
        {
            signatureDocument["format"] = rawDocument.Upstream.Signature.Format;
        }
        if (!string.IsNullOrWhiteSpace(rawDocument.Upstream.Signature.KeyId))
        {
            signatureDocument["keyId"] = rawDocument.Upstream.Signature.KeyId;
        }
        if (!string.IsNullOrWhiteSpace(rawDocument.Upstream.Signature.Signature))
        {
            signatureDocument["signature"] = rawDocument.Upstream.Signature.Signature;
        }

        var upstreamDocument = new BsonDocument
        {
            { "upstream_id", rawDocument.Upstream.UpstreamId },
            { "document_version", rawDocument.Upstream.DocumentVersion },
            { "fetchedAt", rawDocument.Upstream.RetrievedAt.UtcDateTime },
            { "receivedAt", rawDocument.Upstream.RetrievedAt.UtcDateTime },
            { "contentHash", rawDocument.Upstream.ContentHash },
            { "signature", signatureDocument },
            { "metadata", new BsonDocument(rawDocument.Upstream.Provenance) }
        };

        var contentDocument = new BsonDocument
        {
            { "format", rawDocument.Content.Format },
            { "raw", BsonDocument.Parse(rawDocument.Content.Raw.GetRawText()) }
        };
        if (!string.IsNullOrWhiteSpace(rawDocument.Content.SpecVersion))
        {
            contentDocument["specVersion"] = rawDocument.Content.SpecVersion;
        }

        var canonicalLinkset = new BsonDocument
        {
            { "aliases", new BsonArray(canonicalAliases) },
            { "purls", new BsonArray(canonicalPurls) },
            { "cpes", new BsonArray(canonicalCpes) },
            { "references", new BsonArray(canonicalReferences.Select(reference => new BsonDocument
            {
                { "type", reference.Type },
                { "url", reference.Url }
            })) }
        };

        var document = new BsonDocument
        {
            { "_id", observationId },
            { "tenant", rawDocument.Tenant },
            { "source", sourceDocument },
            { "upstream", upstreamDocument },
            { "content", contentDocument },
            { "linkset", canonicalLinkset },
            { "createdAt", createdAt.UtcDateTime },
            { "attributes", new BsonDocument() }
        };

        if (includeRawLinkset)
        {
            var actualRawLinkset = rawLinkset ?? throw new ArgumentNullException(nameof(rawLinkset));
            document["rawLinkset"] = new BsonDocument
            {
                { "aliases", new BsonArray(actualRawLinkset.Aliases) },
                { "purls", new BsonArray(actualRawLinkset.PackageUrls) },
                { "cpes", new BsonArray(actualRawLinkset.Cpes) },
                { "references", new BsonArray(actualRawLinkset.References.Select(reference => new BsonDocument
                {
                    { "type", reference.Type },
                    { "url", reference.Url },
                    { "source", reference.Source }
                })) },
                { "reconciled_from", new BsonArray(actualRawLinkset.ReconciledFrom) },
                { "notes", new BsonDocument(actualRawLinkset.Notes) }
            };
        }

        return document;
    }

    private static JsonElement ParseJsonElement(string json)
    {
        using var document = JsonDocument.Parse(json);
        return document.RootElement.Clone();
    }

    private static RawLinkset BuildRawLinkset(RawIdentifiers identifiers, RawLinkset linkset)
    {
        var aliasBuilder = ImmutableArray.CreateBuilder<string>();

        if (!string.IsNullOrWhiteSpace(identifiers.PrimaryId))
        {
            aliasBuilder.Add(identifiers.PrimaryId);
        }

        if (!identifiers.Aliases.IsDefaultOrEmpty)
        {
            foreach (var alias in identifiers.Aliases)
            {
                if (!string.IsNullOrEmpty(alias))
                {
                    aliasBuilder.Add(alias);
                }
            }
        }

        if (!linkset.Aliases.IsDefaultOrEmpty)
        {
            foreach (var alias in linkset.Aliases)
            {
                if (!string.IsNullOrEmpty(alias))
                {
                    aliasBuilder.Add(alias);
                }
            }
        }

        static ImmutableArray<string> EnsureArray(ImmutableArray<string> values)
            => values.IsDefault ? ImmutableArray<string>.Empty : values;

        static ImmutableArray<RawReference> EnsureReferences(ImmutableArray<RawReference> values)
            => values.IsDefault ? ImmutableArray<RawReference>.Empty : values;

        return linkset with
        {
            Aliases = aliasBuilder.ToImmutable(),
            PackageUrls = EnsureArray(linkset.PackageUrls),
            Cpes = EnsureArray(linkset.Cpes),
            References = EnsureReferences(linkset.References),
            ReconciledFrom = EnsureArray(linkset.ReconciledFrom),
            Notes = linkset.Notes ?? ImmutableDictionary<string, string>.Empty
        };
    }
}
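
// What the backfill asserts, in outline. This is a sketch of the expected migration
// behaviour as inferred from the two tests above, not the production code itself:
//   for each advisory_observations document missing "rawLinkset":
//     look up the matching advisory_raw document (tenant + vendor + upstream id + content hash);
//     if none exists, fail fast with InvalidOperationException (second test);
//     otherwise union the raw identifiers and linkset into "rawLinkset", defaulting
//     null arrays to empty and notes to an empty map (see BuildRawLinkset above).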
@@ -1,706 +0,0 @@
using System;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.Extensions.Logging.Abstractions;
using Microsoft.Extensions.Options;
using MongoDB.Bson;
using MongoDB.Driver;
using StellaOps.Concelier.Storage.Mongo;
using StellaOps.Concelier.Storage.Mongo.Migrations;
using Xunit;

namespace StellaOps.Concelier.Storage.Mongo.Tests.Migrations;

[Collection("mongo-fixture")]
public sealed class MongoMigrationRunnerTests
{
    private readonly MongoIntegrationFixture _fixture;

    public MongoMigrationRunnerTests(MongoIntegrationFixture fixture)
    {
        _fixture = fixture;
    }

    [Fact]
    public async Task RunAsync_AppliesPendingMigrationsOnce()
    {
        var databaseName = $"concelier-migrations-{Guid.NewGuid():N}";
        var database = _fixture.Client.GetDatabase(databaseName);
        await database.CreateCollectionAsync(MongoStorageDefaults.Collections.Migrations);

        try
        {
            var migration = new TestMigration();
            var runner = new MongoMigrationRunner(
                database,
                new IMongoMigration[] { migration },
                NullLogger<MongoMigrationRunner>.Instance,
                TimeProvider.System);

            await runner.RunAsync(CancellationToken.None);
            await runner.RunAsync(CancellationToken.None);

            Assert.Equal(1, migration.ApplyCount);

            var count = await database
                .GetCollection<BsonDocument>(MongoStorageDefaults.Collections.Migrations)
                .CountDocumentsAsync(FilterDefinition<BsonDocument>.Empty);
            Assert.Equal(1, count);
        }
        finally
        {
            await _fixture.Client.DropDatabaseAsync(databaseName);
        }
    }

    [Fact]
    public async Task EnsureDocumentExpiryIndexesMigration_CreatesTtlIndexWhenRetentionEnabled()
    {
        var databaseName = $"concelier-doc-ttl-{Guid.NewGuid():N}";
        var database = _fixture.Client.GetDatabase(databaseName);
        await database.CreateCollectionAsync(MongoStorageDefaults.Collections.Document);
        await database.CreateCollectionAsync(MongoStorageDefaults.Collections.Migrations);

        try
        {
            var options = Options.Create(new MongoStorageOptions
            {
                RawDocumentRetention = TimeSpan.FromDays(45),
                RawDocumentRetentionTtlGrace = TimeSpan.FromHours(12),
            });

            var migration = new EnsureDocumentExpiryIndexesMigration(options);
            var runner = new MongoMigrationRunner(
                database,
                new IMongoMigration[] { migration },
                NullLogger<MongoMigrationRunner>.Instance,
                TimeProvider.System);

            await runner.RunAsync(CancellationToken.None);

            var indexes = await database
                .GetCollection<BsonDocument>(MongoStorageDefaults.Collections.Document)
                .Indexes.ListAsync();
            var indexList = await indexes.ToListAsync();

            var ttlIndex = indexList.Single(x => x["name"].AsString == "document_expiresAt_ttl");
            Assert.Equal(0, ttlIndex["expireAfterSeconds"].ToDouble());
            Assert.True(ttlIndex["partialFilterExpression"].AsBsonDocument["expiresAt"].AsBsonDocument["$exists"].ToBoolean());
        }
        finally
        {
            await _fixture.Client.DropDatabaseAsync(databaseName);
        }
    }

    [Fact]
    public async Task EnsureDocumentExpiryIndexesMigration_DropsTtlIndexWhenRetentionDisabled()
    {
        var databaseName = $"concelier-doc-notl-{Guid.NewGuid():N}";
        var database = _fixture.Client.GetDatabase(databaseName);
        await database.CreateCollectionAsync(MongoStorageDefaults.Collections.Document);
        await database.CreateCollectionAsync(MongoStorageDefaults.Collections.Migrations);

        try
        {
            var collection = database.GetCollection<BsonDocument>(MongoStorageDefaults.Collections.Document);
            var keys = Builders<BsonDocument>.IndexKeys.Ascending("expiresAt");
            var options = new CreateIndexOptions<BsonDocument>
            {
                Name = "document_expiresAt_ttl",
                ExpireAfter = TimeSpan.Zero,
                PartialFilterExpression = Builders<BsonDocument>.Filter.Exists("expiresAt", true),
            };

            await collection.Indexes.CreateOneAsync(new CreateIndexModel<BsonDocument>(keys, options));

            var migration = new EnsureDocumentExpiryIndexesMigration(Options.Create(new MongoStorageOptions
            {
                RawDocumentRetention = TimeSpan.Zero,
            }));

            var runner = new MongoMigrationRunner(
                database,
                new IMongoMigration[] { migration },
                NullLogger<MongoMigrationRunner>.Instance,
                TimeProvider.System);

            await runner.RunAsync(CancellationToken.None);

            var indexes = await collection.Indexes.ListAsync();
            var indexList = await indexes.ToListAsync();

            Assert.DoesNotContain(indexList, x => x["name"].AsString == "document_expiresAt_ttl");
            var nonTtl = indexList.Single(x => x["name"].AsString == "document_expiresAt");
            Assert.False(nonTtl.Contains("expireAfterSeconds"));
        }
        finally
        {
            await _fixture.Client.DropDatabaseAsync(databaseName);
        }
    }

    [Fact]
    public async Task EnsureGridFsExpiryIndexesMigration_CreatesTtlIndexWhenRetentionEnabled()
    {
        var databaseName = $"concelier-gridfs-ttl-{Guid.NewGuid():N}";
        var database = _fixture.Client.GetDatabase(databaseName);
        await database.CreateCollectionAsync("documents.files");
        await database.CreateCollectionAsync(MongoStorageDefaults.Collections.Migrations);

        try
        {
            var migration = new EnsureGridFsExpiryIndexesMigration(Options.Create(new MongoStorageOptions
            {
                RawDocumentRetention = TimeSpan.FromDays(30),
            }));

            var runner = new MongoMigrationRunner(
                database,
                new IMongoMigration[] { migration },
                NullLogger<MongoMigrationRunner>.Instance,
                TimeProvider.System);

            await runner.RunAsync(CancellationToken.None);

            var indexes = await database.GetCollection<BsonDocument>("documents.files").Indexes.ListAsync();
            var indexList = await indexes.ToListAsync();

            var ttlIndex = indexList.Single(x => x["name"].AsString == "gridfs_files_expiresAt_ttl");
            Assert.Equal(0, ttlIndex["expireAfterSeconds"].ToDouble());
        }
        finally
        {
            await _fixture.Client.DropDatabaseAsync(databaseName);
        }
    }

    [Fact]
    public async Task EnsureGridFsExpiryIndexesMigration_DropsTtlIndexWhenRetentionDisabled()
    {
        var databaseName = $"concelier-gridfs-notl-{Guid.NewGuid():N}";
        var database = _fixture.Client.GetDatabase(databaseName);
        await database.CreateCollectionAsync("documents.files");
        await database.CreateCollectionAsync(MongoStorageDefaults.Collections.Migrations);

        try
        {
            var collection = database.GetCollection<BsonDocument>("documents.files");
            var keys = Builders<BsonDocument>.IndexKeys.Ascending("metadata.expiresAt");
            var options = new CreateIndexOptions<BsonDocument>
            {
                Name = "gridfs_files_expiresAt_ttl",
                ExpireAfter = TimeSpan.Zero,
                PartialFilterExpression = Builders<BsonDocument>.Filter.Exists("metadata.expiresAt", true),
            };

            await collection.Indexes.CreateOneAsync(new CreateIndexModel<BsonDocument>(keys, options));

            var migration = new EnsureGridFsExpiryIndexesMigration(Options.Create(new MongoStorageOptions
            {
                RawDocumentRetention = TimeSpan.Zero,
            }));

            var runner = new MongoMigrationRunner(
                database,
                new IMongoMigration[] { migration },
                NullLogger<MongoMigrationRunner>.Instance,
                TimeProvider.System);

            await runner.RunAsync(CancellationToken.None);

            var indexes = await collection.Indexes.ListAsync();
            var indexList = await indexes.ToListAsync();

            Assert.DoesNotContain(indexList, x => x["name"].AsString == "gridfs_files_expiresAt_ttl");
        }
        finally
        {
            await _fixture.Client.DropDatabaseAsync(databaseName);
        }
    }

    [Fact]
    public async Task EnsureAdvisoryEventCollectionsMigration_CreatesIndexes()
    {
        var databaseName = $"concelier-advisory-events-{Guid.NewGuid():N}";
        var database = _fixture.Client.GetDatabase(databaseName);
        await database.CreateCollectionAsync(MongoStorageDefaults.Collections.AdvisoryStatements);
        await database.CreateCollectionAsync(MongoStorageDefaults.Collections.AdvisoryConflicts);
        await database.CreateCollectionAsync(MongoStorageDefaults.Collections.Migrations);

        try
        {
            var migration = new EnsureAdvisoryEventCollectionsMigration();
            var runner = new MongoMigrationRunner(
                database,
                new IMongoMigration[] { migration },
                NullLogger<MongoMigrationRunner>.Instance,
                TimeProvider.System);

            await runner.RunAsync(CancellationToken.None);

            var statementIndexes = await database
                .GetCollection<BsonDocument>(MongoStorageDefaults.Collections.AdvisoryStatements)
                .Indexes
                .ListAsync();
            var statementIndexNames = (await statementIndexes.ToListAsync()).Select(x => x["name"].AsString).ToArray();

            Assert.Contains("advisory_statements_vulnerability_asof_desc", statementIndexNames);
            Assert.Contains("advisory_statements_statementHash_unique", statementIndexNames);

            var conflictIndexes = await database
                .GetCollection<BsonDocument>(MongoStorageDefaults.Collections.AdvisoryConflicts)
                .Indexes
                .ListAsync();
            var conflictIndexNames = (await conflictIndexes.ToListAsync()).Select(x => x["name"].AsString).ToArray();

            Assert.Contains("advisory_conflicts_vulnerability_asof_desc", conflictIndexNames);
            Assert.Contains("advisory_conflicts_conflictHash_unique", conflictIndexNames);
        }
        finally
        {
            await _fixture.Client.DropDatabaseAsync(databaseName);
        }
    }

    private sealed class TestMigration : IMongoMigration
    {
        public int ApplyCount { get; private set; }

        public string Id => "999_test";

        public string Description => "test migration";

        public Task ApplyAsync(IMongoDatabase database, CancellationToken cancellationToken)
        {
            ApplyCount++;
            return Task.CompletedTask;
        }
    }

    [Fact]
    public async Task EnsureAdvisoryRawValidatorMigration_AppliesSchemaWithDefaultOptions()
    {
        var databaseName = $"concelier-advisory-validator-{Guid.NewGuid():N}";
        var database = _fixture.Client.GetDatabase(databaseName);

        try
        {
            var migration = new EnsureAdvisoryRawValidatorMigration(Options.Create(new MongoStorageOptions
            {
                AdvisoryRawValidator = new MongoCollectionValidatorOptions
                {
                    Level = MongoValidationLevel.Moderate,
                    Action = MongoValidationAction.Warn,
                },
            }));

            var runner = new MongoMigrationRunner(
                database,
                new IMongoMigration[] { migration },
                NullLogger<MongoMigrationRunner>.Instance,
                TimeProvider.System);

            await runner.RunAsync(CancellationToken.None);

            var collectionInfo = await GetCollectionInfoAsync(database, MongoStorageDefaults.Collections.AdvisoryRaw);
            var options = collectionInfo["options"].AsBsonDocument;

            Assert.Equal("moderate", options["validationLevel"].AsString);
            Assert.Equal("warn", options["validationAction"].AsString);

            var schema = options["validator"]["$jsonSchema"].AsBsonDocument;
            var required = schema["required"].AsBsonArray.Select(x => x.AsString).ToArray();
            Assert.Contains("tenant", required);
            Assert.Contains("source", required);
            Assert.Contains("upstream", required);
            Assert.Contains("content", required);
            Assert.Contains("linkset", required);

            var patternProperties = schema["patternProperties"].AsBsonDocument;
            Assert.True(patternProperties.Contains("^(?i)(severity|cvss|cvss_vector|merged_from|consensus_provider|reachability|asset_criticality|risk_score)$"));
            Assert.True(patternProperties.Contains("^(?i)effective_"));
        }
        finally
        {
            await _fixture.Client.DropDatabaseAsync(databaseName);
        }
    }

    [Fact]
    public async Task EnsureAdvisoryRawValidatorMigration_HonorsValidationToggles()
    {
        var databaseName = $"advraw-validator-off-{Guid.NewGuid():N}";
        var database = _fixture.Client.GetDatabase(databaseName);

        try
        {
            // Pre-create collection to exercise collMod path.
            await database.CreateCollectionAsync(MongoStorageDefaults.Collections.AdvisoryRaw);

            var migration = new EnsureAdvisoryRawValidatorMigration(Options.Create(new MongoStorageOptions
            {
                AdvisoryRawValidator = new MongoCollectionValidatorOptions
                {
                    Level = MongoValidationLevel.Off,
                    Action = MongoValidationAction.Error,
                },
            }));

            var runner = new MongoMigrationRunner(
                database,
                new IMongoMigration[] { migration },
                NullLogger<MongoMigrationRunner>.Instance,
                TimeProvider.System);

            await runner.RunAsync(CancellationToken.None);

            var collectionInfo = await GetCollectionInfoAsync(database, MongoStorageDefaults.Collections.AdvisoryRaw);
            var options = collectionInfo["options"].AsBsonDocument;

            Assert.Equal("off", options["validationLevel"].AsString);
            Assert.Equal("error", options["validationAction"].AsString);
            Assert.True(options.Contains("validator"));
        }
        finally
        {
            await _fixture.Client.DropDatabaseAsync(databaseName);
        }
    }

    [Fact]
    public async Task EnsureAdvisoryRawIdempotencyIndexMigration_CreatesUniqueIndex()
    {
        var databaseName = $"advraw-idx-{Guid.NewGuid():N}";
        var database = _fixture.Client.GetDatabase(databaseName);
        await database.CreateCollectionAsync(MongoStorageDefaults.Collections.AdvisoryRaw);

        try
        {
            var collection = database.GetCollection<BsonDocument>(MongoStorageDefaults.Collections.AdvisoryRaw);
            await collection.InsertOneAsync(
                CreateAdvisoryRawDocument(
                    id: "advisory_raw:test:alpha:v1",
                    vendor: "test",
                    upstreamId: "ALPHA",
                    contentHash: "sha256:abc",
                    tenant: "tenant-a",
                    retrievedAt: new DateTime(2025, 1, 1, 0, 0, 0, DateTimeKind.Utc)));

            var migration = new EnsureAdvisoryRawIdempotencyIndexMigration();
            var runner = new MongoMigrationRunner(
                database,
                new IMongoMigration[] { migration },
                NullLogger<MongoMigrationRunner>.Instance,
                TimeProvider.System);

            await runner.RunAsync(CancellationToken.None);

            using var cursor = await collection.Indexes.ListAsync();
            var indexes = await cursor.ToListAsync();
            var idempotencyIndex = indexes.Single(x => x["name"].AsString == "advisory_raw_idempotency");

            Assert.True(idempotencyIndex["unique"].ToBoolean());

            var key = idempotencyIndex["key"].AsBsonDocument;
            Assert.Collection(
                key.Elements,
                element =>
                {
                    Assert.Equal("source.vendor", element.Name);
                    Assert.Equal(1, element.Value.AsInt32);
                },
                element =>
                {
                    Assert.Equal("upstream.upstream_id", element.Name);
                    Assert.Equal(1, element.Value.AsInt32);
                },
                element =>
                {
                    Assert.Equal("upstream.content_hash", element.Name);
                    Assert.Equal(1, element.Value.AsInt32);
                },
                element =>
                {
                    Assert.Equal("tenant", element.Name);
                    Assert.Equal(1, element.Value.AsInt32);
                });
        }
        finally
        {
            await _fixture.Client.DropDatabaseAsync(databaseName);
        }
    }

    [Fact]
    public async Task EnsureAdvisoryRawIdempotencyIndexMigration_ThrowsWhenDuplicatesExist()
    {
        var databaseName = $"advraw-idx-dup-{Guid.NewGuid():N}";
        var database = _fixture.Client.GetDatabase(databaseName);
        await database.CreateCollectionAsync(MongoStorageDefaults.Collections.AdvisoryRaw);

        try
        {
            var collection = database.GetCollection<BsonDocument>(MongoStorageDefaults.Collections.AdvisoryRaw);

            await collection.InsertManyAsync(new[]
            {
                CreateAdvisoryRawDocument(
                    id: "advisory_raw:test:beta:v1",
                    vendor: "test",
                    upstreamId: "BETA",
                    contentHash: "sha256:def",
                    tenant: "tenant-b",
                    retrievedAt: new DateTime(2025, 2, 1, 0, 0, 0, DateTimeKind.Utc)),
                CreateAdvisoryRawDocument(
                    id: "advisory_raw:test:beta:v2",
                    vendor: "test",
                    upstreamId: "BETA",
                    contentHash: "sha256:def",
                    tenant: "tenant-b",
                    retrievedAt: new DateTime(2025, 2, 2, 0, 0, 0, DateTimeKind.Utc)),
            });

            var migration = new EnsureAdvisoryRawIdempotencyIndexMigration();
            var runner = new MongoMigrationRunner(
                database,
                new IMongoMigration[] { migration },
                NullLogger<MongoMigrationRunner>.Instance,
                TimeProvider.System);

            var exception = await Assert.ThrowsAsync<InvalidOperationException>(() => runner.RunAsync(CancellationToken.None));
            Assert.Contains("duplicate", exception.Message, StringComparison.OrdinalIgnoreCase);
            Assert.Contains("advisory_raw", exception.Message, StringComparison.OrdinalIgnoreCase);
        }
        finally
        {
            await _fixture.Client.DropDatabaseAsync(databaseName);
        }
    }

    [Fact]
    public async Task EnsureAdvisorySupersedesBackfillMigration_BackfillsSupersedesAndCreatesView()
    {
        var databaseName = $"advraw-supersedes-{Guid.NewGuid():N}";
        var database = _fixture.Client.GetDatabase(databaseName);
        await database.CreateCollectionAsync(MongoStorageDefaults.Collections.Advisory);
        await database.GetCollection<BsonDocument>(MongoStorageDefaults.Collections.Advisory)
            .InsertOneAsync(new BsonDocument("advisoryKey", "legacy"), cancellationToken: CancellationToken.None);

        var rawCollection = database.GetCollection<BsonDocument>(MongoStorageDefaults.Collections.AdvisoryRaw);
        await rawCollection.InsertManyAsync(new[]
        {
            CreateAdvisoryRawDocument(
                id: "advisory_raw:test:gamma:v1",
                vendor: "test",
                upstreamId: "GAMMA",
                contentHash: "sha256:111",
                tenant: "tenant-c",
                retrievedAt: new DateTime(2024, 12, 1, 0, 0, 0, DateTimeKind.Utc)),
            CreateAdvisoryRawDocument(
                id: "advisory_raw:test:gamma:v2",
                vendor: "test",
                upstreamId: "GAMMA",
                contentHash: "sha256:222",
                tenant: "tenant-c",
                retrievedAt: new DateTime(2024, 12, 10, 0, 0, 0, DateTimeKind.Utc)),
            CreateAdvisoryRawDocument(
                id: "advisory_raw:test:gamma:v3",
                vendor: "test",
                upstreamId: "GAMMA",
                contentHash: "sha256:333",
                tenant: "tenant-c",
                retrievedAt: new DateTime(2024, 12, 20, 0, 0, 0, DateTimeKind.Utc)),
        });

        try
        {
            var migration = new EnsureAdvisorySupersedesBackfillMigration();
            var runner = new MongoMigrationRunner(
                database,
                new IMongoMigration[] { migration },
                NullLogger<MongoMigrationRunner>.Instance,
                TimeProvider.System);

            await runner.RunAsync(CancellationToken.None);

            var info = await GetCollectionInfoAsync(database, MongoStorageDefaults.Collections.Advisory);
            Assert.NotNull(info);
            Assert.Equal("view", info!["type"].AsString);
            Assert.True(ViewTargets(info!, "advisory_backup_20251028"));

            var docs = await rawCollection
                .Find(Builders<BsonDocument>.Filter.Empty)
                .Sort(Builders<BsonDocument>.Sort.Ascending("_id"))
                .ToListAsync();

            Assert.Equal(BsonNull.Value, docs[0].GetValue("supersedes", BsonNull.Value));
            Assert.Equal("advisory_raw:test:gamma:v1", docs[1]["supersedes"].AsString);
            Assert.Equal("advisory_raw:test:gamma:v2", docs[2]["supersedes"].AsString);
        }
        finally
        {
            await _fixture.Client.DropDatabaseAsync(databaseName);
        }
    }

    [Fact]
    public async Task EnsureAdvisorySupersedesBackfillMigration_IsIdempotentWhenViewExists()
    {
        var databaseName = $"advraw-supersedes-idem-{Guid.NewGuid():N}";
        var database = _fixture.Client.GetDatabase(databaseName);
        await database.CreateCollectionAsync("advisory_backup_20251028");
        await database.RunCommandAsync<BsonDocument>(new BsonDocument
        {
            { "create", MongoStorageDefaults.Collections.Advisory },
            { "viewOn", "advisory_backup_20251028" },
        });

        var rawCollection = database.GetCollection<BsonDocument>(MongoStorageDefaults.Collections.AdvisoryRaw);
        await rawCollection.InsertManyAsync(new[]
        {
            CreateAdvisoryRawDocument(
                id: "advisory_raw:test:delta:v1",
                vendor: "test",
                upstreamId: "DELTA",
                contentHash: "sha256:aaa",
                tenant: "tenant-d",
                retrievedAt: new DateTime(2024, 11, 1, 0, 0, 0, DateTimeKind.Utc)),
            CreateAdvisoryRawDocument(
                id: "advisory_raw:test:delta:v2",
                vendor: "test",
                upstreamId: "DELTA",
                contentHash: "sha256:bbb",
                tenant: "tenant-d",
                retrievedAt: new DateTime(2024, 11, 3, 0, 0, 0, DateTimeKind.Utc)),
        });

        await rawCollection.UpdateOneAsync(
            Builders<BsonDocument>.Filter.Eq("_id", "advisory_raw:test:delta:v2"),
            Builders<BsonDocument>.Update.Set("supersedes", "advisory_raw:test:delta:v1"));

        try
        {
            var migration = new EnsureAdvisorySupersedesBackfillMigration();
            var runner = new MongoMigrationRunner(
                database,
                new IMongoMigration[] { migration },
                NullLogger<MongoMigrationRunner>.Instance,
                TimeProvider.System);

            await runner.RunAsync(CancellationToken.None);
            await runner.RunAsync(CancellationToken.None);

            var info = await GetCollectionInfoAsync(database, MongoStorageDefaults.Collections.Advisory);
            Assert.NotNull(info);
            Assert.Equal("view", info!["type"].AsString);
            Assert.True(ViewTargets(info!, "advisory_backup_20251028"));

            var docs = await rawCollection.Find(Builders<BsonDocument>.Filter.Empty).ToListAsync();
            Assert.Equal(BsonNull.Value, docs.Single(d => d["_id"].AsString == "advisory_raw:test:delta:v1").GetValue("supersedes", BsonNull.Value));
            Assert.Equal("advisory_raw:test:delta:v1", docs.Single(d => d["_id"].AsString == "advisory_raw:test:delta:v2")["supersedes"].AsString);
        }
        finally
        {
            await _fixture.Client.DropDatabaseAsync(databaseName);
        }
    }

    private static async Task<BsonDocument> GetCollectionInfoAsync(IMongoDatabase database, string name)
    {
        var command = new BsonDocument
        {
            { "listCollections", 1 },
            { "filter", new BsonDocument("name", name) },
        };

        var result = await database.RunCommandAsync<BsonDocument>(command);
        var batch = result["cursor"]["firstBatch"].AsBsonArray;
        return batch.Single().AsBsonDocument;
    }

    private static bool ViewTargets(BsonDocument info, string expectedSource)
    {
        if (!info.TryGetValue("options", out var options) || options is not BsonDocument optionsDoc)
        {
            return false;
        }

        return optionsDoc.TryGetValue("viewOn", out var viewOn) && string.Equals(viewOn.AsString, expectedSource, StringComparison.Ordinal);
    }

    private static BsonDocument CreateAdvisoryRawDocument(string id, string vendor, string upstreamId, string contentHash, string tenant, DateTime retrievedAt)
    {
        return new BsonDocument
        {
            { "_id", id },
            { "tenant", tenant },
            {
                "source",
                new BsonDocument
                {
                    { "vendor", vendor },
                    { "connector", "test-connector" },
                    { "version", "1.0.0" },
                }
            },
            {
                "upstream",
                new BsonDocument
                {
                    { "upstream_id", upstreamId },
                    { "document_version", "1" },
                    { "retrieved_at", retrievedAt },
                    { "content_hash", contentHash },
                    { "signature", new BsonDocument { { "present", false } } },
                    { "provenance", new BsonDocument { { "http.method", "GET" } } },
                }
            },
            {
                "content",
                new BsonDocument
                {
                    { "format", "csaf" },
                    { "raw", new BsonDocument("id", upstreamId) },
                }
            },
            {
                "identifiers",
                new BsonDocument
                {
                    { "aliases", new BsonArray(new[] { upstreamId }) },
                    { "primary", upstreamId },
                }
            },
            {
                "linkset",
                new BsonDocument
                {
                    { "aliases", new BsonArray() },
                    { "purls", new BsonArray() },
                    { "cpes", new BsonArray() },
                    { "references", new BsonArray() },
                    { "reconciled_from", new BsonArray() },
                    { "notes", new BsonDocument() },
                }
            },
            { "advisory_key", upstreamId.ToUpperInvariant() },
            {
                "links",
                new BsonArray
                {
                    new BsonDocument
                    {
                        { "scheme", "PRIMARY" },
                        { "value", upstreamId.ToUpperInvariant() }
                    }
                }
            },
            { "created_at", retrievedAt },
            { "ingested_at", retrievedAt },
            { "supersedes", BsonNull.Value }
        };
    }
}
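
// Runner contract pinned down by RunAsync_AppliesPendingMigrationsOnce. A minimal
// sketch, assuming MongoMigrationRunner keeps a ledger in the migrations collection
// (identifiers here are illustrative, not the production implementation):
//   foreach (var migration in migrations ordered by Id)
//     if the ledger already holds { _id: migration.Id } then skip;
//     await migration.ApplyAsync(database, ct);
//     insert { _id: migration.Id, description: migration.Description, appliedAt: now };
// Hence a second RunAsync is a no-op: ApplyCount stays 1 and the ledger holds one document.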
@@ -1,223 +0,0 @@
using System;
using System.Collections.Generic;
using System.Collections.Immutable;
using System.Linq;
using System.Text;
using System.Threading;
using System.Threading.Tasks;
using MongoDB.Bson;
using MongoDB.Driver;
using StellaOps.Concelier.Core.Events;
using StellaOps.Concelier.Models;
using StellaOps.Concelier.Storage.Mongo;
using StellaOps.Concelier.Storage.Mongo.Conflicts;
using StellaOps.Concelier.Storage.Mongo.Events;
using StellaOps.Concelier.Storage.Mongo.Statements;
using StellaOps.Concelier.Testing;
using StellaOps.Cryptography;
using StellaOps.Provenance.Mongo;
using Xunit;

namespace StellaOps.Concelier.Storage.Mongo.Tests;

[Collection("mongo-fixture")]
public sealed class MongoAdvisoryEventRepositoryTests
{
    private readonly IMongoDatabase _database;
    private readonly MongoAdvisoryEventRepository _repository;
    private static readonly ICryptoHash Hash = CryptoHashFactory.CreateDefault();

    public MongoAdvisoryEventRepositoryTests(MongoIntegrationFixture fixture)
    {
        _database = fixture.Database ?? throw new ArgumentNullException(nameof(fixture.Database));
        var statementStore = new AdvisoryStatementStore(_database);
        var conflictStore = new AdvisoryConflictStore(_database);
        _repository = new MongoAdvisoryEventRepository(statementStore, conflictStore);
    }

    [Fact]
    public async Task InsertAndFetchStatements_RoundTripsCanonicalPayload()
    {
        var advisory = CreateSampleAdvisory("CVE-2025-7777", "Sample advisory");
        var canonicalJson = CanonicalJsonSerializer.Serialize(advisory);
        var digest = Hash.ComputeHash(Encoding.UTF8.GetBytes(canonicalJson), HashAlgorithms.Sha256);
        var hash = ImmutableArray.Create(digest);

        var entry = new AdvisoryStatementEntry(
            Guid.NewGuid(),
            "CVE-2025-7777",
            "CVE-2025-7777",
            canonicalJson,
            hash,
            DateTimeOffset.Parse("2025-10-19T14:00:00Z"),
            DateTimeOffset.Parse("2025-10-19T14:05:00Z"),
            ImmutableArray<Guid>.Empty);

        await _repository.InsertStatementsAsync(new[] { entry }, CancellationToken.None);

        var results = await _repository.GetStatementsAsync("CVE-2025-7777", null, CancellationToken.None);

        var snapshot = Assert.Single(results);
        Assert.Equal(entry.StatementId, snapshot.StatementId);
        Assert.Equal(entry.CanonicalJson, snapshot.CanonicalJson);
        Assert.True(entry.StatementHash.SequenceEqual(snapshot.StatementHash));
    }

    [Fact]
    public async Task InsertAndFetchConflicts_PreservesDetails()
    {
        var detailJson = CanonicalJsonSerializer.Serialize(new ConflictPayload("severity", "mismatch"));
        var digest = Hash.ComputeHash(Encoding.UTF8.GetBytes(detailJson), HashAlgorithms.Sha256);
        var hash = ImmutableArray.Create(digest);
        var statementIds = ImmutableArray.Create(Guid.NewGuid(), Guid.NewGuid());

        var entry = new AdvisoryConflictEntry(
            Guid.NewGuid(),
            "CVE-2025-4242",
            detailJson,
            hash,
            DateTimeOffset.Parse("2025-10-19T15:00:00Z"),
            DateTimeOffset.Parse("2025-10-19T15:05:00Z"),
            statementIds);

        await _repository.InsertConflictsAsync(new[] { entry }, CancellationToken.None);

        var results = await _repository.GetConflictsAsync("CVE-2025-4242", null, CancellationToken.None);

        var conflict = Assert.Single(results);
        Assert.Equal(entry.CanonicalJson, conflict.CanonicalJson);
        Assert.True(entry.StatementIds.SequenceEqual(conflict.StatementIds));
        Assert.True(entry.ConflictHash.SequenceEqual(conflict.ConflictHash));
    }

    [Fact]
    public async Task InsertStatementsAsync_PersistsProvenanceMetadata()
    {
        var advisory = CreateSampleAdvisory("CVE-2025-8888", "Metadata coverage");
        var canonicalJson = CanonicalJsonSerializer.Serialize(advisory);
        var digest = Hash.ComputeHash(Encoding.UTF8.GetBytes(canonicalJson), HashAlgorithms.Sha256);
        var hash = ImmutableArray.Create(digest);
        var (dsse, trust) = CreateSampleDsseMetadata();

        var entry = new AdvisoryStatementEntry(
            Guid.NewGuid(),
            "CVE-2025-8888",
            "CVE-2025-8888",
            canonicalJson,
            hash,
            DateTimeOffset.Parse("2025-10-20T10:00:00Z"),
            DateTimeOffset.Parse("2025-10-20T10:05:00Z"),
            ImmutableArray<Guid>.Empty,
            dsse,
            trust);

        await _repository.InsertStatementsAsync(new[] { entry }, CancellationToken.None);

        var statements = _database.GetCollection<BsonDocument>(MongoStorageDefaults.Collections.AdvisoryStatements);
        var stored = await statements
            .Find(Builders<BsonDocument>.Filter.Eq("_id", entry.StatementId.ToString()))
            .FirstOrDefaultAsync();

        Assert.NotNull(stored);
        var provenance = stored!["provenance"].AsBsonDocument["dsse"].AsBsonDocument;
        Assert.Equal(dsse.EnvelopeDigest, provenance["envelopeDigest"].AsString);
        Assert.Equal(dsse.Key.KeyId, provenance["key"].AsBsonDocument["keyId"].AsString);

        var trustDoc = stored["trust"].AsBsonDocument;
        Assert.Equal(trust.Verifier, trustDoc["verifier"].AsString);
        Assert.Equal(trust.Witnesses, trustDoc["witnesses"].AsInt32);

        var roundTrip = await _repository.GetStatementsAsync("CVE-2025-8888", null, CancellationToken.None);
        var hydrated = Assert.Single(roundTrip);
        Assert.NotNull(hydrated.Provenance);
        Assert.NotNull(hydrated.Trust);
        Assert.Equal(dsse.EnvelopeDigest, hydrated.Provenance!.EnvelopeDigest);
        Assert.Equal(trust.Verifier, hydrated.Trust!.Verifier);
    }

    private static Advisory CreateSampleAdvisory(string key, string summary)
    {
        var provenance = new AdvisoryProvenance("nvd", "document", key, DateTimeOffset.Parse("2025-10-18T00:00:00Z"), new[] { ProvenanceFieldMasks.Advisory });
        return new Advisory(
            key,
            key,
            summary,
            "en",
            DateTimeOffset.Parse("2025-10-17T00:00:00Z"),
            DateTimeOffset.Parse("2025-10-18T00:00:00Z"),
            "medium",
            exploitKnown: false,
            aliases: new[] { key },
            references: Array.Empty<AdvisoryReference>(),
            affectedPackages: Array.Empty<AffectedPackage>(),
            cvssMetrics: Array.Empty<CvssMetric>(),
            provenance: new[] { provenance });
    }

    [Fact]
    public async Task AttachStatementProvenanceAsync_BackfillsExistingRecord()
    {
        var advisory = CreateSampleAdvisory("CVE-2025-9999", "Backfill metadata");
        var canonicalJson = CanonicalJsonSerializer.Serialize(advisory);
        var digest = Hash.ComputeHash(Encoding.UTF8.GetBytes(canonicalJson), HashAlgorithms.Sha256);
        var hash = ImmutableArray.Create(digest);

        var entry = new AdvisoryStatementEntry(
            Guid.NewGuid(),
            "CVE-2025-9999",
            "CVE-2025-9999",
            canonicalJson,
            hash,
            DateTimeOffset.Parse("2025-10-21T10:00:00Z"),
            DateTimeOffset.Parse("2025-10-21T10:05:00Z"),
            ImmutableArray<Guid>.Empty);

        await _repository.InsertStatementsAsync(new[] { entry }, CancellationToken.None);

        var (dsse, trust) = CreateSampleDsseMetadata();
        await _repository.AttachStatementProvenanceAsync(entry.StatementId, dsse, trust, CancellationToken.None);

        var statements = await _repository.GetStatementsAsync("CVE-2025-9999", null, CancellationToken.None);
        var updated = Assert.Single(statements);
        Assert.NotNull(updated.Provenance);
        Assert.NotNull(updated.Trust);
        Assert.Equal(dsse.EnvelopeDigest, updated.Provenance!.EnvelopeDigest);
        Assert.Equal(trust.Verifier, updated.Trust!.Verifier);
    }

    private static (DsseProvenance Provenance, TrustInfo Trust) CreateSampleDsseMetadata()
    {
        var provenance = new DsseProvenance
        {
            EnvelopeDigest = "sha256:deadbeef",
            PayloadType = "application/vnd.in-toto+json",
            Key = new DsseKeyInfo
            {
                KeyId = "cosign:SHA256-PKIX:TEST",
                Issuer = "fulcio",
                Algo = "ECDSA"
            },
            Rekor = new DsseRekorInfo
            {
                LogIndex = 42,
                Uuid = Guid.Parse("2d4d5f7c-1111-4a01-b9cb-aa42022a0a8c").ToString(),
                IntegratedTime = 1_700_000_000
            }
        };

        var trust = new TrustInfo
        {
            Verified = true,
            Verifier = "Authority@stella",
            Witnesses = 2,
            PolicyScore = 0.9
        };

        return (provenance, trust);
    }

    private sealed record ConflictPayload(string Type, string Reason);
}
@@ -1,143 +0,0 @@
using System;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.Extensions.Logging.Abstractions;
using Microsoft.Extensions.Options;
using MongoDB.Bson;
using MongoDB.Driver;
using StellaOps.Concelier.Storage.Mongo;
using StellaOps.Concelier.Storage.Mongo.Migrations;
using Xunit;

namespace StellaOps.Concelier.Storage.Mongo.Tests;

[Collection("mongo-fixture")]
public sealed class MongoBootstrapperTests : IClassFixture<MongoIntegrationFixture>
{
    private readonly MongoIntegrationFixture _fixture;

    public MongoBootstrapperTests(MongoIntegrationFixture fixture)
    {
        _fixture = fixture;
    }

    [Fact]
    public async Task InitializeAsync_CreatesNormalizedIndexesWhenSemVerStyleEnabled()
    {
        var databaseName = $"concelier-bootstrap-semver-{Guid.NewGuid():N}";
        var database = _fixture.Client.GetDatabase(databaseName);

        try
        {
            var runner = new MongoMigrationRunner(
                database,
                Array.Empty<IMongoMigration>(),
                NullLogger<MongoMigrationRunner>.Instance,
                TimeProvider.System);

            var bootstrapper = new MongoBootstrapper(
                database,
                Options.Create(new MongoStorageOptions { EnableSemVerStyle = true }),
                NullLogger<MongoBootstrapper>.Instance,
                runner);

            await bootstrapper.InitializeAsync(CancellationToken.None);

            var indexCursor = await database
                .GetCollection<BsonDocument>(MongoStorageDefaults.Collections.Advisory)
                .Indexes
                .ListAsync();
            var indexNames = (await indexCursor.ToListAsync()).Select(x => x["name"].AsString).ToArray();

            Assert.Contains("advisory_normalizedVersions_pkg_scheme_type", indexNames);
            Assert.Contains("advisory_normalizedVersions_value", indexNames);
        }
        finally
        {
            await _fixture.Client.DropDatabaseAsync(databaseName);
        }
    }

    [Fact]
    public async Task InitializeAsync_DoesNotCreateNormalizedIndexesWhenFeatureDisabled()
    {
        var databaseName = $"concelier-bootstrap-no-semver-{Guid.NewGuid():N}";
        var database = _fixture.Client.GetDatabase(databaseName);

        try
        {
            var runner = new MongoMigrationRunner(
                database,
                Array.Empty<IMongoMigration>(),
                NullLogger<MongoMigrationRunner>.Instance,
                TimeProvider.System);

            var bootstrapper = new MongoBootstrapper(
                database,
                Options.Create(new MongoStorageOptions { EnableSemVerStyle = false }),
                NullLogger<MongoBootstrapper>.Instance,
                runner);

            await bootstrapper.InitializeAsync(CancellationToken.None);

            var indexCursor = await database
                .GetCollection<BsonDocument>(MongoStorageDefaults.Collections.Advisory)
                .Indexes
                .ListAsync();
            var indexNames = (await indexCursor.ToListAsync()).Select(x => x["name"].AsString).ToArray();

            Assert.DoesNotContain("advisory_normalizedVersions_pkg_scheme_type", indexNames);
            Assert.DoesNotContain("advisory_normalizedVersions_value", indexNames);
        }
        finally
        {
            await _fixture.Client.DropDatabaseAsync(databaseName);
        }
    }

    [Fact]
    public async Task InitializeAsync_CreatesAdvisoryEventIndexes()
    {
        var databaseName = $"concelier-bootstrap-events-{Guid.NewGuid():N}";
        var database = _fixture.Client.GetDatabase(databaseName);

        try
        {
            var runner = new MongoMigrationRunner(
                database,
                Array.Empty<IMongoMigration>(),
                NullLogger<MongoMigrationRunner>.Instance,
                TimeProvider.System);

            var bootstrapper = new MongoBootstrapper(
                database,
                Options.Create(new MongoStorageOptions()),
                NullLogger<MongoBootstrapper>.Instance,
                runner);

            await bootstrapper.InitializeAsync(CancellationToken.None);

            var statementIndexes = await database
                .GetCollection<BsonDocument>(MongoStorageDefaults.Collections.AdvisoryStatements)
                .Indexes
                .ListAsync();
            var statementIndexNames = (await statementIndexes.ToListAsync()).Select(x => x["name"].AsString).ToArray();

            Assert.Contains("advisory_statements_vulnerability_asof_desc", statementIndexNames);
            Assert.Contains("advisory_statements_statementHash_unique", statementIndexNames);

            var conflictIndexes = await database
                .GetCollection<BsonDocument>(MongoStorageDefaults.Collections.AdvisoryConflicts)
                .Indexes
                .ListAsync();
            var conflictIndexNames = (await conflictIndexes.ToListAsync()).Select(x => x["name"].AsString).ToArray();

            Assert.Contains("advisory_conflicts_vulnerability_asof_desc", conflictIndexNames);
            Assert.Contains("advisory_conflicts_conflictHash_unique", conflictIndexNames);
        }
        finally
        {
            await _fixture.Client.DropDatabaseAsync(databaseName);
        }
    }
}
@@ -1,113 +0,0 @@
using Microsoft.Extensions.Logging.Abstractions;
using MongoDB.Driver;
using StellaOps.Concelier.Core.Jobs;
using StellaOps.Concelier.Storage.Mongo;
using Xunit;

namespace StellaOps.Concelier.Storage.Mongo.Tests;

[Collection("mongo-fixture")]
public sealed class MongoJobStoreTests : IClassFixture<MongoIntegrationFixture>
{
    private readonly MongoIntegrationFixture _fixture;

    public MongoJobStoreTests(MongoIntegrationFixture fixture)
    {
        _fixture = fixture;
    }

    [Fact]
    public async Task CreateStartCompleteLifecycle()
    {
        await ResetCollectionAsync();
        var collection = _fixture.Database.GetCollection<JobRunDocument>(MongoStorageDefaults.Collections.Jobs);
        var store = new MongoJobStore(collection, NullLogger<MongoJobStore>.Instance);

        var request = new JobRunCreateRequest(
            Kind: "mongo:test",
            Trigger: "unit",
            Parameters: new Dictionary<string, object?> { ["scope"] = "lifecycle" },
            ParametersHash: "abc",
            Timeout: TimeSpan.FromSeconds(5),
            LeaseDuration: TimeSpan.FromSeconds(2),
            CreatedAt: DateTimeOffset.UtcNow);

        var created = await store.CreateAsync(request, CancellationToken.None);
        Assert.Equal(JobRunStatus.Pending, created.Status);

        var started = await store.TryStartAsync(created.RunId, DateTimeOffset.UtcNow, CancellationToken.None);
        Assert.NotNull(started);
        Assert.Equal(JobRunStatus.Running, started!.Status);

        var completed = await store.TryCompleteAsync(created.RunId, new JobRunCompletion(JobRunStatus.Succeeded, DateTimeOffset.UtcNow, null), CancellationToken.None);
        Assert.NotNull(completed);
        Assert.Equal(JobRunStatus.Succeeded, completed!.Status);

        var recent = await store.GetRecentRunsAsync("mongo:test", 10, CancellationToken.None);
        var snapshot = Assert.Single(recent);
        Assert.Equal(JobRunStatus.Succeeded, snapshot.Status);

        var active = await store.GetActiveRunsAsync(CancellationToken.None);
        Assert.Empty(active);

        var last = await store.GetLastRunAsync("mongo:test", CancellationToken.None);
        Assert.NotNull(last);
        Assert.Equal(completed.RunId, last!.RunId);
    }

    [Fact]
    public async Task StartAndFailRunHonorsStateTransitions()
    {
        await ResetCollectionAsync();
        var collection = _fixture.Database.GetCollection<JobRunDocument>(MongoStorageDefaults.Collections.Jobs);
        var store = new MongoJobStore(collection, NullLogger<MongoJobStore>.Instance);

        var request = new JobRunCreateRequest(
            Kind: "mongo:failure",
            Trigger: "unit",
            Parameters: new Dictionary<string, object?>(),
            ParametersHash: null,
            Timeout: null,
            LeaseDuration: null,
            CreatedAt: DateTimeOffset.UtcNow);

        var created = await store.CreateAsync(request, CancellationToken.None);
        var firstStart = await store.TryStartAsync(created.RunId, DateTimeOffset.UtcNow, CancellationToken.None);
        Assert.NotNull(firstStart);

        // Second start attempt should be rejected once running.
        var secondStart = await store.TryStartAsync(created.RunId, DateTimeOffset.UtcNow.AddSeconds(1), CancellationToken.None);
        Assert.Null(secondStart);

        var failure = await store.TryCompleteAsync(
            created.RunId,
            new JobRunCompletion(JobRunStatus.Failed, DateTimeOffset.UtcNow.AddSeconds(2), "boom"),
            CancellationToken.None);

        Assert.NotNull(failure);
        Assert.Equal("boom", failure!.Error);
        Assert.Equal(JobRunStatus.Failed, failure.Status);
    }

    [Fact]
    public async Task CompletingUnknownRunReturnsNull()
    {
        await ResetCollectionAsync();
        var collection = _fixture.Database.GetCollection<JobRunDocument>(MongoStorageDefaults.Collections.Jobs);
        var store = new MongoJobStore(collection, NullLogger<MongoJobStore>.Instance);

        var result = await store.TryCompleteAsync(Guid.NewGuid(), new JobRunCompletion(JobRunStatus.Succeeded, DateTimeOffset.UtcNow, null), CancellationToken.None);

        Assert.Null(result);
    }

    private async Task ResetCollectionAsync()
    {
        try
        {
            await _fixture.Database.DropCollectionAsync(MongoStorageDefaults.Collections.Jobs);
        }
        catch (MongoCommandException ex) when (ex.CodeName == "NamespaceNotFound" || ex.Message.Contains("ns not found", StringComparison.OrdinalIgnoreCase))
        {
            // Collection did not exist yet; nothing to reset.
        }
    }
}
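
// Lifecycle the store enforces, as observed by these tests (transitions only):
//   Pending --TryStartAsync--> Running --TryCompleteAsync--> Succeeded | Failed
// TryStartAsync on an already-running run and TryCompleteAsync on an unknown run id
// both return null, so callers can treat null as a lost race rather than an error.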
@@ -1,55 +0,0 @@
using Microsoft.Extensions.Logging.Abstractions;
using MongoDB.Bson;
using StellaOps.Concelier.Storage.Mongo;
using StellaOps.Concelier.Testing;
using Xunit;

namespace StellaOps.Concelier.Storage.Mongo.Tests;

[Collection("mongo-fixture")]
public sealed class MongoSourceStateRepositoryTests : IClassFixture<MongoIntegrationFixture>
{
    private readonly MongoIntegrationFixture _fixture;

    public MongoSourceStateRepositoryTests(MongoIntegrationFixture fixture)
    {
        _fixture = fixture;
    }

    [Fact]
    public async Task UpsertAndUpdateCursorFlow()
    {
        var repository = new MongoSourceStateRepository(_fixture.Database, NullLogger<MongoSourceStateRepository>.Instance);
        var sourceName = "nvd";

        var record = new SourceStateRecord(
            SourceName: sourceName,
            Enabled: true,
            Paused: false,
            Cursor: new BsonDocument("page", 1),
            LastSuccess: null,
            LastFailure: null,
            FailCount: 0,
            BackoffUntil: null,
            UpdatedAt: DateTimeOffset.UtcNow,
            LastFailureReason: null);

        var upserted = await repository.UpsertAsync(record, CancellationToken.None);
        Assert.True(upserted.Enabled);

        var cursor = new BsonDocument("page", 2);
        var updated = await repository.UpdateCursorAsync(sourceName, cursor, DateTimeOffset.UtcNow, CancellationToken.None);
        Assert.NotNull(updated);
        Assert.Equal(0, updated!.FailCount);
        Assert.Equal(2, updated.Cursor["page"].AsInt32);

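        // A failure should bump the fail count, open a backoff window, and record the reason.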
        var failure = await repository.MarkFailureAsync(sourceName, DateTimeOffset.UtcNow, TimeSpan.FromMinutes(5), "network timeout", CancellationToken.None);
        Assert.NotNull(failure);
        Assert.Equal(1, failure!.FailCount);
        Assert.NotNull(failure.BackoffUntil);
        Assert.Equal("network timeout", failure.LastFailureReason);

        var fetched = await repository.TryGetAsync(sourceName, CancellationToken.None);
        Assert.NotNull(fetched);
        Assert.Equal(failure.BackoffUntil, fetched!.BackoffUntil);
        Assert.Equal("network timeout", fetched.LastFailureReason);
    }
}
@@ -1,95 +0,0 @@
using System;
using System.Collections.Generic;
using MongoDB.Bson;
using StellaOps.Concelier.Storage.Mongo.Observations;
using Xunit;

namespace StellaOps.Concelier.Storage.Mongo.Tests.Observations;

public sealed class AdvisoryObservationDocumentFactoryTests
{
    [Fact]
    public void ToModel_MapsDocumentToModel()
    {
        var document = new AdvisoryObservationDocument
        {
            Id = "tenant-a:obs-1",
            Tenant = "tenant-a",
            CreatedAt = DateTime.SpecifyKind(DateTime.UtcNow, DateTimeKind.Utc),
            Source = new AdvisoryObservationSourceDocument
            {
                Vendor = "vendor",
                Stream = "stream",
                Api = "https://api.example"
            },
            Upstream = new AdvisoryObservationUpstreamDocument
            {
                UpstreamId = "CVE-2025-1234",
                DocumentVersion = "1",
                FetchedAt = DateTime.SpecifyKind(DateTime.UtcNow.AddMinutes(-1), DateTimeKind.Utc),
                ReceivedAt = DateTime.SpecifyKind(DateTime.UtcNow, DateTimeKind.Utc),
                ContentHash = "sha256:abc",
                Signature = new AdvisoryObservationSignatureDocument
                {
                    Present = true,
                    Format = "pgp",
                    KeyId = "key",
                    Signature = "signature"
                }
            },
            Content = new AdvisoryObservationContentDocument
            {
                Format = "CSAF",
                SpecVersion = "2.0",
                Raw = BsonDocument.Parse("{\"example\":true}")
            },
            Linkset = new AdvisoryObservationLinksetDocument
            {
                Aliases = new List<string> { "CVE-2025-1234" },
                Purls = new List<string> { "pkg:generic/foo@1.0.0" },
                Cpes = new List<string> { "cpe:/a:vendor:product:1" },
                References = new List<AdvisoryObservationReferenceDocument>
                {
                    new() { Type = "advisory", Url = "https://example.com" }
                }
            },
            RawLinkset = new AdvisoryObservationRawLinksetDocument
            {
                Aliases = new List<string> { "CVE-2025-1234", "cve-2025-1234" },
                Scopes = new List<string> { "runtime", "build" },
                Relationships = new List<AdvisoryObservationRawRelationshipDocument>
                {
                    new() { Type = "depends_on", Source = "componentA", Target = "componentB", Provenance = "sbom-manifest" }
                },
                PackageUrls = new List<string> { "pkg:generic/foo@1.0.0" },
                Cpes = new List<string> { "cpe:/a:vendor:product:1" },
                References = new List<AdvisoryObservationRawReferenceDocument>
                {
                    new() { Type = "Advisory", Url = "https://example.com", Source = "vendor" }
                },
                ReconciledFrom = new List<string> { "source-a" },
                Notes = new Dictionary<string, string> { ["note-key"] = "note-value" }
            }
        };

        var observation = AdvisoryObservationDocumentFactory.ToModel(document);

        Assert.Equal("tenant-a:obs-1", observation.ObservationId);
        Assert.Equal("tenant-a", observation.Tenant);
        Assert.Equal("CVE-2025-1234", observation.Upstream.UpstreamId);
        Assert.Equal(new[] { "CVE-2025-1234" }, observation.Linkset.Aliases.ToArray());
        Assert.Contains("pkg:generic/foo@1.0.0", observation.Linkset.Purls);
        Assert.Equal("CSAF", observation.Content.Format);
        Assert.True(observation.Content.Raw?["example"]?.GetValue<bool>());
        Assert.Equal(document.Linkset.References![0].Type, observation.Linkset.References[0].Type);
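
        // Raw linkset values round-trip untouched – no trimming, casing, or de-duplication.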
Assert.Equal(new[] { "CVE-2025-1234", "cve-2025-1234" }, observation.RawLinkset.Aliases);
|
||||
Assert.Equal(new[] { "runtime", "build" }, observation.RawLinkset.Scopes);
|
||||
Assert.Equal("depends_on", observation.RawLinkset.Relationships[0].Type);
|
||||
Assert.Equal("componentA", observation.RawLinkset.Relationships[0].Source);
|
||||
Assert.Equal("componentB", observation.RawLinkset.Relationships[0].Target);
|
||||
Assert.Equal("sbom-manifest", observation.RawLinkset.Relationships[0].Provenance);
|
||||
Assert.Equal("Advisory", observation.RawLinkset.References[0].Type);
|
||||
Assert.Equal("vendor", observation.RawLinkset.References[0].Source);
|
||||
Assert.Equal("note-value", observation.RawLinkset.Notes["note-key"]);
|
||||
}
|
||||
}
|
||||
@@ -1,260 +0,0 @@
using System;
using System.Collections.Generic;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using MongoDB.Bson;
using MongoDB.Driver;
using StellaOps.Concelier.Core.Observations;
using StellaOps.Concelier.Storage.Mongo;
using StellaOps.Concelier.Storage.Mongo.Observations;
using StellaOps.Concelier.Testing;
using Xunit;

namespace StellaOps.Concelier.Storage.Mongo.Tests.Observations;

[Collection("mongo-fixture")]
public sealed class AdvisoryObservationStoreTests : IClassFixture<MongoIntegrationFixture>
{
    private readonly MongoIntegrationFixture _fixture;

    public AdvisoryObservationStoreTests(MongoIntegrationFixture fixture)
    {
        _fixture = fixture;
    }

    [Fact]
    public async Task FindByFiltersAsync_FiltersByAliasAndTenant()
    {
        await ResetCollectionAsync();

        var collection = _fixture.Database.GetCollection<AdvisoryObservationDocument>(MongoStorageDefaults.Collections.AdvisoryObservations);
        await collection.InsertManyAsync(new[]
        {
            CreateDocument(
                id: "tenant-a:nvd:alpha:1",
                tenant: "tenant-a",
                createdAt: new DateTime(2025, 1, 1, 0, 0, 0, DateTimeKind.Utc),
                aliases: new[] { "CvE-2025-0001 " },
                purls: new[] { "pkg:npm/demo@1.0.0" }),
            CreateDocument(
                id: "tenant-a:ghsa:beta:1",
                tenant: "tenant-a",
                createdAt: new DateTime(2025, 1, 2, 0, 0, 0, DateTimeKind.Utc),
                aliases: new[] { " ghsa-xyz0", "cve-2025-0001" },
                purls: new[] { "pkg:npm/demo@1.1.0" }),
            CreateDocument(
                id: "tenant-b:nvd:alpha:1",
                tenant: "tenant-b",
                createdAt: new DateTime(2025, 1, 3, 0, 0, 0, DateTimeKind.Utc),
                aliases: new[] { "cve-2025-0001" },
                purls: new[] { "pkg:npm/demo@2.0.0" })
        });

        var store = new AdvisoryObservationStore(collection);
        var result = await store.FindByFiltersAsync(
            tenant: "Tenant-A",
            observationIds: Array.Empty<string>(),
            aliases: new[] { " CVE-2025-0001 " },
            purls: Array.Empty<string>(),
            cpes: Array.Empty<string>(),
            cursor: null,
            limit: 5,
            CancellationToken.None);

        Assert.Equal(2, result.Count);
        Assert.Equal("tenant-a:ghsa:beta:1", result[0].ObservationId);
        Assert.Equal("tenant-a:nvd:alpha:1", result[1].ObservationId);
        Assert.All(result, observation => Assert.Equal("tenant-a", observation.Tenant));
        Assert.Equal("ghsa-xyz0", result[0].Linkset.Aliases[0]);
        Assert.Equal("CvE-2025-0001", result[1].Linkset.Aliases[0]);
        Assert.Equal(" ghsa-xyz0", result[0].RawLinkset.Aliases[0]);
        Assert.Equal("CvE-2025-0001 ", result[1].RawLinkset.Aliases[0]);
    }

    [Fact]
    public async Task FindByFiltersAsync_RespectsObservationIdsAndPurls()
    {
        await ResetCollectionAsync();

        var collection = _fixture.Database.GetCollection<AdvisoryObservationDocument>(MongoStorageDefaults.Collections.AdvisoryObservations);
        await collection.InsertManyAsync(new[]
        {
            CreateDocument(
                id: "tenant-a:osv:alpha:1",
                tenant: "tenant-a",
                createdAt: new DateTime(2025, 2, 1, 0, 0, 0, DateTimeKind.Utc),
                aliases: new[] { "cve-2025-0100" },
                purls: new[] { "pkg:pypi/demo@2.0.0" },
                cpes: new[] { "cpe:/a:vendor:product:2.0" }),
            CreateDocument(
                id: "tenant-a:osv:alpha:2",
                tenant: "tenant-a",
                createdAt: new DateTime(2025, 2, 2, 0, 0, 0, DateTimeKind.Utc),
                aliases: new[] { "cve-2025-0100" },
                purls: new[] { "pkg:pypi/demo@2.1.0" },
                cpes: new[] { "cpe:/a:vendor:product:2.1" })
        });

        var store = new AdvisoryObservationStore(collection);
        var result = await store.FindByFiltersAsync(
            tenant: "tenant-a",
            observationIds: new[] { "tenant-a:osv:alpha:1" },
            aliases: Array.Empty<string>(),
            purls: new[] { "pkg:pypi/demo@2.0.0" },
            cpes: new[] { "cpe:/a:vendor:product:2.0" },
            cursor: null,
            limit: 5,
            CancellationToken.None);

        Assert.Single(result);
        Assert.Equal("tenant-a:osv:alpha:1", result[0].ObservationId);
        Assert.Equal(
            new[] { "pkg:pypi/demo@2.0.0" },
            result[0].Linkset.Purls.ToArray());
        Assert.Equal(
            new[] { "cpe:/a:vendor:product:2.0" },
            result[0].Linkset.Cpes.ToArray());
    }

    [Fact]
    public async Task FindByFiltersAsync_AppliesCursorForPagination()
    {
        await ResetCollectionAsync();

        var collection = _fixture.Database.GetCollection<AdvisoryObservationDocument>(MongoStorageDefaults.Collections.AdvisoryObservations);
        var createdAt = new DateTime(2025, 3, 1, 0, 0, 0, DateTimeKind.Utc);
        await collection.InsertManyAsync(new[]
        {
            CreateDocument("tenant-a:source:1", "tenant-a", createdAt, aliases: new[] { "cve-1" }),
            CreateDocument("tenant-a:source:2", "tenant-a", createdAt.AddMinutes(-1), aliases: new[] { "cve-2" }),
            CreateDocument("tenant-a:source:3", "tenant-a", createdAt.AddMinutes(-2), aliases: new[] { "cve-3" })
        });

        var store = new AdvisoryObservationStore(collection);

        var firstPage = await store.FindByFiltersAsync(
            tenant: "tenant-a",
            observationIds: Array.Empty<string>(),
            aliases: Array.Empty<string>(),
            purls: Array.Empty<string>(),
            cpes: Array.Empty<string>(),
            cursor: null,
            limit: 2,
            CancellationToken.None);

        Assert.Equal(2, firstPage.Count);
        Assert.Equal("tenant-a:source:1", firstPage[0].ObservationId);
        Assert.Equal("tenant-a:source:2", firstPage[1].ObservationId);

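        // Resume from the last item of the first page; the cursor carries (CreatedAt, ObservationId).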
        var cursor = new AdvisoryObservationCursor(firstPage[1].CreatedAt, firstPage[1].ObservationId);
        var secondPage = await store.FindByFiltersAsync(
            tenant: "tenant-a",
            observationIds: Array.Empty<string>(),
            aliases: Array.Empty<string>(),
            purls: Array.Empty<string>(),
            cpes: Array.Empty<string>(),
            cursor: cursor,
            limit: 2,
            CancellationToken.None);

        Assert.Single(secondPage);
        Assert.Equal("tenant-a:source:3", secondPage[0].ObservationId);
    }

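    // Builds a minimal observation document: canonical linkset values are trimmed, raw linkset values keep the input verbatim.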
    private static AdvisoryObservationDocument CreateDocument(
        string id,
        string tenant,
        DateTime createdAt,
        IEnumerable<string>? aliases = null,
        IEnumerable<string>? purls = null,
        IEnumerable<string>? cpes = null)
    {
        var canonicalAliases = aliases?
            .Where(value => value is not null)
            .Select(value => value.Trim())
            .ToList();

        var canonicalPurls = purls?
            .Where(value => value is not null)
            .Select(value => value.Trim())
            .ToList();

        var canonicalCpes = cpes?
            .Where(value => value is not null)
            .Select(value => value.Trim())
            .ToList();

        var rawAliases = aliases?
            .Where(value => value is not null)
            .ToList();

        var rawPurls = purls?
            .Where(value => value is not null)
            .ToList();

        var rawCpes = cpes?
            .Where(value => value is not null)
            .ToList();

        return new AdvisoryObservationDocument
        {
            Id = id,
            Tenant = tenant.ToLowerInvariant(),
            CreatedAt = createdAt,
            Source = new AdvisoryObservationSourceDocument
            {
                Vendor = "nvd",
                Stream = "feed",
                Api = "https://example.test/api"
            },
            Upstream = new AdvisoryObservationUpstreamDocument
            {
                UpstreamId = id,
                DocumentVersion = null,
                FetchedAt = createdAt,
                ReceivedAt = createdAt,
                ContentHash = $"sha256:{id}",
                Signature = new AdvisoryObservationSignatureDocument
                {
                    Present = false
                },
                Metadata = new Dictionary<string, string>(StringComparer.Ordinal)
            },
            Content = new AdvisoryObservationContentDocument
            {
                Format = "csaf",
                SpecVersion = "2.0",
                Raw = BsonDocument.Parse("""{"id": "%ID%"}""".Replace("%ID%", id)),
                Metadata = new Dictionary<string, string>(StringComparer.Ordinal)
            },
            Linkset = new AdvisoryObservationLinksetDocument
            {
                Aliases = canonicalAliases,
                Purls = canonicalPurls,
                Cpes = canonicalCpes,
                References = new List<AdvisoryObservationReferenceDocument>()
            },
            RawLinkset = new AdvisoryObservationRawLinksetDocument
            {
                Aliases = rawAliases,
                PackageUrls = rawPurls,
                Cpes = rawCpes,
                References = new List<AdvisoryObservationRawReferenceDocument>()
            },
            Attributes = new Dictionary<string, string>(StringComparer.Ordinal)
        };
    }

    private async Task ResetCollectionAsync()
    {
        try
        {
            await _fixture.Database.DropCollectionAsync(MongoStorageDefaults.Collections.AdvisoryObservations);
        }
        catch (MongoCommandException ex) when (ex.CodeName == "NamespaceNotFound" || ex.Message.Contains("ns not found", StringComparison.OrdinalIgnoreCase))
        {
            // Collection did not exist – ignore.
        }
    }
}
@@ -1,100 +0,0 @@
using System;
using System.Collections.Generic;
using System.Collections.Immutable;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.Extensions.Logging.Abstractions;
using Microsoft.Extensions.Options;
using StellaOps.Concelier.Core.Observations;
using StellaOps.Concelier.Models.Observations;
using StellaOps.Concelier.Storage.Mongo.Observations;
using Xunit;

namespace StellaOps.Concelier.Storage.Mongo.Tests.Observations;

public class AdvisoryObservationTransportWorkerTests
{
    [Fact]
    public async Task Worker_publishes_outbox_entries_and_marks_published_once()
    {
        var evt = new AdvisoryObservationUpdatedEvent(
            Guid.NewGuid(),
            "tenant-1",
            "obs-1",
            "adv-1",
            new Models.Observations.AdvisoryObservationSource("vendor", "stream", "api", "1.0.0"),
            new AdvisoryObservationLinksetSummary(
                ImmutableArray<string>.Empty,
                ImmutableArray<string>.Empty,
                ImmutableArray<string>.Empty,
                ImmutableArray<string>.Empty,
                ImmutableArray<AdvisoryObservationRelationshipSummary>.Empty),
            "doc-sha",
            "hash-1",
            DateTimeOffset.UtcNow,
            ReplayCursor: "cursor-1",
            SupersedesId: null,
            TraceId: "trace-1");

        var outbox = new FakeOutbox(evt);
        var transport = new FakeTransport();
        var options = Options.Create(new AdvisoryObservationEventPublisherOptions
        {
            Enabled = true,
            Transport = "nats",
            Subject = "subject",
            Stream = "stream",
            NatsUrl = "nats://localhost:4222"
        });

        var worker = new AdvisoryObservationTransportWorker(outbox, transport, options, NullLogger<AdvisoryObservationTransportWorker>.Instance);

        await worker.StartAsync(CancellationToken.None);
        // Give the background loop one polling cycle to drain the outbox.
        await Task.Delay(150, CancellationToken.None);
        await worker.StopAsync(CancellationToken.None);

        Assert.Equal(1, transport.Sent.Count);
        Assert.Equal(evt.EventId, transport.Sent[0].EventId);
        Assert.Equal(1, outbox.MarkedCount);
    }

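    // Single-shot outbox stub: hands the event out once, then returns empty batches.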
    private sealed class FakeOutbox : IAdvisoryObservationEventOutbox
    {
        private readonly AdvisoryObservationUpdatedEvent _event;
        private bool _dequeued;
        public int MarkedCount { get; private set; }

        public FakeOutbox(AdvisoryObservationUpdatedEvent @event)
        {
            _event = @event;
        }

        public Task<IReadOnlyCollection<AdvisoryObservationUpdatedEvent>> DequeueAsync(int take, CancellationToken cancellationToken)
        {
            if (_dequeued)
            {
                return Task.FromResult<IReadOnlyCollection<AdvisoryObservationUpdatedEvent>>(Array.Empty<AdvisoryObservationUpdatedEvent>());
            }

            _dequeued = true;
            return Task.FromResult<IReadOnlyCollection<AdvisoryObservationUpdatedEvent>>(new[] { _event });
        }

        public Task MarkPublishedAsync(Guid eventId, DateTimeOffset publishedAt, CancellationToken cancellationToken)
        {
            MarkedCount++;
            return Task.CompletedTask;
        }
    }

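    // Captures sent events in memory instead of talking to a real broker.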
    private sealed class FakeTransport : IAdvisoryObservationEventTransport
    {
        public List<AdvisoryObservationUpdatedEvent> Sent { get; } = new();

        public Task SendAsync(AdvisoryObservationUpdatedEvent @event, CancellationToken cancellationToken)
        {
            Sent.Add(@event);
            return Task.CompletedTask;
        }
    }
}
@@ -1,94 +0,0 @@
using System;
using System.Collections.Generic;
using System.Collections.Immutable;
using MongoDB.Bson;
using StellaOps.Concelier.Models.Observations;
using StellaOps.Concelier.Storage.Mongo.Observations.V1;
using Xunit;

namespace StellaOps.Concelier.Storage.Mongo.Tests.Observations;

public sealed class AdvisoryObservationV1DocumentFactoryTests
{
    [Fact]
    public void ObservationIdBuilder_IsDeterministic()
    {
        var id1 = ObservationIdBuilder.Create("TENANT", "Ghsa", "GHSA-1234", "sha256:abc");
        var id2 = ObservationIdBuilder.Create("tenant", "ghsa", "GHSA-1234", "sha256:abc");

        Assert.Equal(id1, id2);
    }

    [Fact]
    public void ToModel_MapsAndNormalizes()
    {
        var document = new AdvisoryObservationV1Document
        {
            Id = new ObjectId("6710f1f1a1b2c3d4e5f60708"),
            TenantId = "TENANT-01",
            Source = "GHSA",
            AdvisoryId = "GHSA-2025-0001",
            Title = "Test title",
            Summary = "Summary",
            Severities = new List<ObservationSeverityDocument>
            {
                new() { System = "cvssv3.1", Score = 7.5, Vector = "AV:N/AC:L/PR:N/UI:N/S:U/C:H/I:N/A:N" }
            },
            Affected = new List<ObservationAffectedDocument>
            {
                new()
                {
                    Purl = "pkg:nuget/foo@1.2.3",
                    Package = "foo",
                    Versions = new List<string> { "1.2.3" },
                    Ranges = new List<ObservationVersionRangeDocument>
                    {
                        new()
                        {
                            Type = "ECOSYSTEM",
                            Events = new List<ObservationRangeEventDocument>
                            {
                                new() { Event = "introduced", Value = "1.0.0" },
                                new() { Event = "fixed", Value = "1.2.3" }
                            }
                        }
                    },
                    Ecosystem = "nuget",
                    Cpes = new List<string> { "cpe:/a:foo:bar:1.2.3" }
                }
            },
            References = new List<string> { "https://example.test/advisory" },
            Weaknesses = new List<string> { "CWE-79" },
            Published = new DateTime(2025, 11, 1, 0, 0, 0, DateTimeKind.Utc),
            Modified = new DateTime(2025, 11, 10, 0, 0, 0, DateTimeKind.Utc),
            IngestedAt = new DateTime(2025, 11, 12, 0, 0, 0, DateTimeKind.Utc),
            Provenance = new ObservationProvenanceDocument
            {
                SourceArtifactSha = "sha256:abc",
                FetchedAt = new DateTime(2025, 11, 12, 0, 0, 0, DateTimeKind.Utc),
                IngestJobId = "job-1",
                Signature = new ObservationSignatureDocument
                {
                    Present = true,
                    Format = "dsse",
                    KeyId = "k1",
                    Signature = "sig"
                }
            }
        };

        var model = AdvisoryObservationV1DocumentFactory.ToModel(document);

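        // Tenant and source are normalized to lower case; the advisory identifier keeps its original casing.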
Assert.Equal("6710f1f1a1b2c3d4e5f60708", model.ObservationId);
|
||||
Assert.Equal("tenant-01", model.Tenant);
|
||||
Assert.Equal("ghsa", model.Source);
|
||||
Assert.Equal("GHSA-2025-0001", model.AdvisoryId);
|
||||
Assert.Equal("Test title", model.Title);
|
||||
Assert.Single(model.Severities);
|
||||
Assert.Single(model.Affected);
|
||||
Assert.Single(model.References);
|
||||
Assert.Single(model.Weaknesses);
|
||||
Assert.Equal(new DateTimeOffset(2025, 11, 12, 0, 0, 0, TimeSpan.Zero), model.IngestedAt);
|
||||
Assert.NotNull(model.Provenance.Signature);
|
||||
}
|
||||
}
|
||||
@@ -1,93 +0,0 @@
using Microsoft.Extensions.Logging.Abstractions;
using Microsoft.Extensions.Options;
using Microsoft.Extensions.Time.Testing;
using MongoDB.Bson;
using MongoDB.Driver;
using MongoDB.Driver.GridFS;
using StellaOps.Concelier.Storage.Mongo;
using StellaOps.Concelier.Storage.Mongo.Documents;
using StellaOps.Concelier.Storage.Mongo.Dtos;
using StellaOps.Concelier.Testing;
using Xunit;

namespace StellaOps.Concelier.Storage.Mongo.Tests;

[Collection("mongo-fixture")]
public sealed class RawDocumentRetentionServiceTests : IClassFixture<MongoIntegrationFixture>
{
    private readonly MongoIntegrationFixture _fixture;

    public RawDocumentRetentionServiceTests(MongoIntegrationFixture fixture)
    {
        _fixture = fixture;
    }

    [Fact]
    public async Task SweepExpiredDocumentsAsync_RemovesExpiredRawDocuments()
    {
        var database = _fixture.Database;
        var documents = database.GetCollection<DocumentDocument>(MongoStorageDefaults.Collections.Document);
        var dtos = database.GetCollection<DtoDocument>(MongoStorageDefaults.Collections.Dto);
        var bucket = new GridFSBucket(database, new GridFSBucketOptions { BucketName = "documents" });

        var now = new DateTimeOffset(2024, 10, 1, 12, 0, 0, TimeSpan.Zero);
        var fakeTime = new FakeTimeProvider(now);

        var options = Options.Create(new MongoStorageOptions
        {
            ConnectionString = _fixture.Runner.ConnectionString,
            DatabaseName = database.DatabaseNamespace.DatabaseName,
            RawDocumentRetention = TimeSpan.FromDays(1),
            RawDocumentRetentionTtlGrace = TimeSpan.Zero,
            RawDocumentRetentionSweepInterval = TimeSpan.FromMinutes(5),
        });

        var expiredId = Guid.NewGuid().ToString();
        var gridFsId = await bucket.UploadFromBytesAsync("expired", new byte[] { 1, 2, 3 });
        await documents.InsertOneAsync(new DocumentDocument
        {
            Id = expiredId,
            SourceName = "nvd",
            Uri = "https://example.test/cve",
            FetchedAt = now.AddDays(-2).UtcDateTime,
            Sha256 = "abc",
            Status = "pending",
            ExpiresAt = now.AddMinutes(-5).UtcDateTime,
            GridFsId = gridFsId,
        });

        await dtos.InsertOneAsync(new DtoDocument
        {
            Id = Guid.NewGuid().ToString(),
            DocumentId = expiredId,
            SourceName = "nvd",
            SchemaVersion = "schema",
            Payload = new BsonDocument("value", 1),
            ValidatedAt = now.UtcDateTime,
        });

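        // A document whose ExpiresAt is still in the future must survive the sweep.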
        var freshId = Guid.NewGuid().ToString();
        await documents.InsertOneAsync(new DocumentDocument
        {
            Id = freshId,
            SourceName = "nvd",
            Uri = "https://example.test/future",
            FetchedAt = now.UtcDateTime,
            Sha256 = "def",
            Status = "pending",
            ExpiresAt = now.AddHours(1).UtcDateTime,
            GridFsId = null,
        });

        var service = new RawDocumentRetentionService(database, options, NullLogger<RawDocumentRetentionService>.Instance, fakeTime);

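        // One sweep removes the expired raw document, its DTO, and its GridFS payload.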
        var removed = await service.SweepExpiredDocumentsAsync(CancellationToken.None);

        Assert.Equal(1, removed);
        Assert.Equal(0, await documents.CountDocumentsAsync(d => d.Id == expiredId));
        Assert.Equal(0, await dtos.CountDocumentsAsync(d => d.DocumentId == expiredId));
        Assert.Equal(1, await documents.CountDocumentsAsync(d => d.Id == freshId));

        var filter = Builders<GridFSFileInfo>.Filter.Eq("_id", gridFsId);
        using var cursor = await bucket.FindAsync(filter);
        Assert.Empty(await cursor.ToListAsync());
    }
}
@@ -1,17 +0,0 @@
<?xml version="1.0" encoding="utf-8"?>
<Project Sdk="Microsoft.NET.Sdk">
  <PropertyGroup>
    <TargetFramework>net10.0</TargetFramework>
    <ImplicitUsings>enable</ImplicitUsings>
    <Nullable>enable</Nullable>
  </PropertyGroup>
  <ItemGroup>
    <PackageReference Update="Microsoft.Extensions.TimeProvider.Testing" Version="9.10.0" />
  </ItemGroup>
  <ItemGroup>
    <ProjectReference Include="../../__Libraries/StellaOps.Concelier.Core/StellaOps.Concelier.Core.csproj" />
    <ProjectReference Include="../../__Libraries/StellaOps.Concelier.Models/StellaOps.Concelier.Models.csproj" />
    <ProjectReference Include="../../__Libraries/StellaOps.Concelier.Storage.Mongo/StellaOps.Concelier.Storage.Mongo.csproj" />
    <ProjectReference Include="../../../__Libraries/StellaOps.Cryptography/StellaOps.Cryptography.csproj" />
  </ItemGroup>
</Project>