Add unit tests for VexLens normalizer, CPE parser, product mapper, and PURL parser
- Implemented comprehensive tests for VexLensNormalizer, including format detection and normalization scenarios.
- Added tests for CpeParser covering CPE 2.3 and 2.2 formats, invalid inputs, and canonical key generation.
- Created tests for ProductMapper to validate parsing and matching logic across different strictness levels.
- Developed tests for PurlParser to ensure correct parsing of various PURL formats and validation of identifiers.
- Introduced stubs for the Monaco editor and worker to facilitate testing in the web application.
- Updated the test project file to include the necessary dependencies.
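As a rough illustration of the CpeParser coverage described above, a test in this style might exercise both CPE generations and the canonical key. This is a hypothetical sketch only: `CpeParser.TryParse`, `ToCanonicalKey`, and the canonical key shape are assumed names, not the actual API of the added test project.

```csharp
using Xunit;

public sealed class CpeParserSketchTests
{
    // Hypothetical API: CpeParser.TryParse and ToCanonicalKey are assumed names.
    [Theory]
    [InlineData("cpe:2.3:a:vendor:product:1.0:*:*:*:*:*:*:*")] // CPE 2.3 formatted string
    [InlineData("cpe:/a:vendor:product:1.0")]                  // legacy CPE 2.2 URI
    public void Parse_AcceptsBothCpeGenerations(string raw)
    {
        var parsed = CpeParser.TryParse(raw, out var cpe);

        Assert.True(parsed);
        // Both generations should normalize to the same canonical key so that
        // product matching treats them as the same component.
        Assert.Equal("cpe:2.3:a:vendor:product:1.0", cpe!.ToCanonicalKey());
    }

    [Fact]
    public void Parse_RejectsMalformedInput()
    {
        Assert.False(CpeParser.TryParse("not-a-cpe", out _));
    }
}
```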
@@ -1,82 +0,0 @@
using System;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using MongoDB.Bson;
using MongoDB.Driver;
using StellaOps.Concelier.Storage.Mongo;
using StellaOps.Concelier.Storage.Mongo.Conflicts;
using StellaOps.Concelier.Testing;
using Xunit;

namespace StellaOps.Concelier.Storage.Mongo.Tests;

[Collection("mongo-fixture")]
public sealed class AdvisoryConflictStoreTests
{
    private readonly IMongoDatabase _database;

    public AdvisoryConflictStoreTests(MongoIntegrationFixture fixture)
    {
        _database = fixture.Database ?? throw new ArgumentNullException(nameof(fixture.Database));
    }

    [Fact]
    public async Task InsertAndRetrieve_PersistsConflicts()
    {
        var store = new AdvisoryConflictStore(_database);
        var vulnerabilityKey = $"CVE-{Guid.NewGuid():N}";
        var baseTime = DateTimeOffset.UtcNow;
        var statementIds = new[] { Guid.NewGuid(), Guid.NewGuid() };

        var conflict = new AdvisoryConflictRecord(
            Guid.NewGuid(),
            vulnerabilityKey,
            new byte[] { 0x10, 0x20 },
            baseTime,
            baseTime.AddSeconds(30),
            statementIds,
            new BsonDocument("explanation", "first-pass"));

        await store.InsertAsync(new[] { conflict }, CancellationToken.None);

        var results = await store.GetConflictsAsync(vulnerabilityKey, null, CancellationToken.None);

        Assert.Single(results);
        Assert.Equal(conflict.Id, results[0].Id);
        Assert.Equal(statementIds, results[0].StatementIds);
    }

    [Fact]
    public async Task GetConflicts_AsOfFilters()
    {
        var store = new AdvisoryConflictStore(_database);
        var vulnerabilityKey = $"CVE-{Guid.NewGuid():N}";
        var baseTime = DateTimeOffset.UtcNow;

        var earlyConflict = new AdvisoryConflictRecord(
            Guid.NewGuid(),
            vulnerabilityKey,
            new byte[] { 0x01 },
            baseTime,
            baseTime.AddSeconds(10),
            new[] { Guid.NewGuid() },
            new BsonDocument("stage", "early"));

        var lateConflict = new AdvisoryConflictRecord(
            Guid.NewGuid(),
            vulnerabilityKey,
            new byte[] { 0x02 },
            baseTime.AddMinutes(10),
            baseTime.AddMinutes(10).AddSeconds(15),
            new[] { Guid.NewGuid() },
            new BsonDocument("stage", "late"));

        await store.InsertAsync(new[] { earlyConflict, lateConflict }, CancellationToken.None);

        var results = await store.GetConflictsAsync(vulnerabilityKey, baseTime.AddMinutes(1), CancellationToken.None);

        Assert.Single(results);
        Assert.Equal("early", results[0].Details["stage"].AsString);
    }
}
@@ -1,96 +0,0 @@
using System;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using MongoDB.Bson;
using MongoDB.Driver;
using StellaOps.Concelier.Storage.Mongo;
using StellaOps.Concelier.Storage.Mongo.Statements;
using StellaOps.Concelier.Testing;
using Xunit;

namespace StellaOps.Concelier.Storage.Mongo.Tests;

[Collection("mongo-fixture")]
public sealed class AdvisoryStatementStoreTests
{
    private readonly IMongoDatabase _database;

    public AdvisoryStatementStoreTests(MongoIntegrationFixture fixture)
    {
        _database = fixture.Database ?? throw new ArgumentNullException(nameof(fixture.Database));
    }

    [Fact]
    public async Task InsertAndRetrieve_WritesImmutableStatements()
    {
        var store = new AdvisoryStatementStore(_database);
        var vulnerabilityKey = $"CVE-{Guid.NewGuid():N}";
        var baseTime = DateTimeOffset.UtcNow;

        var statements = new[]
        {
            new AdvisoryStatementRecord(
                Guid.NewGuid(),
                vulnerabilityKey,
                vulnerabilityKey,
                new byte[] { 0x01 },
                baseTime,
                baseTime.AddSeconds(5),
                new BsonDocument("version", "A"),
                new[] { Guid.NewGuid() }),
            new AdvisoryStatementRecord(
                Guid.NewGuid(),
                vulnerabilityKey,
                vulnerabilityKey,
                new byte[] { 0x02 },
                baseTime.AddMinutes(1),
                baseTime.AddMinutes(1).AddSeconds(5),
                new BsonDocument("version", "B"),
                Array.Empty<Guid>()),
        };

        await store.InsertAsync(statements, CancellationToken.None);

        var results = await store.GetStatementsAsync(vulnerabilityKey, null, CancellationToken.None);

        Assert.Equal(2, results.Count);
        Assert.Equal(statements[1].Id, results[0].Id); // sorted by AsOf desc
        Assert.True(results.All(record => record.Payload.Contains("version")));
    }

    [Fact]
    public async Task GetStatements_AsOfFiltersResults()
    {
        var store = new AdvisoryStatementStore(_database);
        var vulnerabilityKey = $"CVE-{Guid.NewGuid():N}";
        var baseTime = DateTimeOffset.UtcNow;

        var early = new AdvisoryStatementRecord(
            Guid.NewGuid(),
            vulnerabilityKey,
            vulnerabilityKey,
            new byte[] { 0xAA },
            baseTime,
            baseTime.AddSeconds(10),
            new BsonDocument("state", "early"),
            Array.Empty<Guid>());

        var late = new AdvisoryStatementRecord(
            Guid.NewGuid(),
            vulnerabilityKey,
            vulnerabilityKey,
            new byte[] { 0xBB },
            baseTime.AddMinutes(5),
            baseTime.AddMinutes(5).AddSeconds(10),
            new BsonDocument("state", "late"),
            Array.Empty<Guid>());

        await store.InsertAsync(new[] { early, late }, CancellationToken.None);

        var results = await store.GetStatementsAsync(vulnerabilityKey, baseTime.AddMinutes(1), CancellationToken.None);

        Assert.Single(results);
        Assert.Equal("early", results[0].Payload["state"].AsString);
    }
}
@@ -1,200 +0,0 @@
using System.Diagnostics;
using System.Linq;
using System.Threading;
using Microsoft.Extensions.Logging.Abstractions;
using Microsoft.Extensions.Options;
using StellaOps.Concelier.Models;
using StellaOps.Concelier.Storage.Mongo;
using StellaOps.Concelier.Storage.Mongo.Advisories;
using StellaOps.Concelier.Storage.Mongo.Aliases;
using StellaOps.Concelier.Storage.Mongo.Migrations;
using Xunit;
using Xunit.Abstractions;

namespace StellaOps.Concelier.Storage.Mongo.Tests;

[Collection("mongo-fixture")]
public sealed class AdvisoryStorePerformanceTests : IClassFixture<MongoIntegrationFixture>
{
    private const int LargeAdvisoryCount = 30;
    private const int AliasesPerAdvisory = 24;
    private const int ReferencesPerAdvisory = 180;
    private const int AffectedPackagesPerAdvisory = 140;
    private const int VersionRangesPerPackage = 4;
    private const int CvssMetricsPerAdvisory = 24;
    private const int ProvenanceEntriesPerAdvisory = 16;
    private static readonly string LargeSummary = new('A', 128 * 1024);
    private static readonly DateTimeOffset BasePublished = new(2024, 1, 1, 0, 0, 0, TimeSpan.Zero);
    private static readonly DateTimeOffset BaseRecorded = new(2024, 1, 1, 0, 0, 0, TimeSpan.Zero);
    private static readonly TimeSpan TotalBudget = TimeSpan.FromSeconds(28);
    private const double UpsertBudgetPerAdvisoryMs = 500;
    private const double FetchBudgetPerAdvisoryMs = 200;
    private const double FindBudgetPerAdvisoryMs = 200;

    private readonly MongoIntegrationFixture _fixture;
    private readonly ITestOutputHelper _output;

    public AdvisoryStorePerformanceTests(MongoIntegrationFixture fixture, ITestOutputHelper output)
    {
        _fixture = fixture;
        _output = output;
    }

    [Fact]
    public async Task UpsertAndQueryLargeAdvisories_CompletesWithinBudget()
    {
        var databaseName = $"concelier-performance-{Guid.NewGuid():N}";
        var database = _fixture.Client.GetDatabase(databaseName);

        try
        {
            var migrationRunner = new MongoMigrationRunner(
                database,
                Array.Empty<IMongoMigration>(),
                NullLogger<MongoMigrationRunner>.Instance,
                TimeProvider.System);

            var bootstrapper = new MongoBootstrapper(
                database,
                Options.Create(new MongoStorageOptions()),
                NullLogger<MongoBootstrapper>.Instance,
                migrationRunner);
            await bootstrapper.InitializeAsync(CancellationToken.None);

            var aliasStore = new AliasStore(database, NullLogger<AliasStore>.Instance);
            var store = new AdvisoryStore(
                database,
                aliasStore,
                NullLogger<AdvisoryStore>.Instance,
                Options.Create(new MongoStorageOptions()),
                TimeProvider.System);
            using var cts = new CancellationTokenSource(TimeSpan.FromSeconds(45));

            // Warm up collections (indexes, serialization caches) so perf timings exclude one-time setup work.
            var warmup = CreateLargeAdvisory(-1);
            await store.UpsertAsync(warmup, cts.Token);
            _ = await store.FindAsync(warmup.AdvisoryKey, cts.Token);
            _ = await store.GetRecentAsync(1, cts.Token);

            var advisories = Enumerable.Range(0, LargeAdvisoryCount)
                .Select(CreateLargeAdvisory)
                .ToArray();

            var upsertWatch = Stopwatch.StartNew();
            foreach (var advisory in advisories)
            {
                await store.UpsertAsync(advisory, cts.Token);
            }

            upsertWatch.Stop();
            var upsertPerAdvisory = upsertWatch.Elapsed.TotalMilliseconds / LargeAdvisoryCount;

            var fetchWatch = Stopwatch.StartNew();
            var recent = await store.GetRecentAsync(LargeAdvisoryCount, cts.Token);
            fetchWatch.Stop();
            var fetchPerAdvisory = fetchWatch.Elapsed.TotalMilliseconds / LargeAdvisoryCount;

            Assert.Equal(LargeAdvisoryCount, recent.Count);

            var findWatch = Stopwatch.StartNew();
            foreach (var advisory in advisories)
            {
                var fetched = await store.FindAsync(advisory.AdvisoryKey, cts.Token);
                Assert.NotNull(fetched);
            }

            findWatch.Stop();
            var findPerAdvisory = findWatch.Elapsed.TotalMilliseconds / LargeAdvisoryCount;

            var totalElapsed = upsertWatch.Elapsed + fetchWatch.Elapsed + findWatch.Elapsed;

            _output.WriteLine($"Upserted {LargeAdvisoryCount} large advisories in {upsertWatch.Elapsed} ({upsertPerAdvisory:F2} ms/doc).");
            _output.WriteLine($"Fetched recent advisories in {fetchWatch.Elapsed} ({fetchPerAdvisory:F2} ms/doc).");
            _output.WriteLine($"Looked up advisories individually in {findWatch.Elapsed} ({findPerAdvisory:F2} ms/doc).");
            _output.WriteLine($"Total elapsed {totalElapsed}.");

            Assert.True(upsertPerAdvisory <= UpsertBudgetPerAdvisoryMs, $"Upsert exceeded {UpsertBudgetPerAdvisoryMs} ms per advisory: {upsertPerAdvisory:F2} ms.");
            Assert.True(fetchPerAdvisory <= FetchBudgetPerAdvisoryMs, $"GetRecent exceeded {FetchBudgetPerAdvisoryMs} ms per advisory: {fetchPerAdvisory:F2} ms.");
            Assert.True(findPerAdvisory <= FindBudgetPerAdvisoryMs, $"Find exceeded {FindBudgetPerAdvisoryMs} ms per advisory: {findPerAdvisory:F2} ms.");
            Assert.True(totalElapsed <= TotalBudget, $"Mongo advisory operations exceeded total budget {TotalBudget}: {totalElapsed}.");
        }
        finally
        {
            await _fixture.Client.DropDatabaseAsync(databaseName);
        }
    }

    private static Advisory CreateLargeAdvisory(int index)
    {
        var baseKey = $"ADV-LARGE-{index:D4}";
        var published = BasePublished.AddDays(index);
        var modified = published.AddHours(6);

        var aliases = Enumerable.Range(0, AliasesPerAdvisory)
            .Select(i => $"ALIAS-{baseKey}-{i:D4}")
            .ToArray();

        var provenance = Enumerable.Range(0, ProvenanceEntriesPerAdvisory)
            .Select(i => new AdvisoryProvenance(
                source: i % 2 == 0 ? "nvd" : "vendor",
                kind: i % 3 == 0 ? "normalized" : "enriched",
                value: $"prov-{baseKey}-{i:D3}",
                recordedAt: BaseRecorded.AddDays(i)))
            .ToArray();

        var references = Enumerable.Range(0, ReferencesPerAdvisory)
            .Select(i => new AdvisoryReference(
                url: $"https://vuln.example.com/{baseKey}/ref/{i:D4}",
                kind: i % 2 == 0 ? "advisory" : "article",
                sourceTag: $"tag-{i % 7}",
                summary: $"Reference {baseKey} #{i}",
                provenance: provenance[i % provenance.Length]))
            .ToArray();

        var affectedPackages = Enumerable.Range(0, AffectedPackagesPerAdvisory)
            .Select(i => new AffectedPackage(
                type: i % 3 == 0 ? AffectedPackageTypes.Rpm : AffectedPackageTypes.Deb,
                identifier: $"pkg/{baseKey}/{i:D4}",
                platform: i % 4 == 0 ? "linux/x86_64" : "linux/aarch64",
                versionRanges: Enumerable.Range(0, VersionRangesPerPackage)
                    .Select(r => new AffectedVersionRange(
                        rangeKind: r % 2 == 0 ? "semver" : "evr",
                        introducedVersion: $"1.{index}.{i}.{r}",
                        fixedVersion: $"2.{index}.{i}.{r}",
                        lastAffectedVersion: $"1.{index}.{i}.{r}",
                        rangeExpression: $">=1.{index}.{i}.{r} <2.{index}.{i}.{r}",
                        provenance: provenance[(i + r) % provenance.Length]))
                    .ToArray(),
                statuses: Array.Empty<AffectedPackageStatus>(),
                provenance: new[]
                {
                    provenance[i % provenance.Length],
                    provenance[(i + 3) % provenance.Length],
                }))
            .ToArray();

        var cvssMetrics = Enumerable.Range(0, CvssMetricsPerAdvisory)
            .Select(i => new CvssMetric(
                version: i % 2 == 0 ? "3.1" : "2.0",
                vector: $"CVSS:3.1/AV:N/AC:L/PR:N/UI:N/S:U/C:H/I:H/A:{(i % 3 == 0 ? "H" : "L")}",
                baseScore: Math.Max(0, 9.8 - i * 0.2),
                baseSeverity: i % 3 == 0 ? "critical" : "high",
                provenance: provenance[i % provenance.Length]))
            .ToArray();

        return new Advisory(
            advisoryKey: baseKey,
            title: $"Large advisory {baseKey}",
            summary: LargeSummary,
            language: "en",
            published: published,
            modified: modified,
            severity: "critical",
            exploitKnown: index % 2 == 0,
            aliases: aliases,
            references: references,
            affectedPackages: affectedPackages,
            cvssMetrics: cvssMetrics,
            provenance: provenance);
    }
}
@@ -1,305 +0,0 @@
using System;
using System.Collections.Generic;
using System.Linq;
using Microsoft.Extensions.Logging.Abstractions;
using Microsoft.Extensions.Options;
using MongoDB.Driver;
using StellaOps.Concelier.Models;
using StellaOps.Concelier.Storage.Mongo.Advisories;
using StellaOps.Concelier.Storage.Mongo.Aliases;

namespace StellaOps.Concelier.Storage.Mongo.Tests;

[Collection("mongo-fixture")]
public sealed class AdvisoryStoreTests : IClassFixture<MongoIntegrationFixture>
{
    private readonly MongoIntegrationFixture _fixture;

    public AdvisoryStoreTests(MongoIntegrationFixture fixture)
    {
        _fixture = fixture;
    }

    [Fact]
    public async Task UpsertAndFetchAdvisory()
    {
        await DropCollectionAsync(MongoStorageDefaults.Collections.Advisory);
        await DropCollectionAsync(MongoStorageDefaults.Collections.Alias);

        var aliasStore = new AliasStore(_fixture.Database, NullLogger<AliasStore>.Instance);
        var store = new AdvisoryStore(
            _fixture.Database,
            aliasStore,
            NullLogger<AdvisoryStore>.Instance,
            Options.Create(new MongoStorageOptions()),
            TimeProvider.System);
        var advisory = new Advisory(
            advisoryKey: "ADV-1",
            title: "Sample Advisory",
            summary: "Demo",
            language: "en",
            published: DateTimeOffset.UtcNow,
            modified: DateTimeOffset.UtcNow,
            severity: "medium",
            exploitKnown: false,
            aliases: new[] { "ALIAS-1" },
            references: Array.Empty<AdvisoryReference>(),
            affectedPackages: Array.Empty<AffectedPackage>(),
            cvssMetrics: Array.Empty<CvssMetric>(),
            provenance: Array.Empty<AdvisoryProvenance>());

        await store.UpsertAsync(advisory, CancellationToken.None);

        var fetched = await store.FindAsync("ADV-1", CancellationToken.None);
        Assert.NotNull(fetched);
        Assert.Equal(advisory.AdvisoryKey, fetched!.AdvisoryKey);

        var recent = await store.GetRecentAsync(5, CancellationToken.None);
        Assert.NotEmpty(recent);

        var aliases = await aliasStore.GetByAdvisoryAsync("ADV-1", CancellationToken.None);
        Assert.Contains(aliases, record => record.Scheme == AliasStoreConstants.PrimaryScheme && record.Value == "ADV-1");
        Assert.Contains(aliases, record => record.Value == "ALIAS-1");
    }

    [Fact]
    public async Task RangePrimitives_RoundTripThroughMongo()
    {
        await DropCollectionAsync(MongoStorageDefaults.Collections.Advisory);
        await DropCollectionAsync(MongoStorageDefaults.Collections.Alias);

        var aliasStore = new AliasStore(_fixture.Database, NullLogger<AliasStore>.Instance);
        var store = new AdvisoryStore(
            _fixture.Database,
            aliasStore,
            NullLogger<AdvisoryStore>.Instance,
            Options.Create(new MongoStorageOptions()),
            TimeProvider.System);

        var recordedAt = new DateTimeOffset(2025, 1, 1, 0, 0, 0, TimeSpan.Zero);
        var provenance = new AdvisoryProvenance("source-x", "mapper", "payload-123", recordedAt);
        var rangePrimitives = new RangePrimitives(
            new SemVerPrimitive(
                Introduced: "1.0.0",
                IntroducedInclusive: true,
                Fixed: "1.2.0",
                FixedInclusive: false,
                LastAffected: "1.1.5",
                LastAffectedInclusive: true,
                ConstraintExpression: ">=1.0.0 <1.2.0"),
            new NevraPrimitive(
                Introduced: new NevraComponent("pkg", 0, "1.0.0", "1", "x86_64"),
                Fixed: new NevraComponent("pkg", 1, "1.2.0", "2", "x86_64"),
                LastAffected: null),
            new EvrPrimitive(
                Introduced: new EvrComponent(1, "1.0.0", "1"),
                Fixed: null,
                LastAffected: new EvrComponent(1, "1.1.5", null)),
            new Dictionary<string, string>(StringComparer.Ordinal)
            {
                ["channel"] = "stable",
                ["notesHash"] = "abc123",
            });

        var versionRange = new AffectedVersionRange(
            rangeKind: "semver",
            introducedVersion: "1.0.0",
            fixedVersion: "1.2.0",
            lastAffectedVersion: "1.1.5",
            rangeExpression: ">=1.0.0 <1.2.0",
            provenance,
            rangePrimitives);

        var affectedPackage = new AffectedPackage(
            type: "semver",
            identifier: "pkg@1.x",
            platform: "linux",
            versionRanges: new[] { versionRange },
            statuses: Array.Empty<AffectedPackageStatus>(),
            provenance: new[] { provenance });

        var advisory = new Advisory(
            advisoryKey: "ADV-RANGE-1",
            title: "Sample Range Primitive",
            summary: "Testing range primitive persistence.",
            language: "en",
            published: recordedAt,
            modified: recordedAt,
            severity: "medium",
            exploitKnown: false,
            aliases: new[] { "CVE-2025-0001" },
            references: Array.Empty<AdvisoryReference>(),
            affectedPackages: new[] { affectedPackage },
            cvssMetrics: Array.Empty<CvssMetric>(),
            provenance: new[] { provenance });

        await store.UpsertAsync(advisory, CancellationToken.None);

        var fetched = await store.FindAsync("ADV-RANGE-1", CancellationToken.None);
        Assert.NotNull(fetched);
        var fetchedPackage = Assert.Single(fetched!.AffectedPackages);
        var fetchedRange = Assert.Single(fetchedPackage.VersionRanges);

        Assert.Equal(versionRange.RangeKind, fetchedRange.RangeKind);
        Assert.Equal(versionRange.IntroducedVersion, fetchedRange.IntroducedVersion);
        Assert.Equal(versionRange.FixedVersion, fetchedRange.FixedVersion);
        Assert.Equal(versionRange.LastAffectedVersion, fetchedRange.LastAffectedVersion);
        Assert.Equal(versionRange.RangeExpression, fetchedRange.RangeExpression);
        Assert.Equal(versionRange.Provenance.Source, fetchedRange.Provenance.Source);
        Assert.Equal(versionRange.Provenance.Kind, fetchedRange.Provenance.Kind);
        Assert.Equal(versionRange.Provenance.Value, fetchedRange.Provenance.Value);
        Assert.Equal(versionRange.Provenance.DecisionReason, fetchedRange.Provenance.DecisionReason);
        Assert.Equal(versionRange.Provenance.RecordedAt, fetchedRange.Provenance.RecordedAt);
        Assert.True(versionRange.Provenance.FieldMask.SequenceEqual(fetchedRange.Provenance.FieldMask));

        Assert.NotNull(fetchedRange.Primitives);
        Assert.Equal(rangePrimitives.SemVer, fetchedRange.Primitives!.SemVer);
        Assert.Equal(rangePrimitives.Nevra, fetchedRange.Primitives.Nevra);
        Assert.Equal(rangePrimitives.Evr, fetchedRange.Primitives.Evr);
        Assert.Equal(rangePrimitives.VendorExtensions, fetchedRange.Primitives.VendorExtensions);
    }

    [Fact]
    public async Task UpsertAsync_SkipsNormalizedVersionsWhenFeatureDisabled()
    {
        await DropCollectionAsync(MongoStorageDefaults.Collections.Advisory);
        await DropCollectionAsync(MongoStorageDefaults.Collections.Alias);

        var aliasStore = new AliasStore(_fixture.Database, NullLogger<AliasStore>.Instance);
        var store = new AdvisoryStore(
            _fixture.Database,
            aliasStore,
            NullLogger<AdvisoryStore>.Instance,
            Options.Create(new MongoStorageOptions { EnableSemVerStyle = false }),
            TimeProvider.System);

        var advisory = CreateNormalizedAdvisory("ADV-NORM-DISABLED");
        await store.UpsertAsync(advisory, CancellationToken.None);

        var document = await _fixture.Database
            .GetCollection<AdvisoryDocument>(MongoStorageDefaults.Collections.Advisory)
            .Find(x => x.AdvisoryKey == advisory.AdvisoryKey)
            .FirstOrDefaultAsync();

        Assert.NotNull(document);
        Assert.True(document!.NormalizedVersions is null || document.NormalizedVersions.Count == 0);
    }

    [Fact]
    public async Task UpsertAsync_PopulatesNormalizedVersionsWhenFeatureEnabled()
    {
        await DropCollectionAsync(MongoStorageDefaults.Collections.Advisory);
        await DropCollectionAsync(MongoStorageDefaults.Collections.Alias);

        var aliasStore = new AliasStore(_fixture.Database, NullLogger<AliasStore>.Instance);
        var store = new AdvisoryStore(
            _fixture.Database,
            aliasStore,
            NullLogger<AdvisoryStore>.Instance,
            Options.Create(new MongoStorageOptions { EnableSemVerStyle = true }),
            TimeProvider.System);

        var advisory = CreateNormalizedAdvisory("ADV-NORM-ENABLED");
        await store.UpsertAsync(advisory, CancellationToken.None);

        var document = await _fixture.Database
            .GetCollection<AdvisoryDocument>(MongoStorageDefaults.Collections.Advisory)
            .Find(x => x.AdvisoryKey == advisory.AdvisoryKey)
            .FirstOrDefaultAsync();

        Assert.NotNull(document);
        var normalizedCollection = document!.NormalizedVersions;
        Assert.NotNull(normalizedCollection);
        var normalized = Assert.Single(normalizedCollection!);
        Assert.Equal("pkg:npm/example", normalized.PackageId);
        Assert.Equal(AffectedPackageTypes.SemVer, normalized.PackageType);
        Assert.Equal(NormalizedVersionSchemes.SemVer, normalized.Scheme);
        Assert.Equal(NormalizedVersionRuleTypes.Range, normalized.Type);
        Assert.Equal("range", normalized.Style);
        Assert.Equal("1.0.0", normalized.Min);
        Assert.True(normalized.MinInclusive);
        Assert.Equal("2.0.0", normalized.Max);
        Assert.False(normalized.MaxInclusive);
        Assert.Null(normalized.Value);
        Assert.Equal("ghsa:pkg:npm/example", normalized.Notes);
        Assert.Equal("range-decision", normalized.DecisionReason);
        Assert.Equal(">= 1.0.0 < 2.0.0", normalized.Constraint);
        Assert.Equal("ghsa", normalized.Source);
        Assert.Equal(new DateTime(2025, 10, 9, 0, 0, 0, DateTimeKind.Utc), normalized.RecordedAtUtc);
    }

    private static Advisory CreateNormalizedAdvisory(string advisoryKey)
    {
        var recordedAt = new DateTimeOffset(2025, 10, 9, 0, 0, 0, TimeSpan.Zero);
        var rangeProvenance = new AdvisoryProvenance(
            source: "ghsa",
            kind: "affected-range",
            value: "pkg:npm/example",
            recordedAt: recordedAt,
            fieldMask: new[] { "affectedpackages[].versionranges[]" },
            decisionReason: "range-decision");

        var semverPrimitive = new SemVerPrimitive(
            Introduced: "1.0.0",
            IntroducedInclusive: true,
            Fixed: "2.0.0",
            FixedInclusive: false,
            LastAffected: null,
            LastAffectedInclusive: false,
            ConstraintExpression: ">= 1.0.0 < 2.0.0");

        var normalizedRule = semverPrimitive.ToNormalizedVersionRule("ghsa:pkg:npm/example")!;
        var versionRange = new AffectedVersionRange(
            rangeKind: "semver",
            introducedVersion: "1.0.0",
            fixedVersion: "2.0.0",
            lastAffectedVersion: null,
            rangeExpression: ">= 1.0.0 < 2.0.0",
            provenance: rangeProvenance,
            primitives: new RangePrimitives(semverPrimitive, null, null, null));

        var package = new AffectedPackage(
            type: AffectedPackageTypes.SemVer,
            identifier: "pkg:npm/example",
            platform: "npm",
            versionRanges: new[] { versionRange },
            statuses: Array.Empty<AffectedPackageStatus>(),
            provenance: new[] { rangeProvenance },
            normalizedVersions: new[] { normalizedRule });

        var advisoryProvenance = new AdvisoryProvenance(
            source: "ghsa",
            kind: "document",
            value: advisoryKey,
            recordedAt: recordedAt,
            fieldMask: new[] { "advisory" },
            decisionReason: "document-decision");

        return new Advisory(
            advisoryKey: advisoryKey,
            title: "Normalized advisory",
            summary: "Contains normalized versions for storage testing.",
            language: "en",
            published: recordedAt,
            modified: recordedAt,
            severity: "medium",
            exploitKnown: false,
            aliases: new[] { $"{advisoryKey}-ALIAS" },
            references: Array.Empty<AdvisoryReference>(),
            affectedPackages: new[] { package },
            cvssMetrics: Array.Empty<CvssMetric>(),
            provenance: new[] { advisoryProvenance });
    }

    private async Task DropCollectionAsync(string collectionName)
    {
        try
        {
            await _fixture.Database.DropCollectionAsync(collectionName);
        }
        catch (MongoDB.Driver.MongoCommandException ex) when (ex.CodeName == "NamespaceNotFound" || ex.Message.Contains("ns not found", StringComparison.OrdinalIgnoreCase))
        {
            // ignore missing collection
        }
    }
}
@@ -1,60 +0,0 @@
using System;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.Extensions.Logging.Abstractions;
using MongoDB.Driver;
using StellaOps.Concelier.Storage.Mongo;
using StellaOps.Concelier.Storage.Mongo.Aliases;

namespace StellaOps.Concelier.Storage.Mongo.Tests;

[Collection("mongo-fixture")]
public sealed class AliasStoreTests : IClassFixture<MongoIntegrationFixture>
{
    private readonly MongoIntegrationFixture _fixture;

    public AliasStoreTests(MongoIntegrationFixture fixture)
    {
        _fixture = fixture;
    }

    [Fact]
    public async Task ReplaceAsync_UpsertsAliases_AndDetectsCollision()
    {
        await DropAliasCollectionAsync();
        var store = new AliasStore(_fixture.Database, NullLogger<AliasStore>.Instance);

        var timestamp = DateTimeOffset.UtcNow;
        await store.ReplaceAsync(
            "ADV-1",
            new[] { new AliasEntry("CVE", "CVE-2025-1234"), new AliasEntry(AliasStoreConstants.PrimaryScheme, "ADV-1") },
            timestamp,
            CancellationToken.None);

        var firstAliases = await store.GetByAdvisoryAsync("ADV-1", CancellationToken.None);
        Assert.Contains(firstAliases, record => record.Scheme == "CVE" && record.Value == "CVE-2025-1234");

        var result = await store.ReplaceAsync(
            "ADV-2",
            new[] { new AliasEntry("CVE", "CVE-2025-1234"), new AliasEntry(AliasStoreConstants.PrimaryScheme, "ADV-2") },
            timestamp.AddMinutes(1),
            CancellationToken.None);

        Assert.NotEmpty(result.Collisions);
        var collision = Assert.Single(result.Collisions);
        Assert.Equal("CVE", collision.Scheme);
        Assert.Contains("ADV-1", collision.AdvisoryKeys);
        Assert.Contains("ADV-2", collision.AdvisoryKeys);
    }

    private async Task DropAliasCollectionAsync()
    {
        try
        {
            await _fixture.Database.DropCollectionAsync(MongoStorageDefaults.Collections.Alias);
        }
        catch (MongoDB.Driver.MongoCommandException ex) when (ex.CodeName == "NamespaceNotFound" || ex.Message.Contains("ns not found", StringComparison.OrdinalIgnoreCase))
        {
        }
    }
}
@@ -1,51 +0,0 @@
using Microsoft.Extensions.Logging.Abstractions;
using StellaOps.Concelier.Storage.Mongo.Documents;

namespace StellaOps.Concelier.Storage.Mongo.Tests;

[Collection("mongo-fixture")]
public sealed class DocumentStoreTests : IClassFixture<MongoIntegrationFixture>
{
    private readonly MongoIntegrationFixture _fixture;

    public DocumentStoreTests(MongoIntegrationFixture fixture)
    {
        _fixture = fixture;
    }

    [Fact]
    public async Task UpsertAndLookupDocument()
    {
        var store = new DocumentStore(_fixture.Database, NullLogger<DocumentStore>.Instance);
        var id = Guid.NewGuid();
        var record = new DocumentRecord(
            id,
            "source",
            "https://example.com/advisory.json",
            DateTimeOffset.UtcNow,
            "sha123",
            "pending",
            "application/json",
            new Dictionary<string, string> { ["etag"] = "abc" },
            new Dictionary<string, string> { ["note"] = "test" },
            "etag-value",
            DateTimeOffset.UtcNow,
            null,
            DateTimeOffset.UtcNow.AddDays(30));

        var upserted = await store.UpsertAsync(record, CancellationToken.None);
        Assert.Equal(id, upserted.Id);

        var fetched = await store.FindBySourceAndUriAsync("source", "https://example.com/advisory.json", CancellationToken.None);
        Assert.NotNull(fetched);
        Assert.Equal("pending", fetched!.Status);
        Assert.Equal("test", fetched.Metadata!["note"]);

        var statusUpdated = await store.UpdateStatusAsync(id, "processed", CancellationToken.None);
        Assert.True(statusUpdated);

        var refreshed = await store.FindAsync(id, CancellationToken.None);
        Assert.NotNull(refreshed);
        Assert.Equal("processed", refreshed!.Status);
    }
}
@@ -1,40 +0,0 @@
using Microsoft.Extensions.Logging.Abstractions;
using MongoDB.Bson;
using StellaOps.Concelier.Storage.Mongo.Dtos;

namespace StellaOps.Concelier.Storage.Mongo.Tests;

[Collection("mongo-fixture")]
public sealed class DtoStoreTests : IClassFixture<MongoIntegrationFixture>
{
    private readonly MongoIntegrationFixture _fixture;

    public DtoStoreTests(MongoIntegrationFixture fixture)
    {
        _fixture = fixture;
    }

    [Fact]
    public async Task UpsertAndLookupDto()
    {
        var store = new DtoStore(_fixture.Database, NullLogger<DtoStore>.Instance);
        var record = new DtoRecord(
            Guid.NewGuid(),
            Guid.NewGuid(),
            "source",
            "1.0",
            new BsonDocument("value", 1),
            DateTimeOffset.UtcNow);

        var upserted = await store.UpsertAsync(record, CancellationToken.None);
        Assert.Equal(record.DocumentId, upserted.DocumentId);

        var fetched = await store.FindByDocumentIdAsync(record.DocumentId, CancellationToken.None);
        Assert.NotNull(fetched);
        Assert.Equal(1, fetched!.Payload["value"].AsInt32);

        var bySource = await store.GetBySourceAsync("source", 10, CancellationToken.None);
        Assert.Single(bySource);
        Assert.Equal(record.DocumentId, bySource[0].DocumentId);
    }
}
@@ -1,208 +0,0 @@
using System;
using System.Collections.Generic;
using System.Threading;
using System.Threading.Tasks;
using StellaOps.Concelier.Storage.Mongo.Exporting;

namespace StellaOps.Concelier.Storage.Mongo.Tests;

public sealed class ExportStateManagerTests
{
    [Fact]
    public async Task StoreFullExportInitializesBaseline()
    {
        var store = new InMemoryExportStateStore();
        var timeProvider = new TestTimeProvider(DateTimeOffset.Parse("2024-07-20T12:00:00Z"));
        var manager = new ExportStateManager(store, timeProvider);

        var record = await manager.StoreFullExportAsync(
            exporterId: "export:json",
            exportId: "20240720T120000Z",
            exportDigest: "sha256:abcd",
            cursor: "cursor-1",
            targetRepository: "registry.local/json",
            exporterVersion: "1.0.0",
            resetBaseline: true,
            manifest: Array.Empty<ExportFileRecord>(),
            cancellationToken: CancellationToken.None);

        Assert.Equal("export:json", record.Id);
        Assert.Equal("20240720T120000Z", record.BaseExportId);
        Assert.Equal("sha256:abcd", record.BaseDigest);
        Assert.Equal("sha256:abcd", record.LastFullDigest);
        Assert.Null(record.LastDeltaDigest);
        Assert.Equal("cursor-1", record.ExportCursor);
        Assert.Equal("registry.local/json", record.TargetRepository);
        Assert.Equal("1.0.0", record.ExporterVersion);
        Assert.Equal(timeProvider.Now, record.UpdatedAt);
    }

    [Fact]
    public async Task StoreFullExport_ResetBaselineOverridesExisting()
    {
        var store = new InMemoryExportStateStore();
        var timeProvider = new TestTimeProvider(DateTimeOffset.Parse("2024-07-20T12:00:00Z"));
        var manager = new ExportStateManager(store, timeProvider);

        await manager.StoreFullExportAsync(
            exporterId: "export:json",
            exportId: "20240720T120000Z",
            exportDigest: "sha256:base",
            cursor: "cursor-base",
            targetRepository: null,
            exporterVersion: "1.0.0",
            resetBaseline: true,
            manifest: Array.Empty<ExportFileRecord>(),
            cancellationToken: CancellationToken.None);

        timeProvider.Advance(TimeSpan.FromMinutes(5));
        var withoutReset = await manager.StoreFullExportAsync(
            exporterId: "export:json",
            exportId: "20240720T120500Z",
            exportDigest: "sha256:new",
            cursor: "cursor-new",
            targetRepository: null,
            exporterVersion: "1.0.1",
            resetBaseline: false,
            manifest: Array.Empty<ExportFileRecord>(),
            cancellationToken: CancellationToken.None);

        Assert.Equal("20240720T120000Z", withoutReset.BaseExportId);
        Assert.Equal("sha256:base", withoutReset.BaseDigest);
        Assert.Equal("sha256:new", withoutReset.LastFullDigest);
        Assert.Equal("cursor-new", withoutReset.ExportCursor);
        Assert.Equal(timeProvider.Now, withoutReset.UpdatedAt);

        timeProvider.Advance(TimeSpan.FromMinutes(5));
        var reset = await manager.StoreFullExportAsync(
            exporterId: "export:json",
            exportId: "20240720T121000Z",
            exportDigest: "sha256:final",
            cursor: "cursor-final",
            targetRepository: null,
            exporterVersion: "1.0.2",
            resetBaseline: true,
            manifest: Array.Empty<ExportFileRecord>(),
            cancellationToken: CancellationToken.None);

        Assert.Equal("20240720T121000Z", reset.BaseExportId);
        Assert.Equal("sha256:final", reset.BaseDigest);
        Assert.Equal("sha256:final", reset.LastFullDigest);
        Assert.Null(reset.LastDeltaDigest);
        Assert.Equal("cursor-final", reset.ExportCursor);
        Assert.Equal(timeProvider.Now, reset.UpdatedAt);
    }

    [Fact]
    public async Task StoreFullExport_ResetsBaselineWhenRepositoryChanges()
    {
        var store = new InMemoryExportStateStore();
        var timeProvider = new TestTimeProvider(DateTimeOffset.Parse("2024-07-21T08:00:00Z"));
        var manager = new ExportStateManager(store, timeProvider);

        await manager.StoreFullExportAsync(
            exporterId: "export:json",
            exportId: "20240721T080000Z",
            exportDigest: "sha256:base",
            cursor: "cursor-base",
            targetRepository: "registry/v1/json",
            exporterVersion: "1.0.0",
            resetBaseline: true,
            manifest: Array.Empty<ExportFileRecord>(),
            cancellationToken: CancellationToken.None);

        timeProvider.Advance(TimeSpan.FromMinutes(10));
        var updated = await manager.StoreFullExportAsync(
            exporterId: "export:json",
            exportId: "20240721T081000Z",
            exportDigest: "sha256:new",
            cursor: "cursor-new",
            targetRepository: "registry/v2/json",
            exporterVersion: "1.1.0",
            resetBaseline: false,
            manifest: Array.Empty<ExportFileRecord>(),
            cancellationToken: CancellationToken.None);

        Assert.Equal("20240721T081000Z", updated.BaseExportId);
        Assert.Equal("sha256:new", updated.BaseDigest);
        Assert.Equal("sha256:new", updated.LastFullDigest);
        Assert.Equal("registry/v2/json", updated.TargetRepository);
    }

    [Fact]
    public async Task StoreDeltaExportRequiresBaseline()
    {
        var store = new InMemoryExportStateStore();
        var manager = new ExportStateManager(store);

        await Assert.ThrowsAsync<InvalidOperationException>(() => manager.StoreDeltaExportAsync(
            exporterId: "export:json",
            deltaDigest: "sha256:def",
            cursor: null,
            exporterVersion: "1.0.1",
            manifest: Array.Empty<ExportFileRecord>(),
            cancellationToken: CancellationToken.None));
    }

    [Fact]
    public async Task StoreDeltaExportUpdatesExistingState()
    {
        var store = new InMemoryExportStateStore();
        var timeProvider = new TestTimeProvider(DateTimeOffset.Parse("2024-07-20T12:00:00Z"));
        var manager = new ExportStateManager(store, timeProvider);

        await manager.StoreFullExportAsync(
            exporterId: "export:json",
            exportId: "20240720T120000Z",
            exportDigest: "sha256:abcd",
            cursor: "cursor-1",
            targetRepository: null,
            exporterVersion: "1.0.0",
            resetBaseline: true,
            manifest: Array.Empty<ExportFileRecord>(),
            cancellationToken: CancellationToken.None);

        timeProvider.Advance(TimeSpan.FromMinutes(10));
        var delta = await manager.StoreDeltaExportAsync(
            exporterId: "export:json",
            deltaDigest: "sha256:ef01",
            cursor: "cursor-2",
            exporterVersion: "1.0.1",
            manifest: Array.Empty<ExportFileRecord>(),
            cancellationToken: CancellationToken.None);

        Assert.Equal("sha256:ef01", delta.LastDeltaDigest);
        Assert.Equal("cursor-2", delta.ExportCursor);
        Assert.Equal("1.0.1", delta.ExporterVersion);
        Assert.Equal(timeProvider.Now, delta.UpdatedAt);
        Assert.Equal("sha256:abcd", delta.LastFullDigest);
    }

    private sealed class InMemoryExportStateStore : IExportStateStore
    {
        private readonly Dictionary<string, ExportStateRecord> _records = new(StringComparer.Ordinal);

        public Task<ExportStateRecord?> FindAsync(string id, CancellationToken cancellationToken)
        {
            _records.TryGetValue(id, out var record);
            return Task.FromResult<ExportStateRecord?>(record);
        }

        public Task<ExportStateRecord> UpsertAsync(ExportStateRecord record, CancellationToken cancellationToken)
        {
            _records[record.Id] = record;
            return Task.FromResult(record);
        }
    }

    private sealed class TestTimeProvider : TimeProvider
    {
        public TestTimeProvider(DateTimeOffset start) => Now = start;

        public DateTimeOffset Now { get; private set; }

        public void Advance(TimeSpan delta) => Now = Now.Add(delta);

        public override DateTimeOffset GetUtcNow() => Now;
    }
}
@@ -1,42 +0,0 @@
using System;
using Microsoft.Extensions.Logging.Abstractions;
using StellaOps.Concelier.Storage.Mongo.Exporting;

namespace StellaOps.Concelier.Storage.Mongo.Tests;

[Collection("mongo-fixture")]
public sealed class ExportStateStoreTests : IClassFixture<MongoIntegrationFixture>
{
    private readonly MongoIntegrationFixture _fixture;

    public ExportStateStoreTests(MongoIntegrationFixture fixture)
    {
        _fixture = fixture;
    }

    [Fact]
    public async Task UpsertAndFetchExportState()
    {
        var store = new ExportStateStore(_fixture.Database, NullLogger<ExportStateStore>.Instance);
        var record = new ExportStateRecord(
            Id: "json",
            BaseExportId: "base",
            BaseDigest: "sha-base",
            LastFullDigest: "sha-full",
            LastDeltaDigest: null,
            ExportCursor: "cursor",
            TargetRepository: "repo",
            ExporterVersion: "1.0",
            UpdatedAt: DateTimeOffset.UtcNow,
            Files: Array.Empty<ExportFileRecord>());

        var saved = await store.UpsertAsync(record, CancellationToken.None);
        Assert.Equal("json", saved.Id);
        Assert.Empty(saved.Files);

        var fetched = await store.FindAsync("json", CancellationToken.None);
        Assert.NotNull(fetched);
        Assert.Equal("sha-full", fetched!.LastFullDigest);
        Assert.Empty(fetched.Files);
    }
}
@@ -1,174 +0,0 @@
using System;
using System.Collections.Generic;
using System.Collections.Immutable;
using System.Linq;
using System.Reflection;
using System.Threading;
using System.Threading.Tasks;
using MongoDB.Driver;
using StellaOps.Concelier.Core.Linksets;
using StellaOps.Concelier.Storage.Mongo;
using StellaOps.Concelier.Storage.Mongo.Linksets;
using StellaOps.Concelier.Testing;
using Xunit;

namespace StellaOps.Concelier.Storage.Mongo.Tests.Linksets;

public sealed class ConcelierMongoLinksetStoreTests : IClassFixture<MongoIntegrationFixture>
{
    private readonly MongoIntegrationFixture _fixture;

    public ConcelierMongoLinksetStoreTests(MongoIntegrationFixture fixture)
    {
        _fixture = fixture;
    }

    [Fact]
    public void MapToDocument_StoresConfidenceAndConflicts()
    {
        var linkset = new AdvisoryLinkset(
            "tenant",
            "ghsa",
            "GHSA-1234",
            ImmutableArray.Create("obs-1", "obs-2"),
            null,
            new AdvisoryLinksetProvenance(new[] { "h1", "h2" }, "tool", "policy"),
            0.82,
            new List<AdvisoryLinksetConflict>
            {
                new("severity", "disagree", new[] { "HIGH", "MEDIUM" }, new[] { "source-a", "source-b" })
            },
            DateTimeOffset.UtcNow,
            "job-1");

        var method = typeof(ConcelierMongoLinksetStore).GetMethod(
            "MapToDocument",
            BindingFlags.NonPublic | BindingFlags.Static);

        Assert.NotNull(method);

        var document = (AdvisoryLinksetDocument)method!.Invoke(null, new object?[] { linkset })!;

        Assert.Equal(linkset.Confidence, document.Confidence);
        Assert.NotNull(document.Conflicts);
        Assert.Single(document.Conflicts!);
        Assert.Equal("severity", document.Conflicts![0].Field);
        Assert.Equal("disagree", document.Conflicts![0].Reason);
        Assert.Equal(new[] { "source-a", "source-b" }, document.Conflicts![0].SourceIds);
    }

    [Fact]
    public void FromDocument_RestoresConfidenceAndConflicts()
    {
        var doc = new AdvisoryLinksetDocument
        {
            TenantId = "tenant",
            Source = "ghsa",
            AdvisoryId = "GHSA-1234",
            Observations = new List<string> { "obs-1" },
            Confidence = 0.5,
            Conflicts = new List<AdvisoryLinksetConflictDocument>
            {
                new()
                {
                    Field = "references",
                    Reason = "mismatch",
                    Values = new List<string> { "url1", "url2" },
                    SourceIds = new List<string> { "src-a", "src-b" }
                }
            },
            CreatedAt = DateTime.UtcNow
        };

        var method = typeof(ConcelierMongoLinksetStore).GetMethod(
            "FromDocument",
            BindingFlags.NonPublic | BindingFlags.Static);

        Assert.NotNull(method);

        var model = (AdvisoryLinkset)method!.Invoke(null, new object?[] { doc })!;

        Assert.Equal(0.5, model.Confidence);
        Assert.NotNull(model.Conflicts);
        Assert.Single(model.Conflicts!);
        Assert.Equal("references", model.Conflicts![0].Field);
        Assert.Equal(new[] { "src-a", "src-b" }, model.Conflicts![0].SourceIds);
    }

    [Fact]
    public async Task FindByTenantAsync_OrdersByCreatedAtThenAdvisoryId()
    {
        await _fixture.Database.DropCollectionAsync(MongoStorageDefaults.Collections.AdvisoryLinksets);

        var collection = _fixture.Database.GetCollection<AdvisoryLinksetDocument>(MongoStorageDefaults.Collections.AdvisoryLinksets);
        var store = new ConcelierMongoLinksetStore(collection);

        var now = DateTimeOffset.UtcNow;
        var linksets = new[]
        {
            new AdvisoryLinkset("Tenant-A", "src", "ADV-002", ImmutableArray.Create("obs-1"), null, null, null, null, now, "job-1"),
            new AdvisoryLinkset("Tenant-A", "src", "ADV-001", ImmutableArray.Create("obs-2"), null, null, null, null, now, "job-2"),
            new AdvisoryLinkset("Tenant-A", "src", "ADV-003", ImmutableArray.Create("obs-3"), null, null, null, null, now.AddMinutes(-5), "job-3")
        };

        foreach (var linkset in linksets)
        {
            await store.UpsertAsync(linkset, CancellationToken.None);
        }

        var results = await store.FindByTenantAsync("TENANT-A", null, null, cursor: null, limit: 10, cancellationToken: CancellationToken.None);

        Assert.Equal(new[] { "ADV-001", "ADV-002", "ADV-003" }, results.Select(r => r.AdvisoryId));
    }

    [Fact]
    public async Task FindByTenantAsync_AppliesCursorForDeterministicPaging()
    {
        await _fixture.Database.DropCollectionAsync(MongoStorageDefaults.Collections.AdvisoryLinksets);

        var collection = _fixture.Database.GetCollection<AdvisoryLinksetDocument>(MongoStorageDefaults.Collections.AdvisoryLinksets);
        var store = new ConcelierMongoLinksetStore(collection);

        var now = DateTimeOffset.UtcNow;
        var firstPage = new[]
        {
            new AdvisoryLinkset("tenant-a", "src", "ADV-010", ImmutableArray.Create("obs-1"), null, null, null, null, now, "job-1"),
            new AdvisoryLinkset("tenant-a", "src", "ADV-020", ImmutableArray.Create("obs-2"), null, null, null, null, now, "job-2"),
            new AdvisoryLinkset("tenant-a", "src", "ADV-030", ImmutableArray.Create("obs-3"), null, null, null, null, now.AddMinutes(-10), "job-3")
        };

        foreach (var linkset in firstPage)
        {
            await store.UpsertAsync(linkset, CancellationToken.None);
        }

        var initial = await store.FindByTenantAsync("tenant-a", null, null, cursor: null, limit: 10, cancellationToken: CancellationToken.None);
        var cursor = new AdvisoryLinksetCursor(initial[1].CreatedAt, initial[1].AdvisoryId);

        var paged = await store.FindByTenantAsync("tenant-a", null, null, cursor, limit: 10, cancellationToken: CancellationToken.None);

        Assert.Single(paged);
        Assert.Equal("ADV-030", paged[0].AdvisoryId);
    }

    [Fact]
    public async Task Upsert_NormalizesTenantToLowerInvariant()
    {
        await _fixture.Database.DropCollectionAsync(MongoStorageDefaults.Collections.AdvisoryLinksets);

        var collection = _fixture.Database.GetCollection<AdvisoryLinksetDocument>(MongoStorageDefaults.Collections.AdvisoryLinksets);
        var store = new ConcelierMongoLinksetStore(collection);

        var linkset = new AdvisoryLinkset("Tenant-A", "ghsa", "GHSA-1", ImmutableArray.Create("obs-1"), null, null, null, null, DateTimeOffset.UtcNow, "job-1");
        await store.UpsertAsync(linkset, CancellationToken.None);

        var fetched = await collection.Find(Builders<AdvisoryLinksetDocument>.Filter.Empty).FirstOrDefaultAsync();

        Assert.NotNull(fetched);
        Assert.Equal("tenant-a", fetched!.TenantId);

        var results = await store.FindByTenantAsync("TENANT-A", null, null, cursor: null, limit: 10, cancellationToken: CancellationToken.None);
        Assert.Single(results);
        Assert.Equal("GHSA-1", results[0].AdvisoryId);
    }
}
@@ -1,35 +0,0 @@
using Microsoft.Extensions.Logging.Abstractions;
using StellaOps.Concelier.Storage.Mongo.MergeEvents;

namespace StellaOps.Concelier.Storage.Mongo.Tests;

[Collection("mongo-fixture")]
public sealed class MergeEventStoreTests : IClassFixture<MongoIntegrationFixture>
{
    private readonly MongoIntegrationFixture _fixture;

    public MergeEventStoreTests(MongoIntegrationFixture fixture)
    {
        _fixture = fixture;
    }

    [Fact]
    public async Task AppendAndReadMergeEvents()
    {
        var store = new MergeEventStore(_fixture.Database, NullLogger<MergeEventStore>.Instance);
        var record = new MergeEventRecord(
            Guid.NewGuid(),
            "ADV-1",
            new byte[] { 0x01 },
            new byte[] { 0x02 },
            DateTimeOffset.UtcNow,
            new List<Guid> { Guid.NewGuid() },
            Array.Empty<MergeFieldDecision>());

        await store.AppendAsync(record, CancellationToken.None);

        var recent = await store.GetRecentAsync("ADV-1", 10, CancellationToken.None);
        Assert.Single(recent);
        Assert.Equal(record.AfterHash, recent[0].AfterHash);
    }
}
@@ -1,40 +0,0 @@
using System.Threading.Tasks;
using MongoDB.Bson;
using MongoDB.Driver;
using StellaOps.Concelier.Storage.Mongo.Migrations;
using StellaOps.Concelier.Testing;
using Xunit;

namespace StellaOps.Concelier.Storage.Mongo.Tests.Migrations;

[Collection("mongo-fixture")]
public sealed class EnsureAdvisoryLinksetsTenantLowerMigrationTests : IClassFixture<MongoIntegrationFixture>
{
    private readonly MongoIntegrationFixture _fixture;

    public EnsureAdvisoryLinksetsTenantLowerMigrationTests(MongoIntegrationFixture fixture)
    {
        _fixture = fixture;
    }

    [Fact]
    public async Task ApplyAsync_LowersTenantIds()
    {
        await _fixture.Database.DropCollectionAsync(MongoStorageDefaults.Collections.AdvisoryLinksets);
        var collection = _fixture.Database.GetCollection<BsonDocument>(MongoStorageDefaults.Collections.AdvisoryLinksets);

        await collection.InsertManyAsync(new[]
        {
            new BsonDocument { { "TenantId", "Tenant-A" }, { "Source", "src" }, { "AdvisoryId", "ADV-1" }, { "Observations", new BsonArray() } },
            new BsonDocument { { "TenantId", "tenant-b" }, { "Source", "src" }, { "AdvisoryId", "ADV-2" }, { "Observations", new BsonArray() } },
            new BsonDocument { { "Source", "src" }, { "AdvisoryId", "ADV-3" }, { "Observations", new BsonArray() } } // missing tenant should be ignored
        });

        var migration = new EnsureAdvisoryLinksetsTenantLowerMigration();
        await migration.ApplyAsync(_fixture.Database, default);

        var all = await collection.Find(FilterDefinition<BsonDocument>.Empty).ToListAsync();
        Assert.Contains(all, doc => doc["TenantId"] == "tenant-a");
        Assert.Contains(all, doc => doc["TenantId"] == "tenant-b");
    }
}
@@ -1,346 +0,0 @@
using System;
using System.Collections.Generic;
using System.Collections.Immutable;
using System.Linq;
using System.Text.Json;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.Extensions.Logging.Abstractions;
using MongoDB.Bson;
using MongoDB.Bson.Serialization;
using MongoDB.Driver;
using StellaOps.Concelier.RawModels;
using StellaOps.Concelier.Storage.Mongo;
using StellaOps.Concelier.Storage.Mongo.Migrations;
using StellaOps.Concelier.Storage.Mongo.Observations;
using StellaOps.Concelier.Storage.Mongo.Raw;
using Xunit;

namespace StellaOps.Concelier.Storage.Mongo.Tests.Migrations;

[Collection("mongo-fixture")]
public sealed class EnsureAdvisoryObservationsRawLinksetMigrationTests
{
    private readonly MongoIntegrationFixture _fixture;

    public EnsureAdvisoryObservationsRawLinksetMigrationTests(MongoIntegrationFixture fixture)
    {
        _fixture = fixture;
    }

    [Fact]
    public async Task ApplyAsync_BackfillsRawLinksetFromRawDocument()
    {
        var databaseName = $"concelier-rawlinkset-{Guid.NewGuid():N}";
        var database = _fixture.Client.GetDatabase(databaseName);
        await database.CreateCollectionAsync(MongoStorageDefaults.Collections.Migrations);
        await database.CreateCollectionAsync(MongoStorageDefaults.Collections.AdvisoryObservations);

        try
        {
            var rawRepository = new MongoAdvisoryRawRepository(
                database,
                TimeProvider.System,
                NullLogger<MongoAdvisoryRawRepository>.Instance);

            var rawDocument = RawDocumentFactory.CreateAdvisory(
                tenant: "tenant-a",
                source: new RawSourceMetadata("Vendor-X", "connector-y", "1.0.0", "stable"),
                upstream: new RawUpstreamMetadata(
                    UpstreamId: "GHSA-2025-0001",
                    DocumentVersion: "v1",
                    RetrievedAt: DateTimeOffset.Parse("2025-10-29T12:34:56Z"),
                    ContentHash: "sha256:abc123",
                    Signature: new RawSignatureMetadata(true, "dsse", "key1", "sig1"),
                    Provenance: ImmutableDictionary.CreateRange(new[] { new KeyValuePair<string, string>("api", "https://example.test/api") })),
                content: new RawContent(
                    Format: "OSV",
                    SpecVersion: "1.0.0",
                    Raw: ParseJsonElement("""{"id":"GHSA-2025-0001"}"""),
                    Encoding: null),
                identifiers: new RawIdentifiers(
                    Aliases: ImmutableArray.Create("CVE-2025-0001", "cve-2025-0001"),
                    PrimaryId: "CVE-2025-0001"),
                linkset: new RawLinkset
                {
                    Aliases = ImmutableArray.Create("GHSA-xxxx-yyyy"),
                    PackageUrls = ImmutableArray.Create("pkg:npm/example@1.0.0"),
                    Cpes = ImmutableArray.Create("cpe:/a:example:product:1.0"),
                    References = ImmutableArray.Create(new RawReference("advisory", "https://example.test/advisory", "vendor")),
                    ReconciledFrom = ImmutableArray.Create("connector-y"),
                    Notes = ImmutableDictionary.CreateRange(new[] { new KeyValuePair<string, string>("range-fixed", "1.0.1") })
                },
                advisoryKey: "CVE-2025-0001",
                links: ImmutableArray.Create(
                    new RawLink("CVE", "CVE-2025-0001"),
                    new RawLink("GHSA", "GHSA-2025-0001"),
                    new RawLink("PRIMARY", "CVE-2025-0001")));

            await rawRepository.UpsertAsync(rawDocument, CancellationToken.None);

            var expectedRawLinkset = BuildRawLinkset(rawDocument.Identifiers, rawDocument.Linkset);
            var canonicalAliases = ImmutableArray.Create("cve-2025-0001", "ghsa-xxxx-yyyy");
            var canonicalPurls = rawDocument.Linkset.PackageUrls;
            var canonicalCpes = rawDocument.Linkset.Cpes;
            var canonicalReferences = rawDocument.Linkset.References;

            var observationId = "tenant-a:vendor-x:ghsa-2025-0001:sha256-abc123";
            var observationBson = BuildObservationDocument(
                observationId,
                rawDocument,
                canonicalAliases,
                canonicalPurls,
                canonicalCpes,
                canonicalReferences,
                rawDocument.Upstream.RetrievedAt,
                includeRawLinkset: false);
            await database
                .GetCollection<BsonDocument>(MongoStorageDefaults.Collections.AdvisoryObservations)
                .InsertOneAsync(observationBson);

            var migration = new EnsureAdvisoryObservationsRawLinksetMigration();
            await migration.ApplyAsync(database, CancellationToken.None);

            var storedBson = await database
                .GetCollection<BsonDocument>(MongoStorageDefaults.Collections.AdvisoryObservations)
                .Find(Builders<BsonDocument>.Filter.Eq("_id", observationId))
                .FirstOrDefaultAsync();

            Assert.NotNull(storedBson);
            Assert.True(storedBson.TryGetValue("rawLinkset", out var rawLinksetValue));

            var storedDocument = BsonSerializer.Deserialize<AdvisoryObservationDocument>(storedBson);
            var storedObservation = AdvisoryObservationDocumentFactory.ToModel(storedDocument);

            Assert.True(expectedRawLinkset.Aliases.SequenceEqual(storedObservation.RawLinkset.Aliases, StringComparer.Ordinal));
            Assert.True(expectedRawLinkset.PackageUrls.SequenceEqual(storedObservation.RawLinkset.PackageUrls, StringComparer.Ordinal));
            Assert.True(expectedRawLinkset.Cpes.SequenceEqual(storedObservation.RawLinkset.Cpes, StringComparer.Ordinal));
            Assert.True(expectedRawLinkset.References.SequenceEqual(storedObservation.RawLinkset.References));
            Assert.Equal(expectedRawLinkset.Notes, storedObservation.RawLinkset.Notes);
        }
        finally
        {
            await _fixture.Client.DropDatabaseAsync(databaseName);
        }
    }

    [Fact]
    public async Task ApplyAsync_ThrowsWhenRawDocumentMissing()
    {
        var databaseName = $"concelier-rawlinkset-missing-{Guid.NewGuid():N}";
        var database = _fixture.Client.GetDatabase(databaseName);
        await database.CreateCollectionAsync(MongoStorageDefaults.Collections.Migrations);
        await database.CreateCollectionAsync(MongoStorageDefaults.Collections.AdvisoryObservations);

        try
        {
            var rawDocument = RawDocumentFactory.CreateAdvisory(
                tenant: "tenant-b",
                source: new RawSourceMetadata("Vendor-Y", "connector-z", "2.0.0", "stable"),
                upstream: new RawUpstreamMetadata(
                    UpstreamId: "GHSA-9999-0001",
                    DocumentVersion: "v2",
                    RetrievedAt: DateTimeOffset.Parse("2025-10-30T00:00:00Z"),
                    ContentHash: "sha256:def456",
                    Signature: new RawSignatureMetadata(false),
                    Provenance: ImmutableDictionary<string, string>.Empty),
                content: new RawContent(
|
||||
Format: "OSV",
|
||||
SpecVersion: "1.0.0",
|
||||
Raw: ParseJsonElement("""{"id":"GHSA-9999-0001"}"""),
|
||||
Encoding: null),
|
||||
identifiers: new RawIdentifiers(
|
||||
Aliases: ImmutableArray<string>.Empty,
|
||||
PrimaryId: "GHSA-9999-0001"),
|
||||
linkset: new RawLinkset(),
|
||||
advisoryKey: "GHSA-9999-0001",
|
||||
links: ImmutableArray.Create(
|
||||
new RawLink("GHSA", "GHSA-9999-0001"),
|
||||
new RawLink("PRIMARY", "GHSA-9999-0001")));
|
||||
|
||||
var observationId = "tenant-b:vendor-y:ghsa-9999-0001:sha256-def456";
|
||||
var document = BuildObservationDocument(
|
||||
observationId,
|
||||
rawDocument,
|
||||
ImmutableArray<string>.Empty,
|
||||
ImmutableArray<string>.Empty,
|
||||
ImmutableArray<string>.Empty,
|
||||
ImmutableArray<RawReference>.Empty,
|
||||
rawDocument.Upstream.RetrievedAt,
|
||||
includeRawLinkset: false);
|
||||
|
||||
await database
|
||||
.GetCollection<BsonDocument>(MongoStorageDefaults.Collections.AdvisoryObservations)
|
||||
.InsertOneAsync(document);
|
||||
|
||||
var migration = new EnsureAdvisoryObservationsRawLinksetMigration();
|
||||
|
||||
await Assert.ThrowsAsync<InvalidOperationException>(
|
||||
() => migration.ApplyAsync(database, CancellationToken.None));
|
||||
}
|
||||
finally
|
||||
{
|
||||
await _fixture.Client.DropDatabaseAsync(databaseName);
|
||||
}
|
||||
}
|
||||
|
||||
private static BsonDocument BuildObservationDocument(
|
||||
string observationId,
|
||||
AdvisoryRawDocument rawDocument,
|
||||
ImmutableArray<string> canonicalAliases,
|
||||
ImmutableArray<string> canonicalPurls,
|
||||
ImmutableArray<string> canonicalCpes,
|
||||
ImmutableArray<RawReference> canonicalReferences,
|
||||
DateTimeOffset createdAt,
|
||||
bool includeRawLinkset,
|
||||
RawLinkset? rawLinkset = null)
|
||||
{
|
||||
var sourceDocument = new BsonDocument
|
||||
{
|
||||
{ "vendor", rawDocument.Source.Vendor },
|
||||
{ "stream", string.IsNullOrWhiteSpace(rawDocument.Source.Stream) ? rawDocument.Source.Connector : rawDocument.Source.Stream! },
|
||||
{ "api", rawDocument.Upstream.Provenance.TryGetValue("api", out var api) ? api : rawDocument.Source.Connector }
|
||||
};
|
||||
if (!string.IsNullOrWhiteSpace(rawDocument.Source.ConnectorVersion))
|
||||
{
|
||||
sourceDocument["collectorVersion"] = rawDocument.Source.ConnectorVersion;
|
||||
}
|
||||
|
||||
var signatureDocument = new BsonDocument
|
||||
{
|
||||
{ "present", rawDocument.Upstream.Signature.Present }
|
||||
};
|
||||
if (!string.IsNullOrWhiteSpace(rawDocument.Upstream.Signature.Format))
|
||||
{
|
||||
signatureDocument["format"] = rawDocument.Upstream.Signature.Format;
|
||||
}
|
||||
if (!string.IsNullOrWhiteSpace(rawDocument.Upstream.Signature.KeyId))
|
||||
{
|
||||
signatureDocument["keyId"] = rawDocument.Upstream.Signature.KeyId;
|
||||
}
|
||||
if (!string.IsNullOrWhiteSpace(rawDocument.Upstream.Signature.Signature))
|
||||
{
|
||||
signatureDocument["signature"] = rawDocument.Upstream.Signature.Signature;
|
||||
}
|
||||
|
||||
var upstreamDocument = new BsonDocument
|
||||
{
|
||||
{ "upstream_id", rawDocument.Upstream.UpstreamId },
|
||||
{ "document_version", rawDocument.Upstream.DocumentVersion },
|
||||
{ "fetchedAt", rawDocument.Upstream.RetrievedAt.UtcDateTime },
|
||||
{ "receivedAt", rawDocument.Upstream.RetrievedAt.UtcDateTime },
|
||||
{ "contentHash", rawDocument.Upstream.ContentHash },
|
||||
{ "signature", signatureDocument },
|
||||
{ "metadata", new BsonDocument(rawDocument.Upstream.Provenance) }
|
||||
};
|
||||
|
||||
var contentDocument = new BsonDocument
|
||||
{
|
||||
{ "format", rawDocument.Content.Format },
|
||||
{ "raw", BsonDocument.Parse(rawDocument.Content.Raw.GetRawText()) }
|
||||
};
|
||||
if (!string.IsNullOrWhiteSpace(rawDocument.Content.SpecVersion))
|
||||
{
|
||||
contentDocument["specVersion"] = rawDocument.Content.SpecVersion;
|
||||
}
|
||||
|
||||
var canonicalLinkset = new BsonDocument
|
||||
{
|
||||
{ "aliases", new BsonArray(canonicalAliases) },
|
||||
{ "purls", new BsonArray(canonicalPurls) },
|
||||
{ "cpes", new BsonArray(canonicalCpes) },
|
||||
{ "references", new BsonArray(canonicalReferences.Select(reference => new BsonDocument
|
||||
{
|
||||
{ "type", reference.Type },
|
||||
{ "url", reference.Url }
|
||||
})) }
|
||||
};
|
||||
|
||||
var document = new BsonDocument
|
||||
{
|
||||
{ "_id", observationId },
|
||||
{ "tenant", rawDocument.Tenant },
|
||||
{ "source", sourceDocument },
|
||||
{ "upstream", upstreamDocument },
|
||||
{ "content", contentDocument },
|
||||
{ "linkset", canonicalLinkset },
|
||||
{ "createdAt", createdAt.UtcDateTime },
|
||||
{ "attributes", new BsonDocument() }
|
||||
};
|
||||
|
||||
if (includeRawLinkset)
|
||||
{
|
||||
var actualRawLinkset = rawLinkset ?? throw new ArgumentNullException(nameof(rawLinkset));
|
||||
document["rawLinkset"] = new BsonDocument
|
||||
{
|
||||
{ "aliases", new BsonArray(actualRawLinkset.Aliases) },
|
||||
{ "purls", new BsonArray(actualRawLinkset.PackageUrls) },
|
||||
{ "cpes", new BsonArray(actualRawLinkset.Cpes) },
|
||||
{ "references", new BsonArray(actualRawLinkset.References.Select(reference => new BsonDocument
|
||||
{
|
||||
{ "type", reference.Type },
|
||||
{ "url", reference.Url },
|
||||
{ "source", reference.Source }
|
||||
})) },
|
||||
{ "reconciled_from", new BsonArray(actualRawLinkset.ReconciledFrom) },
|
||||
{ "notes", new BsonDocument(actualRawLinkset.Notes) }
|
||||
};
|
||||
}
|
||||
|
||||
return document;
|
||||
}
|
||||
|
||||
private static JsonElement ParseJsonElement(string json)
|
||||
{
|
||||
using var document = JsonDocument.Parse(json);
|
||||
return document.RootElement.Clone();
|
||||
}
|
||||
|
||||
private static RawLinkset BuildRawLinkset(RawIdentifiers identifiers, RawLinkset linkset)
|
||||
{
|
||||
var aliasBuilder = ImmutableArray.CreateBuilder<string>();
|
||||
|
||||
if (!string.IsNullOrWhiteSpace(identifiers.PrimaryId))
|
||||
{
|
||||
aliasBuilder.Add(identifiers.PrimaryId);
|
||||
}
|
||||
|
||||
if (!identifiers.Aliases.IsDefaultOrEmpty)
|
||||
{
|
||||
foreach (var alias in identifiers.Aliases)
|
||||
{
|
||||
if (!string.IsNullOrEmpty(alias))
|
||||
{
|
||||
aliasBuilder.Add(alias);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (!linkset.Aliases.IsDefaultOrEmpty)
|
||||
{
|
||||
foreach (var alias in linkset.Aliases)
|
||||
{
|
||||
if (!string.IsNullOrEmpty(alias))
|
||||
{
|
||||
aliasBuilder.Add(alias);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
static ImmutableArray<string> EnsureArray(ImmutableArray<string> values)
|
||||
=> values.IsDefault ? ImmutableArray<string>.Empty : values;
|
||||
|
||||
static ImmutableArray<RawReference> EnsureReferences(ImmutableArray<RawReference> values)
|
||||
=> values.IsDefault ? ImmutableArray<RawReference>.Empty : values;
|
||||
|
||||
return linkset with
|
||||
{
|
||||
Aliases = aliasBuilder.ToImmutable(),
|
||||
PackageUrls = EnsureArray(linkset.PackageUrls),
|
||||
Cpes = EnsureArray(linkset.Cpes),
|
||||
References = EnsureReferences(linkset.References),
|
||||
ReconciledFrom = EnsureArray(linkset.ReconciledFrom),
|
||||
Notes = linkset.Notes ?? ImmutableDictionary<string, string>.Empty
|
||||
};
|
||||
}
|
||||
}
|
||||
@@ -1,706 +0,0 @@
|
||||
using System;
|
||||
using System.Linq;
|
||||
using System.Threading;
|
||||
using System.Threading.Tasks;
|
||||
using Microsoft.Extensions.Logging.Abstractions;
|
||||
using Microsoft.Extensions.Options;
|
||||
using MongoDB.Bson;
|
||||
using MongoDB.Driver;
|
||||
using StellaOps.Concelier.Storage.Mongo;
|
||||
using StellaOps.Concelier.Storage.Mongo.Migrations;
|
||||
using Xunit;
|
||||
|
||||
namespace StellaOps.Concelier.Storage.Mongo.Tests.Migrations;
|
||||
|
||||
[Collection("mongo-fixture")]
|
||||
public sealed class MongoMigrationRunnerTests
|
||||
{
|
||||
private readonly MongoIntegrationFixture _fixture;
|
||||
|
||||
public MongoMigrationRunnerTests(MongoIntegrationFixture fixture)
|
||||
{
|
||||
_fixture = fixture;
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task RunAsync_AppliesPendingMigrationsOnce()
|
||||
{
|
||||
var databaseName = $"concelier-migrations-{Guid.NewGuid():N}";
|
||||
var database = _fixture.Client.GetDatabase(databaseName);
|
||||
await database.CreateCollectionAsync(MongoStorageDefaults.Collections.Migrations);
|
||||
|
||||
try
|
||||
{
|
||||
var migration = new TestMigration();
|
||||
var runner = new MongoMigrationRunner(
|
||||
database,
|
||||
new IMongoMigration[] { migration },
|
||||
NullLogger<MongoMigrationRunner>.Instance,
|
||||
TimeProvider.System);
|
||||
|
||||
await runner.RunAsync(CancellationToken.None);
|
||||
await runner.RunAsync(CancellationToken.None);
|
||||
|
||||
Assert.Equal(1, migration.ApplyCount);
|
||||
|
||||
var count = await database
|
||||
.GetCollection<BsonDocument>(MongoStorageDefaults.Collections.Migrations)
|
||||
.CountDocumentsAsync(FilterDefinition<BsonDocument>.Empty);
|
||||
Assert.Equal(1, count);
|
||||
}
|
||||
finally
|
||||
{
|
||||
await _fixture.Client.DropDatabaseAsync(databaseName);
|
||||
}
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task EnsureDocumentExpiryIndexesMigration_CreatesTtlIndexWhenRetentionEnabled()
|
||||
{
|
||||
var databaseName = $"concelier-doc-ttl-{Guid.NewGuid():N}";
|
||||
var database = _fixture.Client.GetDatabase(databaseName);
|
||||
await database.CreateCollectionAsync(MongoStorageDefaults.Collections.Document);
|
||||
await database.CreateCollectionAsync(MongoStorageDefaults.Collections.Migrations);
|
||||
|
||||
try
|
||||
{
|
||||
var options = Options.Create(new MongoStorageOptions
|
||||
{
|
||||
RawDocumentRetention = TimeSpan.FromDays(45),
|
||||
RawDocumentRetentionTtlGrace = TimeSpan.FromHours(12),
|
||||
});
|
||||
|
||||
var migration = new EnsureDocumentExpiryIndexesMigration(options);
|
||||
var runner = new MongoMigrationRunner(
|
||||
database,
|
||||
new IMongoMigration[] { migration },
|
||||
NullLogger<MongoMigrationRunner>.Instance,
|
||||
TimeProvider.System);
|
||||
|
||||
await runner.RunAsync(CancellationToken.None);
|
||||
|
||||
var indexes = await database
|
||||
.GetCollection<BsonDocument>(MongoStorageDefaults.Collections.Document)
|
||||
.Indexes.ListAsync();
|
||||
var indexList = await indexes.ToListAsync();
|
||||
|
||||
var ttlIndex = indexList.Single(x => x["name"].AsString == "document_expiresAt_ttl");
|
||||
Assert.Equal(0, ttlIndex["expireAfterSeconds"].ToDouble());
|
||||
Assert.True(ttlIndex["partialFilterExpression"].AsBsonDocument["expiresAt"].AsBsonDocument["$exists"].ToBoolean());
|
||||
}
|
||||
finally
|
||||
{
|
||||
await _fixture.Client.DropDatabaseAsync(databaseName);
|
||||
}
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task EnsureDocumentExpiryIndexesMigration_DropsTtlIndexWhenRetentionDisabled()
|
||||
{
|
||||
var databaseName = $"concelier-doc-notl-{Guid.NewGuid():N}";
|
||||
var database = _fixture.Client.GetDatabase(databaseName);
|
||||
await database.CreateCollectionAsync(MongoStorageDefaults.Collections.Document);
|
||||
await database.CreateCollectionAsync(MongoStorageDefaults.Collections.Migrations);
|
||||
|
||||
try
|
||||
{
|
||||
var collection = database.GetCollection<BsonDocument>(MongoStorageDefaults.Collections.Document);
|
||||
var keys = Builders<BsonDocument>.IndexKeys.Ascending("expiresAt");
|
||||
var options = new CreateIndexOptions<BsonDocument>
|
||||
{
|
||||
Name = "document_expiresAt_ttl",
|
||||
ExpireAfter = TimeSpan.Zero,
|
||||
PartialFilterExpression = Builders<BsonDocument>.Filter.Exists("expiresAt", true),
|
||||
};
|
||||
|
||||
await collection.Indexes.CreateOneAsync(new CreateIndexModel<BsonDocument>(keys, options));
|
||||
|
||||
var migration = new EnsureDocumentExpiryIndexesMigration(Options.Create(new MongoStorageOptions
|
||||
{
|
||||
RawDocumentRetention = TimeSpan.Zero,
|
||||
}));
|
||||
|
||||
var runner = new MongoMigrationRunner(
|
||||
database,
|
||||
new IMongoMigration[] { migration },
|
||||
NullLogger<MongoMigrationRunner>.Instance,
|
||||
TimeProvider.System);
|
||||
|
||||
await runner.RunAsync(CancellationToken.None);
|
||||
|
||||
var indexes = await collection.Indexes.ListAsync();
|
||||
var indexList = await indexes.ToListAsync();
|
||||
|
||||
Assert.DoesNotContain(indexList, x => x["name"].AsString == "document_expiresAt_ttl");
|
||||
var nonTtl = indexList.Single(x => x["name"].AsString == "document_expiresAt");
|
||||
Assert.False(nonTtl.Contains("expireAfterSeconds"));
|
||||
}
|
||||
finally
|
||||
{
|
||||
await _fixture.Client.DropDatabaseAsync(databaseName);
|
||||
}
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task EnsureGridFsExpiryIndexesMigration_CreatesTtlIndexWhenRetentionEnabled()
|
||||
{
|
||||
var databaseName = $"concelier-gridfs-ttl-{Guid.NewGuid():N}";
|
||||
var database = _fixture.Client.GetDatabase(databaseName);
|
||||
await database.CreateCollectionAsync("documents.files");
|
||||
await database.CreateCollectionAsync(MongoStorageDefaults.Collections.Migrations);
|
||||
|
||||
try
|
||||
{
|
||||
var migration = new EnsureGridFsExpiryIndexesMigration(Options.Create(new MongoStorageOptions
|
||||
{
|
||||
RawDocumentRetention = TimeSpan.FromDays(30),
|
||||
}));
|
||||
|
||||
var runner = new MongoMigrationRunner(
|
||||
database,
|
||||
new IMongoMigration[] { migration },
|
||||
NullLogger<MongoMigrationRunner>.Instance,
|
||||
TimeProvider.System);
|
||||
|
||||
await runner.RunAsync(CancellationToken.None);
|
||||
|
||||
var indexes = await database.GetCollection<BsonDocument>("documents.files").Indexes.ListAsync();
|
||||
var indexList = await indexes.ToListAsync();
|
||||
|
||||
var ttlIndex = indexList.Single(x => x["name"].AsString == "gridfs_files_expiresAt_ttl");
|
||||
Assert.Equal(0, ttlIndex["expireAfterSeconds"].ToDouble());
|
||||
}
|
||||
finally
|
||||
{
|
||||
await _fixture.Client.DropDatabaseAsync(databaseName);
|
||||
}
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task EnsureGridFsExpiryIndexesMigration_DropsTtlIndexWhenRetentionDisabled()
|
||||
{
|
||||
var databaseName = $"concelier-gridfs-notl-{Guid.NewGuid():N}";
|
||||
var database = _fixture.Client.GetDatabase(databaseName);
|
||||
await database.CreateCollectionAsync("documents.files");
|
||||
await database.CreateCollectionAsync(MongoStorageDefaults.Collections.Migrations);
|
||||
|
||||
try
|
||||
{
|
||||
var collection = database.GetCollection<BsonDocument>("documents.files");
|
||||
var keys = Builders<BsonDocument>.IndexKeys.Ascending("metadata.expiresAt");
|
||||
var options = new CreateIndexOptions<BsonDocument>
|
||||
{
|
||||
Name = "gridfs_files_expiresAt_ttl",
|
||||
ExpireAfter = TimeSpan.Zero,
|
||||
PartialFilterExpression = Builders<BsonDocument>.Filter.Exists("metadata.expiresAt", true),
|
||||
};
|
||||
|
||||
await collection.Indexes.CreateOneAsync(new CreateIndexModel<BsonDocument>(keys, options));
|
||||
|
||||
var migration = new EnsureGridFsExpiryIndexesMigration(Options.Create(new MongoStorageOptions
|
||||
{
|
||||
RawDocumentRetention = TimeSpan.Zero,
|
||||
}));
|
||||
|
||||
var runner = new MongoMigrationRunner(
|
||||
database,
|
||||
new IMongoMigration[] { migration },
|
||||
NullLogger<MongoMigrationRunner>.Instance,
|
||||
TimeProvider.System);
|
||||
|
||||
await runner.RunAsync(CancellationToken.None);
|
||||
|
||||
var indexes = await collection.Indexes.ListAsync();
|
||||
var indexList = await indexes.ToListAsync();
|
||||
|
||||
Assert.DoesNotContain(indexList, x => x["name"].AsString == "gridfs_files_expiresAt_ttl");
|
||||
}
|
||||
finally
|
||||
{
|
||||
await _fixture.Client.DropDatabaseAsync(databaseName);
|
||||
}
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task EnsureAdvisoryEventCollectionsMigration_CreatesIndexes()
|
||||
{
|
||||
var databaseName = $"concelier-advisory-events-{Guid.NewGuid():N}";
|
||||
var database = _fixture.Client.GetDatabase(databaseName);
|
||||
await database.CreateCollectionAsync(MongoStorageDefaults.Collections.AdvisoryStatements);
|
||||
await database.CreateCollectionAsync(MongoStorageDefaults.Collections.AdvisoryConflicts);
|
||||
await database.CreateCollectionAsync(MongoStorageDefaults.Collections.Migrations);
|
||||
|
||||
try
|
||||
{
|
||||
var migration = new EnsureAdvisoryEventCollectionsMigration();
|
||||
var runner = new MongoMigrationRunner(
|
||||
database,
|
||||
new IMongoMigration[] { migration },
|
||||
NullLogger<MongoMigrationRunner>.Instance,
|
||||
TimeProvider.System);
|
||||
|
||||
await runner.RunAsync(CancellationToken.None);
|
||||
|
||||
var statementIndexes = await database
|
||||
.GetCollection<BsonDocument>(MongoStorageDefaults.Collections.AdvisoryStatements)
|
||||
.Indexes
|
||||
.ListAsync();
|
||||
var statementIndexNames = (await statementIndexes.ToListAsync()).Select(x => x["name"].AsString).ToArray();
|
||||
|
||||
Assert.Contains("advisory_statements_vulnerability_asof_desc", statementIndexNames);
|
||||
Assert.Contains("advisory_statements_statementHash_unique", statementIndexNames);
|
||||
|
||||
var conflictIndexes = await database
|
||||
.GetCollection<BsonDocument>(MongoStorageDefaults.Collections.AdvisoryConflicts)
|
||||
.Indexes
|
||||
.ListAsync();
|
||||
var conflictIndexNames = (await conflictIndexes.ToListAsync()).Select(x => x["name"].AsString).ToArray();
|
||||
|
||||
Assert.Contains("advisory_conflicts_vulnerability_asof_desc", conflictIndexNames);
|
||||
Assert.Contains("advisory_conflicts_conflictHash_unique", conflictIndexNames);
|
||||
}
|
||||
finally
|
||||
{
|
||||
await _fixture.Client.DropDatabaseAsync(databaseName);
|
||||
}
|
||||
}
|
||||
|
||||
private sealed class TestMigration : IMongoMigration
|
||||
{
|
||||
public int ApplyCount { get; private set; }
|
||||
|
||||
public string Id => "999_test";
|
||||
|
||||
public string Description => "test migration";
|
||||
|
||||
public Task ApplyAsync(IMongoDatabase database, CancellationToken cancellationToken)
|
||||
{
|
||||
ApplyCount++;
|
||||
return Task.CompletedTask;
|
||||
}
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task EnsureAdvisoryRawValidatorMigration_AppliesSchemaWithDefaultOptions()
|
||||
{
|
||||
var databaseName = $"concelier-advisory-validator-{Guid.NewGuid():N}";
|
||||
var database = _fixture.Client.GetDatabase(databaseName);
|
||||
|
||||
try
|
||||
{
|
||||
var migration = new EnsureAdvisoryRawValidatorMigration(Options.Create(new MongoStorageOptions
|
||||
{
|
||||
AdvisoryRawValidator = new MongoCollectionValidatorOptions
|
||||
{
|
||||
Level = MongoValidationLevel.Moderate,
|
||||
Action = MongoValidationAction.Warn,
|
||||
},
|
||||
}));
|
||||
|
||||
var runner = new MongoMigrationRunner(
|
||||
database,
|
||||
new IMongoMigration[] { migration },
|
||||
NullLogger<MongoMigrationRunner>.Instance,
|
||||
TimeProvider.System);
|
||||
|
||||
await runner.RunAsync(CancellationToken.None);
|
||||
|
||||
var collectionInfo = await GetCollectionInfoAsync(database, MongoStorageDefaults.Collections.AdvisoryRaw);
|
||||
var options = collectionInfo["options"].AsBsonDocument;
|
||||
|
||||
Assert.Equal("moderate", options["validationLevel"].AsString);
|
||||
Assert.Equal("warn", options["validationAction"].AsString);
|
||||
|
||||
var schema = options["validator"]["$jsonSchema"].AsBsonDocument;
|
||||
var required = schema["required"].AsBsonArray.Select(x => x.AsString).ToArray();
|
||||
Assert.Contains("tenant", required);
|
||||
Assert.Contains("source", required);
|
||||
Assert.Contains("upstream", required);
|
||||
Assert.Contains("content", required);
|
||||
Assert.Contains("linkset", required);
|
||||
|
||||
var patternProperties = schema["patternProperties"].AsBsonDocument;
|
||||
Assert.True(patternProperties.Contains("^(?i)(severity|cvss|cvss_vector|merged_from|consensus_provider|reachability|asset_criticality|risk_score)$"));
|
||||
Assert.True(patternProperties.Contains("^(?i)effective_"));
|
||||
}
|
||||
finally
|
||||
{
|
||||
await _fixture.Client.DropDatabaseAsync(databaseName);
|
||||
}
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task EnsureAdvisoryRawValidatorMigration_HonorsValidationToggles()
|
||||
{
|
||||
var databaseName = $"advraw-validator-off-{Guid.NewGuid():N}";
|
||||
var database = _fixture.Client.GetDatabase(databaseName);
|
||||
|
||||
try
|
||||
{
|
||||
// Pre-create collection to exercise collMod path.
|
||||
await database.CreateCollectionAsync(MongoStorageDefaults.Collections.AdvisoryRaw);
|
||||
|
||||
var migration = new EnsureAdvisoryRawValidatorMigration(Options.Create(new MongoStorageOptions
|
||||
{
|
||||
AdvisoryRawValidator = new MongoCollectionValidatorOptions
|
||||
{
|
||||
Level = MongoValidationLevel.Off,
|
||||
Action = MongoValidationAction.Error,
|
||||
},
|
||||
}));
|
||||
|
||||
var runner = new MongoMigrationRunner(
|
||||
database,
|
||||
new IMongoMigration[] { migration },
|
||||
NullLogger<MongoMigrationRunner>.Instance,
|
||||
TimeProvider.System);
|
||||
|
||||
await runner.RunAsync(CancellationToken.None);
|
||||
|
||||
var collectionInfo = await GetCollectionInfoAsync(database, MongoStorageDefaults.Collections.AdvisoryRaw);
|
||||
var options = collectionInfo["options"].AsBsonDocument;
|
||||
|
||||
Assert.Equal("off", options["validationLevel"].AsString);
|
||||
Assert.Equal("error", options["validationAction"].AsString);
|
||||
Assert.True(options.Contains("validator"));
|
||||
}
|
||||
finally
|
||||
{
|
||||
await _fixture.Client.DropDatabaseAsync(databaseName);
|
||||
}
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task EnsureAdvisoryRawIdempotencyIndexMigration_CreatesUniqueIndex()
|
||||
{
|
||||
var databaseName = $"advraw-idx-{Guid.NewGuid():N}";
|
||||
var database = _fixture.Client.GetDatabase(databaseName);
|
||||
await database.CreateCollectionAsync(MongoStorageDefaults.Collections.AdvisoryRaw);
|
||||
|
||||
try
|
||||
{
|
||||
var collection = database.GetCollection<BsonDocument>(MongoStorageDefaults.Collections.AdvisoryRaw);
|
||||
await collection.InsertOneAsync(
|
||||
CreateAdvisoryRawDocument(
|
||||
id: "advisory_raw:test:alpha:v1",
|
||||
vendor: "test",
|
||||
upstreamId: "ALPHA",
|
||||
contentHash: "sha256:abc",
|
||||
tenant: "tenant-a",
|
||||
retrievedAt: new DateTime(2025, 1, 1, 0, 0, 0, DateTimeKind.Utc)));
|
||||
|
||||
var migration = new EnsureAdvisoryRawIdempotencyIndexMigration();
|
||||
var runner = new MongoMigrationRunner(
|
||||
database,
|
||||
new IMongoMigration[] { migration },
|
||||
NullLogger<MongoMigrationRunner>.Instance,
|
||||
TimeProvider.System);
|
||||
|
||||
await runner.RunAsync(CancellationToken.None);
|
||||
|
||||
using var cursor = await collection.Indexes.ListAsync();
|
||||
var indexes = await cursor.ToListAsync();
|
||||
var idempotencyIndex = indexes.Single(x => x["name"].AsString == "advisory_raw_idempotency");
|
||||
|
||||
Assert.True(idempotencyIndex["unique"].ToBoolean());
|
||||
|
||||
var key = idempotencyIndex["key"].AsBsonDocument;
|
||||
Assert.Collection(
|
||||
key.Elements,
|
||||
element =>
|
||||
{
|
||||
Assert.Equal("source.vendor", element.Name);
|
||||
Assert.Equal(1, element.Value.AsInt32);
|
||||
},
|
||||
element =>
|
||||
{
|
||||
Assert.Equal("upstream.upstream_id", element.Name);
|
||||
Assert.Equal(1, element.Value.AsInt32);
|
||||
},
|
||||
element =>
|
||||
{
|
||||
Assert.Equal("upstream.content_hash", element.Name);
|
||||
Assert.Equal(1, element.Value.AsInt32);
|
||||
},
|
||||
element =>
|
||||
{
|
||||
Assert.Equal("tenant", element.Name);
|
||||
Assert.Equal(1, element.Value.AsInt32);
|
||||
});
|
||||
}
|
||||
finally
|
||||
{
|
||||
await _fixture.Client.DropDatabaseAsync(databaseName);
|
||||
}
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task EnsureAdvisoryRawIdempotencyIndexMigration_ThrowsWhenDuplicatesExist()
|
||||
{
|
||||
var databaseName = $"advraw-idx-dup-{Guid.NewGuid():N}";
|
||||
var database = _fixture.Client.GetDatabase(databaseName);
|
||||
await database.CreateCollectionAsync(MongoStorageDefaults.Collections.AdvisoryRaw);
|
||||
|
||||
try
|
||||
{
|
||||
var collection = database.GetCollection<BsonDocument>(MongoStorageDefaults.Collections.AdvisoryRaw);
|
||||
|
||||
await collection.InsertManyAsync(new[]
|
||||
{
|
||||
CreateAdvisoryRawDocument(
|
||||
id: "advisory_raw:test:beta:v1",
|
||||
vendor: "test",
|
||||
upstreamId: "BETA",
|
||||
contentHash: "sha256:def",
|
||||
tenant: "tenant-b",
|
||||
retrievedAt: new DateTime(2025, 2, 1, 0, 0, 0, DateTimeKind.Utc)),
|
||||
CreateAdvisoryRawDocument(
|
||||
id: "advisory_raw:test:beta:v2",
|
||||
vendor: "test",
|
||||
upstreamId: "BETA",
|
||||
contentHash: "sha256:def",
|
||||
tenant: "tenant-b",
|
||||
retrievedAt: new DateTime(2025, 2, 2, 0, 0, 0, DateTimeKind.Utc)),
|
||||
});
|
||||
|
||||
var migration = new EnsureAdvisoryRawIdempotencyIndexMigration();
|
||||
var runner = new MongoMigrationRunner(
|
||||
database,
|
||||
new IMongoMigration[] { migration },
|
||||
NullLogger<MongoMigrationRunner>.Instance,
|
||||
TimeProvider.System);
|
||||
|
||||
var exception = await Assert.ThrowsAsync<InvalidOperationException>(() => runner.RunAsync(CancellationToken.None));
|
||||
Assert.Contains("duplicate", exception.Message, StringComparison.OrdinalIgnoreCase);
|
||||
Assert.Contains("advisory_raw", exception.Message, StringComparison.OrdinalIgnoreCase);
|
||||
}
|
||||
finally
|
||||
{
|
||||
await _fixture.Client.DropDatabaseAsync(databaseName);
|
||||
}
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task EnsureAdvisorySupersedesBackfillMigration_BackfillsSupersedesAndCreatesView()
|
||||
{
|
||||
var databaseName = $"advraw-supersedes-{Guid.NewGuid():N}";
|
||||
var database = _fixture.Client.GetDatabase(databaseName);
|
||||
await database.CreateCollectionAsync(MongoStorageDefaults.Collections.Advisory);
|
||||
await database.GetCollection<BsonDocument>(MongoStorageDefaults.Collections.Advisory)
|
||||
.InsertOneAsync(new BsonDocument("advisoryKey", "legacy"), cancellationToken: CancellationToken.None);
|
||||
|
||||
var rawCollection = database.GetCollection<BsonDocument>(MongoStorageDefaults.Collections.AdvisoryRaw);
|
||||
await rawCollection.InsertManyAsync(new[]
|
||||
{
|
||||
CreateAdvisoryRawDocument(
|
||||
id: "advisory_raw:test:gamma:v1",
|
||||
vendor: "test",
|
||||
upstreamId: "GAMMA",
|
||||
contentHash: "sha256:111",
|
||||
tenant: "tenant-c",
|
||||
retrievedAt: new DateTime(2024, 12, 1, 0, 0, 0, DateTimeKind.Utc)),
|
||||
CreateAdvisoryRawDocument(
|
||||
id: "advisory_raw:test:gamma:v2",
|
||||
vendor: "test",
|
||||
upstreamId: "GAMMA",
|
||||
contentHash: "sha256:222",
|
||||
tenant: "tenant-c",
|
||||
retrievedAt: new DateTime(2024, 12, 10, 0, 0, 0, DateTimeKind.Utc)),
|
||||
CreateAdvisoryRawDocument(
|
||||
id: "advisory_raw:test:gamma:v3",
|
||||
vendor: "test",
|
||||
upstreamId: "GAMMA",
|
||||
contentHash: "sha256:333",
|
||||
tenant: "tenant-c",
|
||||
retrievedAt: new DateTime(2024, 12, 20, 0, 0, 0, DateTimeKind.Utc)),
|
||||
});
|
||||
|
||||
try
|
||||
{
|
||||
var migration = new EnsureAdvisorySupersedesBackfillMigration();
|
||||
var runner = new MongoMigrationRunner(
|
||||
database,
|
||||
new IMongoMigration[] { migration },
|
||||
NullLogger<MongoMigrationRunner>.Instance,
|
||||
TimeProvider.System);
|
||||
|
||||
await runner.RunAsync(CancellationToken.None);
|
||||
|
||||
var info = await GetCollectionInfoAsync(database, MongoStorageDefaults.Collections.Advisory);
|
||||
Assert.NotNull(info);
|
||||
Assert.Equal("view", info!["type"].AsString);
|
||||
Assert.True(ViewTargets(info!, "advisory_backup_20251028"));
|
||||
|
||||
var docs = await rawCollection
|
||||
.Find(Builders<BsonDocument>.Filter.Empty)
|
||||
.Sort(Builders<BsonDocument>.Sort.Ascending("_id"))
|
||||
.ToListAsync();
|
||||
|
||||
Assert.Equal(BsonNull.Value, docs[0].GetValue("supersedes", BsonNull.Value));
|
||||
Assert.Equal("advisory_raw:test:gamma:v1", docs[1]["supersedes"].AsString);
|
||||
Assert.Equal("advisory_raw:test:gamma:v2", docs[2]["supersedes"].AsString);
|
||||
}
|
||||
finally
|
||||
{
|
||||
await _fixture.Client.DropDatabaseAsync(databaseName);
|
||||
}
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task EnsureAdvisorySupersedesBackfillMigration_IsIdempotentWhenViewExists()
|
||||
{
|
||||
var databaseName = $"advraw-supersedes-idem-{Guid.NewGuid():N}";
|
||||
var database = _fixture.Client.GetDatabase(databaseName);
|
||||
await database.CreateCollectionAsync("advisory_backup_20251028");
|
||||
await database.RunCommandAsync<BsonDocument>(new BsonDocument
|
||||
{
|
||||
{ "create", MongoStorageDefaults.Collections.Advisory },
|
||||
{ "viewOn", "advisory_backup_20251028" },
|
||||
});
|
||||
|
||||
var rawCollection = database.GetCollection<BsonDocument>(MongoStorageDefaults.Collections.AdvisoryRaw);
|
||||
await rawCollection.InsertManyAsync(new[]
|
||||
{
|
||||
CreateAdvisoryRawDocument(
|
||||
id: "advisory_raw:test:delta:v1",
|
||||
vendor: "test",
|
||||
upstreamId: "DELTA",
|
||||
contentHash: "sha256:aaa",
|
||||
tenant: "tenant-d",
|
||||
retrievedAt: new DateTime(2024, 11, 1, 0, 0, 0, DateTimeKind.Utc)),
|
||||
CreateAdvisoryRawDocument(
|
||||
id: "advisory_raw:test:delta:v2",
|
||||
vendor: "test",
|
||||
upstreamId: "DELTA",
|
||||
contentHash: "sha256:bbb",
|
||||
tenant: "tenant-d",
|
||||
retrievedAt: new DateTime(2024, 11, 3, 0, 0, 0, DateTimeKind.Utc)),
|
||||
});
|
||||
|
||||
await rawCollection.UpdateOneAsync(
|
||||
Builders<BsonDocument>.Filter.Eq("_id", "advisory_raw:test:delta:v2"),
|
||||
Builders<BsonDocument>.Update.Set("supersedes", "advisory_raw:test:delta:v1"));
|
||||
|
||||
try
|
||||
{
|
||||
var migration = new EnsureAdvisorySupersedesBackfillMigration();
|
||||
var runner = new MongoMigrationRunner(
|
||||
database,
|
||||
new IMongoMigration[] { migration },
|
||||
NullLogger<MongoMigrationRunner>.Instance,
|
||||
TimeProvider.System);
|
||||
|
||||
await runner.RunAsync(CancellationToken.None);
|
||||
await runner.RunAsync(CancellationToken.None);
|
||||
|
||||
var info = await GetCollectionInfoAsync(database, MongoStorageDefaults.Collections.Advisory);
|
||||
Assert.NotNull(info);
|
||||
Assert.Equal("view", info!["type"].AsString);
|
||||
Assert.True(ViewTargets(info!, "advisory_backup_20251028"));
|
||||
|
||||
var docs = await rawCollection.Find(Builders<BsonDocument>.Filter.Empty).ToListAsync();
|
||||
Assert.Equal(BsonNull.Value, docs.Single(d => d["_id"].AsString == "advisory_raw:test:delta:v1").GetValue("supersedes", BsonNull.Value));
|
||||
Assert.Equal("advisory_raw:test:delta:v1", docs.Single(d => d["_id"].AsString == "advisory_raw:test:delta:v2")["supersedes"].AsString);
|
||||
}
|
||||
finally
|
||||
{
|
||||
await _fixture.Client.DropDatabaseAsync(databaseName);
|
||||
}
|
||||
}
|
||||
|
||||
private static async Task<BsonDocument> GetCollectionInfoAsync(IMongoDatabase database, string name)
|
||||
{
|
||||
var command = new BsonDocument
|
||||
{
|
||||
{ "listCollections", 1 },
|
||||
{ "filter", new BsonDocument("name", name) },
|
||||
};
|
||||
|
||||
var result = await database.RunCommandAsync<BsonDocument>(command);
|
||||
var batch = result["cursor"]["firstBatch"].AsBsonArray;
|
||||
return batch.Single().AsBsonDocument;
|
||||
}
|
||||
|
||||
private static bool ViewTargets(BsonDocument info, string expectedSource)
|
||||
{
|
||||
if (!info.TryGetValue("options", out var options) || options is not BsonDocument optionsDoc)
|
||||
{
|
||||
return false;
|
||||
}
|
||||
|
||||
return optionsDoc.TryGetValue("viewOn", out var viewOn) && string.Equals(viewOn.AsString, expectedSource, StringComparison.Ordinal);
|
||||
}
|
||||
|
||||
private static BsonDocument CreateAdvisoryRawDocument(string id, string vendor, string upstreamId, string contentHash, string tenant, DateTime retrievedAt)
|
||||
{
|
||||
return new BsonDocument
|
||||
{
|
||||
{ "_id", id },
|
||||
{ "tenant", tenant },
|
||||
{
|
||||
"source",
|
||||
new BsonDocument
|
||||
{
|
||||
{ "vendor", vendor },
|
||||
{ "connector", "test-connector" },
|
||||
{ "version", "1.0.0" },
|
||||
}
|
||||
},
|
||||
{
|
||||
"upstream",
|
||||
new BsonDocument
|
||||
{
|
||||
{ "upstream_id", upstreamId },
|
||||
{ "document_version", "1" },
|
||||
{ "retrieved_at", retrievedAt },
|
||||
{ "content_hash", contentHash },
|
||||
{ "signature", new BsonDocument { { "present", false } } },
|
||||
{ "provenance", new BsonDocument { { "http.method", "GET" } } },
|
||||
}
|
||||
},
|
||||
{
|
||||
"content",
|
||||
new BsonDocument
|
||||
{
|
||||
{ "format", "csaf" },
|
||||
{ "raw", new BsonDocument("id", upstreamId) },
|
||||
}
|
||||
},
|
||||
{
|
||||
"identifiers",
|
||||
new BsonDocument
|
||||
{
|
||||
{ "aliases", new BsonArray(new[] { upstreamId }) },
|
||||
{ "primary", upstreamId },
|
||||
}
|
||||
},
|
||||
{
|
||||
"linkset",
|
||||
new BsonDocument
|
||||
{
|
||||
{ "aliases", new BsonArray() },
|
||||
{ "purls", new BsonArray() },
|
||||
{ "cpes", new BsonArray() },
|
||||
{ "references", new BsonArray() },
|
||||
{ "reconciled_from", new BsonArray() },
|
||||
{ "notes", new BsonDocument() },
|
||||
}
|
||||
},
|
||||
{ "advisory_key", upstreamId.ToUpperInvariant() },
|
||||
{
|
||||
"links",
|
||||
new BsonArray
|
||||
{
|
||||
new BsonDocument
|
||||
{
|
||||
{ "scheme", "PRIMARY" },
|
||||
{ "value", upstreamId.ToUpperInvariant() }
|
||||
}
|
||||
}
|
||||
},
|
||||
{ "created_at", retrievedAt },
|
||||
{ "ingested_at", retrievedAt },
|
||||
{ "supersedes", BsonNull.Value }
|
||||
};
|
||||
}
|
||||
}
|
||||
@@ -1,223 +0,0 @@
using System;
using System.Collections.Generic;
using System.Collections.Immutable;
using System.Linq;
using System.Text;
using System.Threading;
using System.Threading.Tasks;
using MongoDB.Bson;
using MongoDB.Driver;
using StellaOps.Concelier.Core.Events;
using StellaOps.Concelier.Models;
using StellaOps.Concelier.Storage.Mongo;
using StellaOps.Concelier.Storage.Mongo.Conflicts;
using StellaOps.Concelier.Storage.Mongo.Events;
using StellaOps.Concelier.Storage.Mongo.Statements;
using StellaOps.Concelier.Testing;
using StellaOps.Cryptography;
using StellaOps.Provenance.Mongo;
using Xunit;

namespace StellaOps.Concelier.Storage.Mongo.Tests;

[Collection("mongo-fixture")]
public sealed class MongoAdvisoryEventRepositoryTests
{
    private readonly IMongoDatabase _database;
    private readonly MongoAdvisoryEventRepository _repository;
    private static readonly ICryptoHash Hash = CryptoHashFactory.CreateDefault();

    public MongoAdvisoryEventRepositoryTests(MongoIntegrationFixture fixture)
    {
        _database = fixture.Database ?? throw new ArgumentNullException(nameof(fixture.Database));
        var statementStore = new AdvisoryStatementStore(_database);
        var conflictStore = new AdvisoryConflictStore(_database);
        _repository = new MongoAdvisoryEventRepository(statementStore, conflictStore);
    }

    [Fact]
    public async Task InsertAndFetchStatements_RoundTripsCanonicalPayload()
    {
        var advisory = CreateSampleAdvisory("CVE-2025-7777", "Sample advisory");
        var canonicalJson = CanonicalJsonSerializer.Serialize(advisory);
        var digest = Hash.ComputeHash(Encoding.UTF8.GetBytes(canonicalJson), HashAlgorithms.Sha256);
        var hash = ImmutableArray.Create(digest);

        var entry = new AdvisoryStatementEntry(
            Guid.NewGuid(),
            "CVE-2025-7777",
            "CVE-2025-7777",
            canonicalJson,
            hash,
            DateTimeOffset.Parse("2025-10-19T14:00:00Z"),
            DateTimeOffset.Parse("2025-10-19T14:05:00Z"),
            ImmutableArray<Guid>.Empty);

        await _repository.InsertStatementsAsync(new[] { entry }, CancellationToken.None);

        var results = await _repository.GetStatementsAsync("CVE-2025-7777", null, CancellationToken.None);

        var snapshot = Assert.Single(results);
        Assert.Equal(entry.StatementId, snapshot.StatementId);
        Assert.Equal(entry.CanonicalJson, snapshot.CanonicalJson);
        Assert.True(entry.StatementHash.SequenceEqual(snapshot.StatementHash));
    }

    [Fact]
    public async Task InsertAndFetchConflicts_PreservesDetails()
    {
        var detailJson = CanonicalJsonSerializer.Serialize(new ConflictPayload("severity", "mismatch"));
        var digest = Hash.ComputeHash(Encoding.UTF8.GetBytes(detailJson), HashAlgorithms.Sha256);
        var hash = ImmutableArray.Create(digest);
        var statementIds = ImmutableArray.Create(Guid.NewGuid(), Guid.NewGuid());

        var entry = new AdvisoryConflictEntry(
            Guid.NewGuid(),
            "CVE-2025-4242",
            detailJson,
            hash,
            DateTimeOffset.Parse("2025-10-19T15:00:00Z"),
            DateTimeOffset.Parse("2025-10-19T15:05:00Z"),
            statementIds);

        await _repository.InsertConflictsAsync(new[] { entry }, CancellationToken.None);

        var results = await _repository.GetConflictsAsync("CVE-2025-4242", null, CancellationToken.None);

        var conflict = Assert.Single(results);
        Assert.Equal(entry.CanonicalJson, conflict.CanonicalJson);
        Assert.True(entry.StatementIds.SequenceEqual(conflict.StatementIds));
        Assert.True(entry.ConflictHash.SequenceEqual(conflict.ConflictHash));
    }

    [Fact]
    public async Task InsertStatementsAsync_PersistsProvenanceMetadata()
    {
        var advisory = CreateSampleAdvisory("CVE-2025-8888", "Metadata coverage");
        var canonicalJson = CanonicalJsonSerializer.Serialize(advisory);
        var digest = Hash.ComputeHash(Encoding.UTF8.GetBytes(canonicalJson), HashAlgorithms.Sha256);
        var hash = ImmutableArray.Create(digest);
        var (dsse, trust) = CreateSampleDsseMetadata();

        var entry = new AdvisoryStatementEntry(
            Guid.NewGuid(),
            "CVE-2025-8888",
            "CVE-2025-8888",
            canonicalJson,
            hash,
            DateTimeOffset.Parse("2025-10-20T10:00:00Z"),
            DateTimeOffset.Parse("2025-10-20T10:05:00Z"),
            ImmutableArray<Guid>.Empty,
            dsse,
            trust);

        await _repository.InsertStatementsAsync(new[] { entry }, CancellationToken.None);

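        // Read the raw BSON back to confirm the DSSE envelope digest, signing key, and trust metadata were persisted alongside the statement.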
        var statements = _database.GetCollection<BsonDocument>(MongoStorageDefaults.Collections.AdvisoryStatements);
        var stored = await statements
            .Find(Builders<BsonDocument>.Filter.Eq("_id", entry.StatementId.ToString()))
            .FirstOrDefaultAsync();

        Assert.NotNull(stored);
        var provenance = stored!["provenance"].AsBsonDocument["dsse"].AsBsonDocument;
        Assert.Equal(dsse.EnvelopeDigest, provenance["envelopeDigest"].AsString);
        Assert.Equal(dsse.Key.KeyId, provenance["key"].AsBsonDocument["keyId"].AsString);

        var trustDoc = stored["trust"].AsBsonDocument;
        Assert.Equal(trust.Verifier, trustDoc["verifier"].AsString);
        Assert.Equal(trust.Witnesses, trustDoc["witnesses"].AsInt32);

        var roundTrip = await _repository.GetStatementsAsync("CVE-2025-8888", null, CancellationToken.None);
        var hydrated = Assert.Single(roundTrip);
        Assert.NotNull(hydrated.Provenance);
        Assert.NotNull(hydrated.Trust);
        Assert.Equal(dsse.EnvelopeDigest, hydrated.Provenance!.EnvelopeDigest);
        Assert.Equal(trust.Verifier, hydrated.Trust!.Verifier);
    }

    private static Advisory CreateSampleAdvisory(string key, string summary)
    {
        var provenance = new AdvisoryProvenance("nvd", "document", key, DateTimeOffset.Parse("2025-10-18T00:00:00Z"), new[] { ProvenanceFieldMasks.Advisory });
        return new Advisory(
            key,
            key,
            summary,
            "en",
            DateTimeOffset.Parse("2025-10-17T00:00:00Z"),
            DateTimeOffset.Parse("2025-10-18T00:00:00Z"),
            "medium",
            exploitKnown: false,
            aliases: new[] { key },
            references: Array.Empty<AdvisoryReference>(),
            affectedPackages: Array.Empty<AffectedPackage>(),
            cvssMetrics: Array.Empty<CvssMetric>(),
            provenance: new[] { provenance });
    }

    [Fact]
    public async Task AttachStatementProvenanceAsync_BackfillsExistingRecord()
    {
        var advisory = CreateSampleAdvisory("CVE-2025-9999", "Backfill metadata");
        var canonicalJson = CanonicalJsonSerializer.Serialize(advisory);
        var digest = Hash.ComputeHash(Encoding.UTF8.GetBytes(canonicalJson), HashAlgorithms.Sha256);
        var hash = ImmutableArray.Create(digest);

        var entry = new AdvisoryStatementEntry(
            Guid.NewGuid(),
            "CVE-2025-9999",
            "CVE-2025-9999",
            canonicalJson,
            hash,
            DateTimeOffset.Parse("2025-10-21T10:00:00Z"),
            DateTimeOffset.Parse("2025-10-21T10:05:00Z"),
            ImmutableArray<Guid>.Empty);

        await _repository.InsertStatementsAsync(new[] { entry }, CancellationToken.None);

        var (dsse, trust) = CreateSampleDsseMetadata();
        await _repository.AttachStatementProvenanceAsync(entry.StatementId, dsse, trust, CancellationToken.None);

        var statements = await _repository.GetStatementsAsync("CVE-2025-9999", null, CancellationToken.None);
        var updated = Assert.Single(statements);
        Assert.NotNull(updated.Provenance);
        Assert.NotNull(updated.Trust);
        Assert.Equal(dsse.EnvelopeDigest, updated.Provenance!.EnvelopeDigest);
        Assert.Equal(trust.Verifier, updated.Trust!.Verifier);
    }

    private static (DsseProvenance Provenance, TrustInfo Trust) CreateSampleDsseMetadata()
    {
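        // Fixture-only values: the digest, key id, and Rekor entry below are placeholders, not real signing material.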
        var provenance = new DsseProvenance
        {
            EnvelopeDigest = "sha256:deadbeef",
            PayloadType = "application/vnd.in-toto+json",
            Key = new DsseKeyInfo
            {
                KeyId = "cosign:SHA256-PKIX:TEST",
                Issuer = "fulcio",
                Algo = "ECDSA"
            },
            Rekor = new DsseRekorInfo
            {
                LogIndex = 42,
                Uuid = Guid.Parse("2d4d5f7c-1111-4a01-b9cb-aa42022a0a8c").ToString(),
                IntegratedTime = 1_700_000_000
            }
        };

        var trust = new TrustInfo
        {
            Verified = true,
            Verifier = "Authority@stella",
            Witnesses = 2,
            PolicyScore = 0.9
        };

        return (provenance, trust);
    }

    private sealed record ConflictPayload(string Type, string Reason);
}
@@ -1,143 +0,0 @@
using System;
using System.Linq;
using System.Threading;
using Microsoft.Extensions.Logging.Abstractions;
using Microsoft.Extensions.Options;
using MongoDB.Bson;
using MongoDB.Driver;
using StellaOps.Concelier.Storage.Mongo;
using StellaOps.Concelier.Storage.Mongo.Migrations;
using Xunit;

namespace StellaOps.Concelier.Storage.Mongo.Tests;

[Collection("mongo-fixture")]
public sealed class MongoBootstrapperTests : IClassFixture<MongoIntegrationFixture>
{
    private readonly MongoIntegrationFixture _fixture;

    public MongoBootstrapperTests(MongoIntegrationFixture fixture)
    {
        _fixture = fixture;
    }

    [Fact]
    public async Task InitializeAsync_CreatesNormalizedIndexesWhenSemVerStyleEnabled()
    {
        var databaseName = $"concelier-bootstrap-semver-{Guid.NewGuid():N}";
        var database = _fixture.Client.GetDatabase(databaseName);

        try
        {
            var runner = new MongoMigrationRunner(
                database,
                Array.Empty<IMongoMigration>(),
                NullLogger<MongoMigrationRunner>.Instance,
                TimeProvider.System);

            var bootstrapper = new MongoBootstrapper(
                database,
                Options.Create(new MongoStorageOptions { EnableSemVerStyle = true }),
                NullLogger<MongoBootstrapper>.Instance,
                runner);

            await bootstrapper.InitializeAsync(CancellationToken.None);

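            // With SemVer style enabled, bootstrap should have provisioned the normalized-version indexes on the advisory collection.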
            var indexCursor = await database
                .GetCollection<BsonDocument>(MongoStorageDefaults.Collections.Advisory)
                .Indexes
                .ListAsync();
            var indexNames = (await indexCursor.ToListAsync()).Select(x => x["name"].AsString).ToArray();

            Assert.Contains("advisory_normalizedVersions_pkg_scheme_type", indexNames);
            Assert.Contains("advisory_normalizedVersions_value", indexNames);
        }
        finally
        {
            await _fixture.Client.DropDatabaseAsync(databaseName);
        }
    }

    [Fact]
    public async Task InitializeAsync_DoesNotCreateNormalizedIndexesWhenFeatureDisabled()
    {
        var databaseName = $"concelier-bootstrap-no-semver-{Guid.NewGuid():N}";
        var database = _fixture.Client.GetDatabase(databaseName);

        try
        {
            var runner = new MongoMigrationRunner(
                database,
                Array.Empty<IMongoMigration>(),
                NullLogger<MongoMigrationRunner>.Instance,
                TimeProvider.System);

            var bootstrapper = new MongoBootstrapper(
                database,
                Options.Create(new MongoStorageOptions { EnableSemVerStyle = false }),
                NullLogger<MongoBootstrapper>.Instance,
                runner);

            await bootstrapper.InitializeAsync(CancellationToken.None);

            var indexCursor = await database
                .GetCollection<BsonDocument>(MongoStorageDefaults.Collections.Advisory)
                .Indexes
                .ListAsync();
            var indexNames = (await indexCursor.ToListAsync()).Select(x => x["name"].AsString).ToArray();

            Assert.DoesNotContain("advisory_normalizedVersions_pkg_scheme_type", indexNames);
            Assert.DoesNotContain("advisory_normalizedVersions_value", indexNames);
        }
        finally
        {
            await _fixture.Client.DropDatabaseAsync(databaseName);
        }
    }

    [Fact]
    public async Task InitializeAsync_CreatesAdvisoryEventIndexes()
    {
        var databaseName = $"concelier-bootstrap-events-{Guid.NewGuid():N}";
        var database = _fixture.Client.GetDatabase(databaseName);

        try
        {
            var runner = new MongoMigrationRunner(
                database,
                Array.Empty<IMongoMigration>(),
                NullLogger<MongoMigrationRunner>.Instance,
                TimeProvider.System);

            var bootstrapper = new MongoBootstrapper(
                database,
                Options.Create(new MongoStorageOptions()),
                NullLogger<MongoBootstrapper>.Instance,
                runner);

            await bootstrapper.InitializeAsync(CancellationToken.None);

            var statementIndexes = await database
                .GetCollection<BsonDocument>(MongoStorageDefaults.Collections.AdvisoryStatements)
                .Indexes
                .ListAsync();
            var statementIndexNames = (await statementIndexes.ToListAsync()).Select(x => x["name"].AsString).ToArray();

            Assert.Contains("advisory_statements_vulnerability_asof_desc", statementIndexNames);
            Assert.Contains("advisory_statements_statementHash_unique", statementIndexNames);

            var conflictIndexes = await database
                .GetCollection<BsonDocument>(MongoStorageDefaults.Collections.AdvisoryConflicts)
                .Indexes
                .ListAsync();
            var conflictIndexNames = (await conflictIndexes.ToListAsync()).Select(x => x["name"].AsString).ToArray();

            Assert.Contains("advisory_conflicts_vulnerability_asof_desc", conflictIndexNames);
            Assert.Contains("advisory_conflicts_conflictHash_unique", conflictIndexNames);
        }
        finally
        {
            await _fixture.Client.DropDatabaseAsync(databaseName);
        }
    }
}
@@ -1,113 +0,0 @@
using Microsoft.Extensions.Logging.Abstractions;
using MongoDB.Driver;
using StellaOps.Concelier.Core.Jobs;
using StellaOps.Concelier.Storage.Mongo;

namespace StellaOps.Concelier.Storage.Mongo.Tests;

[Collection("mongo-fixture")]
public sealed class MongoJobStoreTests : IClassFixture<MongoIntegrationFixture>
{
    private readonly MongoIntegrationFixture _fixture;

    public MongoJobStoreTests(MongoIntegrationFixture fixture)
    {
        _fixture = fixture;
    }

    [Fact]
    public async Task CreateStartCompleteLifecycle()
    {
        await ResetCollectionAsync();
        var collection = _fixture.Database.GetCollection<JobRunDocument>(MongoStorageDefaults.Collections.Jobs);
        var store = new MongoJobStore(collection, NullLogger<MongoJobStore>.Instance);

        var request = new JobRunCreateRequest(
            Kind: "mongo:test",
            Trigger: "unit",
            Parameters: new Dictionary<string, object?> { ["scope"] = "lifecycle" },
            ParametersHash: "abc",
            Timeout: TimeSpan.FromSeconds(5),
            LeaseDuration: TimeSpan.FromSeconds(2),
            CreatedAt: DateTimeOffset.UtcNow);

        var created = await store.CreateAsync(request, CancellationToken.None);
        Assert.Equal(JobRunStatus.Pending, created.Status);

        var started = await store.TryStartAsync(created.RunId, DateTimeOffset.UtcNow, CancellationToken.None);
        Assert.NotNull(started);
        Assert.Equal(JobRunStatus.Running, started!.Status);

        var completed = await store.TryCompleteAsync(created.RunId, new JobRunCompletion(JobRunStatus.Succeeded, DateTimeOffset.UtcNow, null), CancellationToken.None);
        Assert.NotNull(completed);
        Assert.Equal(JobRunStatus.Succeeded, completed!.Status);

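        // Query helpers should now observe the run only in its terminal, succeeded state.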
        var recent = await store.GetRecentRunsAsync("mongo:test", 10, CancellationToken.None);
        var snapshot = Assert.Single(recent);
        Assert.Equal(JobRunStatus.Succeeded, snapshot.Status);

        var active = await store.GetActiveRunsAsync(CancellationToken.None);
        Assert.Empty(active);

        var last = await store.GetLastRunAsync("mongo:test", CancellationToken.None);
        Assert.NotNull(last);
        Assert.Equal(completed.RunId, last!.RunId);
    }

    [Fact]
    public async Task StartAndFailRunHonorsStateTransitions()
    {
        await ResetCollectionAsync();
        var collection = _fixture.Database.GetCollection<JobRunDocument>(MongoStorageDefaults.Collections.Jobs);
        var store = new MongoJobStore(collection, NullLogger<MongoJobStore>.Instance);

        var request = new JobRunCreateRequest(
            Kind: "mongo:failure",
            Trigger: "unit",
            Parameters: new Dictionary<string, object?>(),
            ParametersHash: null,
            Timeout: null,
            LeaseDuration: null,
            CreatedAt: DateTimeOffset.UtcNow);

        var created = await store.CreateAsync(request, CancellationToken.None);
        var firstStart = await store.TryStartAsync(created.RunId, DateTimeOffset.UtcNow, CancellationToken.None);
        Assert.NotNull(firstStart);

        // Second start attempt should be rejected once running.
        var secondStart = await store.TryStartAsync(created.RunId, DateTimeOffset.UtcNow.AddSeconds(1), CancellationToken.None);
        Assert.Null(secondStart);

        var failure = await store.TryCompleteAsync(
            created.RunId,
            new JobRunCompletion(JobRunStatus.Failed, DateTimeOffset.UtcNow.AddSeconds(2), "boom"),
            CancellationToken.None);

        Assert.NotNull(failure);
        Assert.Equal("boom", failure!.Error);
        Assert.Equal(JobRunStatus.Failed, failure.Status);
    }

    [Fact]
    public async Task CompletingUnknownRunReturnsNull()
    {
        await ResetCollectionAsync();
        var collection = _fixture.Database.GetCollection<JobRunDocument>(MongoStorageDefaults.Collections.Jobs);
        var store = new MongoJobStore(collection, NullLogger<MongoJobStore>.Instance);

        var result = await store.TryCompleteAsync(Guid.NewGuid(), new JobRunCompletion(JobRunStatus.Succeeded, DateTimeOffset.UtcNow, null), CancellationToken.None);

        Assert.Null(result);
    }

    private async Task ResetCollectionAsync()
    {
        try
        {
            await _fixture.Database.DropCollectionAsync(MongoStorageDefaults.Collections.Jobs);
        }
        catch (MongoCommandException ex) when (ex.CodeName == "NamespaceNotFound" || ex.Message.Contains("ns not found", StringComparison.OrdinalIgnoreCase))
        {
            // Collection has not been created yet; nothing to reset.
        }
    }
}
@@ -1,55 +0,0 @@
using Microsoft.Extensions.Logging.Abstractions;
using MongoDB.Bson;
using StellaOps.Concelier.Storage.Mongo;

namespace StellaOps.Concelier.Storage.Mongo.Tests;

[Collection("mongo-fixture")]
public sealed class MongoSourceStateRepositoryTests : IClassFixture<MongoIntegrationFixture>
{
    private readonly MongoIntegrationFixture _fixture;

    public MongoSourceStateRepositoryTests(MongoIntegrationFixture fixture)
    {
        _fixture = fixture;
    }

    [Fact]
    public async Task UpsertAndUpdateCursorFlow()
    {
        var repository = new MongoSourceStateRepository(_fixture.Database, NullLogger<MongoSourceStateRepository>.Instance);
        var sourceName = "nvd";

        var record = new SourceStateRecord(
            SourceName: sourceName,
            Enabled: true,
            Paused: false,
            Cursor: new BsonDocument("page", 1),
            LastSuccess: null,
            LastFailure: null,
            FailCount: 0,
            BackoffUntil: null,
            UpdatedAt: DateTimeOffset.UtcNow,
            LastFailureReason: null);

        var upserted = await repository.UpsertAsync(record, CancellationToken.None);
        Assert.True(upserted.Enabled);

        var cursor = new BsonDocument("page", 2);
        var updated = await repository.UpdateCursorAsync(sourceName, cursor, DateTimeOffset.UtcNow, CancellationToken.None);
        Assert.NotNull(updated);
        Assert.Equal(0, updated!.FailCount);
        Assert.Equal(2, updated.Cursor["page"].AsInt32);

var failure = await repository.MarkFailureAsync(sourceName, DateTimeOffset.UtcNow, TimeSpan.FromMinutes(5), "network timeout", CancellationToken.None);
|
||||
Assert.NotNull(failure);
|
||||
Assert.Equal(1, failure!.FailCount);
|
||||
Assert.NotNull(failure.BackoffUntil);
|
||||
Assert.Equal("network timeout", failure.LastFailureReason);
|
||||
|
||||
var fetched = await repository.TryGetAsync(sourceName, CancellationToken.None);
|
||||
Assert.NotNull(fetched);
|
||||
Assert.Equal(failure.BackoffUntil, fetched!.BackoffUntil);
|
||||
Assert.Equal("network timeout", fetched.LastFailureReason);
|
||||
}
|
||||
}
|
||||
@@ -1,95 +0,0 @@
using System;
using System.Collections.Generic;
using MongoDB.Bson;
using StellaOps.Concelier.Storage.Mongo.Observations;
using Xunit;

namespace StellaOps.Concelier.Storage.Mongo.Tests.Observations;

public sealed class AdvisoryObservationDocumentFactoryTests
{
[Fact]
public void ToModel_MapsDocumentToModel()
{
var document = new AdvisoryObservationDocument
{
Id = "tenant-a:obs-1",
Tenant = "tenant-a",
CreatedAt = DateTime.SpecifyKind(DateTime.UtcNow, DateTimeKind.Utc),
Source = new AdvisoryObservationSourceDocument
{
Vendor = "vendor",
Stream = "stream",
Api = "https://api.example"
},
Upstream = new AdvisoryObservationUpstreamDocument
{
UpstreamId = "CVE-2025-1234",
DocumentVersion = "1",
FetchedAt = DateTime.SpecifyKind(DateTime.UtcNow.AddMinutes(-1), DateTimeKind.Utc),
ReceivedAt = DateTime.SpecifyKind(DateTime.UtcNow, DateTimeKind.Utc),
ContentHash = "sha256:abc",
Signature = new AdvisoryObservationSignatureDocument
{
Present = true,
Format = "pgp",
KeyId = "key",
Signature = "signature"
}
},
Content = new AdvisoryObservationContentDocument
{
Format = "CSAF",
SpecVersion = "2.0",
Raw = BsonDocument.Parse("{\"example\":true}")
},
Linkset = new AdvisoryObservationLinksetDocument
{
Aliases = new List<string> { "CVE-2025-1234" },
Purls = new List<string> { "pkg:generic/foo@1.0.0" },
Cpes = new List<string> { "cpe:/a:vendor:product:1" },
References = new List<AdvisoryObservationReferenceDocument>
{
new() { Type = "advisory", Url = "https://example.com" }
}
},
RawLinkset = new AdvisoryObservationRawLinksetDocument
{
Aliases = new List<string> { "CVE-2025-1234", "cve-2025-1234" },
Scopes = new List<string> { "runtime", "build" },
Relationships = new List<AdvisoryObservationRawRelationshipDocument>
{
new() { Type = "depends_on", Source = "componentA", Target = "componentB", Provenance = "sbom-manifest" }
},
PackageUrls = new List<string> { "pkg:generic/foo@1.0.0" },
Cpes = new List<string> { "cpe:/a:vendor:product:1" },
References = new List<AdvisoryObservationRawReferenceDocument>
{
new() { Type = "Advisory", Url = "https://example.com", Source = "vendor" }
},
ReconciledFrom = new List<string> { "source-a" },
Notes = new Dictionary<string, string> { ["note-key"] = "note-value" }
}
};

var observation = AdvisoryObservationDocumentFactory.ToModel(document);

Assert.Equal("tenant-a:obs-1", observation.ObservationId);
Assert.Equal("tenant-a", observation.Tenant);
Assert.Equal("CVE-2025-1234", observation.Upstream.UpstreamId);
Assert.Equal(new[] { "CVE-2025-1234" }, observation.Linkset.Aliases.ToArray());
Assert.Contains("pkg:generic/foo@1.0.0", observation.Linkset.Purls);
Assert.Equal("CSAF", observation.Content.Format);
Assert.True(observation.Content.Raw?["example"]?.GetValue<bool>());
Assert.Equal(document.Linkset.References![0].Type, observation.Linkset.References[0].Type);
Assert.Equal(new[] { "CVE-2025-1234", "cve-2025-1234" }, observation.RawLinkset.Aliases);
Assert.Equal(new[] { "runtime", "build" }, observation.RawLinkset.Scopes);
Assert.Equal("depends_on", observation.RawLinkset.Relationships[0].Type);
Assert.Equal("componentA", observation.RawLinkset.Relationships[0].Source);
Assert.Equal("componentB", observation.RawLinkset.Relationships[0].Target);
Assert.Equal("sbom-manifest", observation.RawLinkset.Relationships[0].Provenance);
Assert.Equal("Advisory", observation.RawLinkset.References[0].Type);
Assert.Equal("vendor", observation.RawLinkset.References[0].Source);
Assert.Equal("note-value", observation.RawLinkset.Notes["note-key"]);
}
}
@@ -1,260 +0,0 @@
using System;
using System.Collections.Generic;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using MongoDB.Bson;
using MongoDB.Driver;
using StellaOps.Concelier.Core.Observations;
using StellaOps.Concelier.Storage.Mongo;
using StellaOps.Concelier.Storage.Mongo.Observations;
using StellaOps.Concelier.Testing;
using Xunit;

namespace StellaOps.Concelier.Storage.Mongo.Tests.Observations;

[Collection("mongo-fixture")]
public sealed class AdvisoryObservationStoreTests : IClassFixture<MongoIntegrationFixture>
{
private readonly MongoIntegrationFixture _fixture;

public AdvisoryObservationStoreTests(MongoIntegrationFixture fixture)
{
_fixture = fixture;
}

[Fact]
public async Task FindByFiltersAsync_FiltersByAliasAndTenant()
{
await ResetCollectionAsync();

var collection = _fixture.Database.GetCollection<AdvisoryObservationDocument>(MongoStorageDefaults.Collections.AdvisoryObservations);
await collection.InsertManyAsync(new[]
{
CreateDocument(
id: "tenant-a:nvd:alpha:1",
tenant: "tenant-a",
createdAt: new DateTime(2025, 1, 1, 0, 0, 0, DateTimeKind.Utc),
aliases: new[] { "CvE-2025-0001 " },
purls: new[] { "pkg:npm/demo@1.0.0" }),
CreateDocument(
id: "tenant-a:ghsa:beta:1",
tenant: "tenant-a",
createdAt: new DateTime(2025, 1, 2, 0, 0, 0, DateTimeKind.Utc),
aliases: new[] { " ghsa-xyz0", "cve-2025-0001" },
purls: new[] { "pkg:npm/demo@1.1.0" }),
CreateDocument(
id: "tenant-b:nvd:alpha:1",
tenant: "tenant-b",
createdAt: new DateTime(2025, 1, 3, 0, 0, 0, DateTimeKind.Utc),
aliases: new[] { "cve-2025-0001" },
purls: new[] { "pkg:npm/demo@2.0.0" })
});

var store = new AdvisoryObservationStore(collection);
var result = await store.FindByFiltersAsync(
tenant: "Tenant-A",
observationIds: Array.Empty<string>(),
aliases: new[] { " CVE-2025-0001 " },
purls: Array.Empty<string>(),
cpes: Array.Empty<string>(),
cursor: null,
limit: 5,
CancellationToken.None);

Assert.Equal(2, result.Count);
Assert.Equal("tenant-a:ghsa:beta:1", result[0].ObservationId);
Assert.Equal("tenant-a:nvd:alpha:1", result[1].ObservationId);
Assert.All(result, observation => Assert.Equal("tenant-a", observation.Tenant));
Assert.Equal("ghsa-xyz0", result[0].Linkset.Aliases[0]);
Assert.Equal("CvE-2025-0001", result[1].Linkset.Aliases[0]);
Assert.Equal(" ghsa-xyz0", result[0].RawLinkset.Aliases[0]);
Assert.Equal("CvE-2025-0001 ", result[1].RawLinkset.Aliases[0]);
}

[Fact]
public async Task FindByFiltersAsync_RespectsObservationIdsAndPurls()
{
await ResetCollectionAsync();

var collection = _fixture.Database.GetCollection<AdvisoryObservationDocument>(MongoStorageDefaults.Collections.AdvisoryObservations);
await collection.InsertManyAsync(new[]
{
CreateDocument(
id: "tenant-a:osv:alpha:1",
tenant: "tenant-a",
createdAt: new DateTime(2025, 2, 1, 0, 0, 0, DateTimeKind.Utc),
aliases: new[] { "cve-2025-0100" },
purls: new[] { "pkg:pypi/demo@2.0.0" },
cpes: new[] { "cpe:/a:vendor:product:2.0" }),
CreateDocument(
id: "tenant-a:osv:alpha:2",
tenant: "tenant-a",
createdAt: new DateTime(2025, 2, 2, 0, 0, 0, DateTimeKind.Utc),
aliases: new[] { "cve-2025-0100" },
purls: new[] { "pkg:pypi/demo@2.1.0" },
cpes: new[] { "cpe:/a:vendor:product:2.1" })
});

var store = new AdvisoryObservationStore(collection);
var result = await store.FindByFiltersAsync(
tenant: "tenant-a",
observationIds: new[] { "tenant-a:osv:alpha:1" },
aliases: Array.Empty<string>(),
purls: new[] { "pkg:pypi/demo@2.0.0" },
cpes: new[] { "cpe:/a:vendor:product:2.0" },
cursor: null,
limit: 5,
CancellationToken.None);

Assert.Single(result);
Assert.Equal("tenant-a:osv:alpha:1", result[0].ObservationId);
Assert.Equal(
new[] { "pkg:pypi/demo@2.0.0" },
result[0].Linkset.Purls.ToArray());
Assert.Equal(
new[] { "cpe:/a:vendor:product:2.0" },
result[0].Linkset.Cpes.ToArray());
}

[Fact]
public async Task FindByFiltersAsync_AppliesCursorForPagination()
{
await ResetCollectionAsync();

var collection = _fixture.Database.GetCollection<AdvisoryObservationDocument>(MongoStorageDefaults.Collections.AdvisoryObservations);
var createdAt = new DateTime(2025, 3, 1, 0, 0, 0, DateTimeKind.Utc);
await collection.InsertManyAsync(new[]
{
CreateDocument("tenant-a:source:1", "tenant-a", createdAt, aliases: new[] { "cve-1" }),
CreateDocument("tenant-a:source:2", "tenant-a", createdAt.AddMinutes(-1), aliases: new[] { "cve-2" }),
CreateDocument("tenant-a:source:3", "tenant-a", createdAt.AddMinutes(-2), aliases: new[] { "cve-3" })
});

var store = new AdvisoryObservationStore(collection);

var firstPage = await store.FindByFiltersAsync(
tenant: "tenant-a",
observationIds: Array.Empty<string>(),
aliases: Array.Empty<string>(),
purls: Array.Empty<string>(),
cpes: Array.Empty<string>(),
cursor: null,
limit: 2,
CancellationToken.None);

Assert.Equal(2, firstPage.Count);
Assert.Equal("tenant-a:source:1", firstPage[0].ObservationId);
Assert.Equal("tenant-a:source:2", firstPage[1].ObservationId);

var cursor = new AdvisoryObservationCursor(firstPage[1].CreatedAt, firstPage[1].ObservationId);
var secondPage = await store.FindByFiltersAsync(
tenant: "tenant-a",
observationIds: Array.Empty<string>(),
aliases: Array.Empty<string>(),
purls: Array.Empty<string>(),
cpes: Array.Empty<string>(),
cursor: cursor,
limit: 2,
CancellationToken.None);

Assert.Single(secondPage);
Assert.Equal("tenant-a:source:3", secondPage[0].ObservationId);
}

private static AdvisoryObservationDocument CreateDocument(
string id,
string tenant,
DateTime createdAt,
IEnumerable<string>? aliases = null,
IEnumerable<string>? purls = null,
IEnumerable<string>? cpes = null)
{
var canonicalAliases = aliases?
.Where(value => value is not null)
.Select(value => value.Trim())
.ToList();

var canonicalPurls = purls?
.Where(value => value is not null)
.Select(value => value.Trim())
.ToList();

var canonicalCpes = cpes?
.Where(value => value is not null)
.Select(value => value.Trim())
.ToList();

var rawAliases = aliases?
.Where(value => value is not null)
.ToList();

var rawPurls = purls?
.Where(value => value is not null)
.ToList();

var rawCpes = cpes?
.Where(value => value is not null)
.ToList();

return new AdvisoryObservationDocument
{
Id = id,
Tenant = tenant.ToLowerInvariant(),
CreatedAt = createdAt,
Source = new AdvisoryObservationSourceDocument
{
Vendor = "nvd",
Stream = "feed",
Api = "https://example.test/api"
},
Upstream = new AdvisoryObservationUpstreamDocument
{
UpstreamId = id,
DocumentVersion = null,
FetchedAt = createdAt,
ReceivedAt = createdAt,
ContentHash = $"sha256:{id}",
Signature = new AdvisoryObservationSignatureDocument
{
Present = false
},
Metadata = new Dictionary<string, string>(StringComparer.Ordinal)
},
Content = new AdvisoryObservationContentDocument
{
Format = "csaf",
SpecVersion = "2.0",
Raw = BsonDocument.Parse("""{"id": "%ID%"}""".Replace("%ID%", id)),
Metadata = new Dictionary<string, string>(StringComparer.Ordinal)
},
Linkset = new AdvisoryObservationLinksetDocument
{
Aliases = canonicalAliases,
Purls = canonicalPurls,
Cpes = canonicalCpes,
References = new List<AdvisoryObservationReferenceDocument>()
},
RawLinkset = new AdvisoryObservationRawLinksetDocument
{
Aliases = rawAliases,
PackageUrls = rawPurls,
Cpes = rawCpes,
References = new List<AdvisoryObservationRawReferenceDocument>()
},
Attributes = new Dictionary<string, string>(StringComparer.Ordinal)
};
}

private async Task ResetCollectionAsync()
{
try
{
await _fixture.Database.DropCollectionAsync(MongoStorageDefaults.Collections.AdvisoryObservations);
}
catch (MongoCommandException ex) when (ex.CodeName == "NamespaceNotFound" || ex.Message.Contains("ns not found", StringComparison.OrdinalIgnoreCase))
{
// Collection did not exist – ignore.
}
}
}
@@ -1,100 +0,0 @@
using System;
using System.Collections.Immutable;
using System.Collections.Generic;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.Extensions.Logging.Abstractions;
using Microsoft.Extensions.Options;
using StellaOps.Concelier.Core.Observations;
using StellaOps.Concelier.Storage.Mongo.Observations;
using StellaOps.Concelier.Models.Observations;
using Xunit;

namespace StellaOps.Concelier.Storage.Mongo.Tests.Observations;

public class AdvisoryObservationTransportWorkerTests
{
[Fact]
public async Task Worker_publishes_outbox_entries_and_marks_published_once()
{
var evt = new AdvisoryObservationUpdatedEvent(
Guid.NewGuid(),
"tenant-1",
"obs-1",
"adv-1",
new Models.Observations.AdvisoryObservationSource("vendor", "stream", "api", "1.0.0"),
new AdvisoryObservationLinksetSummary(
ImmutableArray<string>.Empty,
ImmutableArray<string>.Empty,
ImmutableArray<string>.Empty,
ImmutableArray<string>.Empty,
ImmutableArray<AdvisoryObservationRelationshipSummary>.Empty),
"doc-sha",
"hash-1",
DateTimeOffset.UtcNow,
ReplayCursor: "cursor-1",
SupersedesId: null,
TraceId: "trace-1");

var outbox = new FakeOutbox(evt);
var transport = new FakeTransport();
var options = Options.Create(new AdvisoryObservationEventPublisherOptions
{
Enabled = true,
Transport = "nats",
Subject = "subject",
Stream = "stream",
NatsUrl = "nats://localhost:4222"
});

var worker = new AdvisoryObservationTransportWorker(outbox, transport, options, NullLogger<AdvisoryObservationTransportWorker>.Instance);

await worker.StartAsync(CancellationToken.None);
await Task.Delay(150, CancellationToken.None);
await worker.StopAsync(CancellationToken.None);

Assert.Equal(1, transport.Sent.Count);
Assert.Equal(evt.EventId, transport.Sent[0].EventId);
Assert.Equal(1, outbox.MarkedCount);
}

private sealed class FakeOutbox : IAdvisoryObservationEventOutbox
{
private readonly AdvisoryObservationUpdatedEvent _event;
private bool _dequeued;
public int MarkedCount { get; private set; }

public FakeOutbox(AdvisoryObservationUpdatedEvent @event)
{
_event = @event;
}

public Task<IReadOnlyCollection<AdvisoryObservationUpdatedEvent>> DequeueAsync(int take, CancellationToken cancellationToken)
{
if (_dequeued)
{
return Task.FromResult<IReadOnlyCollection<AdvisoryObservationUpdatedEvent>>(Array.Empty<AdvisoryObservationUpdatedEvent>());
}

_dequeued = true;
return Task.FromResult<IReadOnlyCollection<AdvisoryObservationUpdatedEvent>>(new[] { _event });
}

public Task MarkPublishedAsync(Guid eventId, DateTimeOffset publishedAt, CancellationToken cancellationToken)
{
MarkedCount++;
return Task.CompletedTask;
}
}

private sealed class FakeTransport : IAdvisoryObservationEventTransport
{
public List<AdvisoryObservationUpdatedEvent> Sent { get; } = new();

public Task SendAsync(AdvisoryObservationUpdatedEvent @event, CancellationToken cancellationToken)
{
Sent.Add(@event);
return Task.CompletedTask;
}
}
}
@@ -1,94 +0,0 @@
using System;
using System.Collections.Generic;
using System.Collections.Immutable;
using MongoDB.Bson;
using StellaOps.Concelier.Models.Observations;
using StellaOps.Concelier.Storage.Mongo.Observations.V1;
using Xunit;

namespace StellaOps.Concelier.Storage.Mongo.Tests.Observations;

public sealed class AdvisoryObservationV1DocumentFactoryTests
{
[Fact]
public void ObservationIdBuilder_IsDeterministic()
{
var id1 = ObservationIdBuilder.Create("TENANT", "Ghsa", "GHSA-1234", "sha256:abc");
var id2 = ObservationIdBuilder.Create("tenant", "ghsa", "GHSA-1234", "sha256:abc");

Assert.Equal(id1, id2);
}

[Fact]
public void ToModel_MapsAndNormalizes()
{
var document = new AdvisoryObservationV1Document
{
Id = new ObjectId("6710f1f1a1b2c3d4e5f60708"),
TenantId = "TENANT-01",
Source = "GHSA",
AdvisoryId = "GHSA-2025-0001",
Title = "Test title",
Summary = "Summary",
Severities = new List<ObservationSeverityDocument>
{
new() { System = "cvssv3.1", Score = 7.5, Vector = "AV:N/AC:L/PR:N/UI:N/S:U/C:H/I:N/A:N" }
},
Affected = new List<ObservationAffectedDocument>
{
new()
{
Purl = "pkg:nuget/foo@1.2.3",
Package = "foo",
Versions = new List<string>{ "1.2.3" },
Ranges = new List<ObservationVersionRangeDocument>
{
new()
{
Type = "ECOSYSTEM",
Events = new List<ObservationRangeEventDocument>
{
new(){ Event = "introduced", Value = "1.0.0" },
new(){ Event = "fixed", Value = "1.2.3" }
}
}
},
Ecosystem = "nuget",
Cpes = new List<string>{ "cpe:/a:foo:bar:1.2.3" }
}
},
References = new List<string>{ "https://example.test/advisory" },
Weaknesses = new List<string>{ "CWE-79" },
Published = new DateTime(2025, 11, 1, 0, 0, 0, DateTimeKind.Utc),
Modified = new DateTime(2025, 11, 10, 0, 0, 0, DateTimeKind.Utc),
IngestedAt = new DateTime(2025, 11, 12, 0, 0, 0, DateTimeKind.Utc),
Provenance = new ObservationProvenanceDocument
{
SourceArtifactSha = "sha256:abc",
FetchedAt = new DateTime(2025, 11, 12, 0, 0, 0, DateTimeKind.Utc),
IngestJobId = "job-1",
Signature = new ObservationSignatureDocument
{
Present = true,
Format = "dsse",
KeyId = "k1",
Signature = "sig"
}
}
};

var model = AdvisoryObservationV1DocumentFactory.ToModel(document);

Assert.Equal("6710f1f1a1b2c3d4e5f60708", model.ObservationId);
Assert.Equal("tenant-01", model.Tenant);
Assert.Equal("ghsa", model.Source);
Assert.Equal("GHSA-2025-0001", model.AdvisoryId);
Assert.Equal("Test title", model.Title);
Assert.Single(model.Severities);
Assert.Single(model.Affected);
Assert.Single(model.References);
Assert.Single(model.Weaknesses);
Assert.Equal(new DateTimeOffset(2025, 11, 12, 0, 0, 0, TimeSpan.Zero), model.IngestedAt);
Assert.NotNull(model.Provenance.Signature);
}
}
@@ -1,93 +0,0 @@
using Microsoft.Extensions.Logging.Abstractions;
using Microsoft.Extensions.Options;
using Microsoft.Extensions.Time.Testing;
using MongoDB.Bson;
using MongoDB.Driver;
using MongoDB.Driver.GridFS;
using StellaOps.Concelier.Storage.Mongo;
using StellaOps.Concelier.Storage.Mongo.Documents;
using StellaOps.Concelier.Storage.Mongo.Dtos;

namespace StellaOps.Concelier.Storage.Mongo.Tests;

[Collection("mongo-fixture")]
public sealed class RawDocumentRetentionServiceTests : IClassFixture<MongoIntegrationFixture>
{
private readonly MongoIntegrationFixture _fixture;

public RawDocumentRetentionServiceTests(MongoIntegrationFixture fixture)
{
_fixture = fixture;
}

[Fact]
public async Task SweepExpiredDocumentsAsync_RemovesExpiredRawDocuments()
{
var database = _fixture.Database;
var documents = database.GetCollection<DocumentDocument>(MongoStorageDefaults.Collections.Document);
var dtos = database.GetCollection<DtoDocument>(MongoStorageDefaults.Collections.Dto);
var bucket = new GridFSBucket(database, new GridFSBucketOptions { BucketName = "documents" });

var now = new DateTimeOffset(2024, 10, 1, 12, 0, 0, TimeSpan.Zero);
var fakeTime = new FakeTimeProvider(now);

var options = Options.Create(new MongoStorageOptions
{
ConnectionString = _fixture.Runner.ConnectionString,
DatabaseName = database.DatabaseNamespace.DatabaseName,
RawDocumentRetention = TimeSpan.FromDays(1),
RawDocumentRetentionTtlGrace = TimeSpan.Zero,
RawDocumentRetentionSweepInterval = TimeSpan.FromMinutes(5),
});

var expiredId = Guid.NewGuid().ToString();
var gridFsId = await bucket.UploadFromBytesAsync("expired", new byte[] { 1, 2, 3 });
await documents.InsertOneAsync(new DocumentDocument
{
Id = expiredId,
SourceName = "nvd",
Uri = "https://example.test/cve",
FetchedAt = now.AddDays(-2).UtcDateTime,
Sha256 = "abc",
Status = "pending",
ExpiresAt = now.AddMinutes(-5).UtcDateTime,
GridFsId = gridFsId,
});

await dtos.InsertOneAsync(new DtoDocument
{
Id = Guid.NewGuid().ToString(),
DocumentId = expiredId,
SourceName = "nvd",
SchemaVersion = "schema",
Payload = new BsonDocument("value", 1),
ValidatedAt = now.UtcDateTime,
});

var freshId = Guid.NewGuid().ToString();
await documents.InsertOneAsync(new DocumentDocument
{
Id = freshId,
SourceName = "nvd",
Uri = "https://example.test/future",
FetchedAt = now.UtcDateTime,
Sha256 = "def",
Status = "pending",
ExpiresAt = now.AddHours(1).UtcDateTime,
GridFsId = null,
});

var service = new RawDocumentRetentionService(database, options, NullLogger<RawDocumentRetentionService>.Instance, fakeTime);

var removed = await service.SweepExpiredDocumentsAsync(CancellationToken.None);

Assert.Equal(1, removed);
Assert.Equal(0, await documents.CountDocumentsAsync(d => d.Id == expiredId));
Assert.Equal(0, await dtos.CountDocumentsAsync(d => d.DocumentId == expiredId));
Assert.Equal(1, await documents.CountDocumentsAsync(d => d.Id == freshId));

var filter = Builders<GridFSFileInfo>.Filter.Eq("_id", gridFsId);
using var cursor = await bucket.FindAsync(filter);
Assert.Empty(await cursor.ToListAsync());
}
}
@@ -1,17 +0,0 @@
<?xml version='1.0' encoding='utf-8'?>
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<TargetFramework>net10.0</TargetFramework>
<ImplicitUsings>enable</ImplicitUsings>
<Nullable>enable</Nullable>
</PropertyGroup>
<ItemGroup>
<PackageReference Update="Microsoft.Extensions.TimeProvider.Testing" Version="9.10.0" />
</ItemGroup>
<ItemGroup>
<ProjectReference Include="../../__Libraries/StellaOps.Concelier.Core/StellaOps.Concelier.Core.csproj" />
<ProjectReference Include="../../__Libraries/StellaOps.Concelier.Models/StellaOps.Concelier.Models.csproj" />
<ProjectReference Include="../../__Libraries/StellaOps.Concelier.Storage.Mongo/StellaOps.Concelier.Storage.Mongo.csproj" />
<ProjectReference Include="../../../__Libraries/StellaOps.Cryptography/StellaOps.Cryptography.csproj" />
</ItemGroup>
</Project>