Resolve Concelier/Excititor merge conflicts
@@ -0,0 +1,82 @@
using System;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using MongoDB.Bson;
using MongoDB.Driver;
using StellaOps.Concelier.Storage.Mongo;
using StellaOps.Concelier.Storage.Mongo.Conflicts;
using StellaOps.Concelier.Testing;
using Xunit;

namespace StellaOps.Concelier.Storage.Mongo.Tests;

[Collection("mongo-fixture")]
public sealed class AdvisoryConflictStoreTests
{
    private readonly IMongoDatabase _database;

    public AdvisoryConflictStoreTests(MongoIntegrationFixture fixture)
    {
        _database = fixture.Database ?? throw new ArgumentNullException(nameof(fixture.Database));
    }

    [Fact]
    public async Task InsertAndRetrieve_PersistsConflicts()
    {
        var store = new AdvisoryConflictStore(_database);
        var vulnerabilityKey = $"CVE-{Guid.NewGuid():N}";
        var baseTime = DateTimeOffset.UtcNow;
        var statementIds = new[] { Guid.NewGuid(), Guid.NewGuid() };

        var conflict = new AdvisoryConflictRecord(
            Guid.NewGuid(),
            vulnerabilityKey,
            new byte[] { 0x10, 0x20 },
            baseTime,
            baseTime.AddSeconds(30),
            statementIds,
            new BsonDocument("explanation", "first-pass"));

        await store.InsertAsync(new[] { conflict }, CancellationToken.None);

        var results = await store.GetConflictsAsync(vulnerabilityKey, null, CancellationToken.None);

        Assert.Single(results);
        Assert.Equal(conflict.Id, results[0].Id);
        Assert.Equal(statementIds, results[0].StatementIds);
    }

    [Fact]
    public async Task GetConflicts_AsOfFilters()
    {
        var store = new AdvisoryConflictStore(_database);
        var vulnerabilityKey = $"CVE-{Guid.NewGuid():N}";
        var baseTime = DateTimeOffset.UtcNow;

        var earlyConflict = new AdvisoryConflictRecord(
            Guid.NewGuid(),
            vulnerabilityKey,
            new byte[] { 0x01 },
            baseTime,
            baseTime.AddSeconds(10),
            new[] { Guid.NewGuid() },
            new BsonDocument("stage", "early"));

        var lateConflict = new AdvisoryConflictRecord(
            Guid.NewGuid(),
            vulnerabilityKey,
            new byte[] { 0x02 },
            baseTime.AddMinutes(10),
            baseTime.AddMinutes(10).AddSeconds(15),
            new[] { Guid.NewGuid() },
            new BsonDocument("stage", "late"));

        await store.InsertAsync(new[] { earlyConflict, lateConflict }, CancellationToken.None);

        var results = await store.GetConflictsAsync(vulnerabilityKey, baseTime.AddMinutes(1), CancellationToken.None);

        Assert.Single(results);
        Assert.Equal("early", results[0].Details["stage"].AsString);
    }
}
@@ -0,0 +1,96 @@
using System;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using MongoDB.Bson;
using MongoDB.Driver;
using StellaOps.Concelier.Storage.Mongo;
using StellaOps.Concelier.Storage.Mongo.Statements;
using StellaOps.Concelier.Testing;
using Xunit;

namespace StellaOps.Concelier.Storage.Mongo.Tests;

[Collection("mongo-fixture")]
public sealed class AdvisoryStatementStoreTests
{
    private readonly IMongoDatabase _database;

    public AdvisoryStatementStoreTests(MongoIntegrationFixture fixture)
    {
        _database = fixture.Database ?? throw new ArgumentNullException(nameof(fixture.Database));
    }

    [Fact]
    public async Task InsertAndRetrieve_WritesImmutableStatements()
    {
        var store = new AdvisoryStatementStore(_database);
        var vulnerabilityKey = $"CVE-{Guid.NewGuid():N}";
        var baseTime = DateTimeOffset.UtcNow;

        var statements = new[]
        {
            new AdvisoryStatementRecord(
                Guid.NewGuid(),
                vulnerabilityKey,
                vulnerabilityKey,
                new byte[] { 0x01 },
                baseTime,
                baseTime.AddSeconds(5),
                new BsonDocument("version", "A"),
                new[] { Guid.NewGuid() }),
            new AdvisoryStatementRecord(
                Guid.NewGuid(),
                vulnerabilityKey,
                vulnerabilityKey,
                new byte[] { 0x02 },
                baseTime.AddMinutes(1),
                baseTime.AddMinutes(1).AddSeconds(5),
                new BsonDocument("version", "B"),
                Array.Empty<Guid>()),
        };

        await store.InsertAsync(statements, CancellationToken.None);

        var results = await store.GetStatementsAsync(vulnerabilityKey, null, CancellationToken.None);

        Assert.Equal(2, results.Count);
        Assert.Equal(statements[1].Id, results[0].Id); // sorted by AsOf desc
        Assert.True(results.All(record => record.Payload.Contains("version")));
    }

    [Fact]
    public async Task GetStatements_AsOfFiltersResults()
    {
        var store = new AdvisoryStatementStore(_database);
        var vulnerabilityKey = $"CVE-{Guid.NewGuid():N}";
        var baseTime = DateTimeOffset.UtcNow;

        var early = new AdvisoryStatementRecord(
            Guid.NewGuid(),
            vulnerabilityKey,
            vulnerabilityKey,
            new byte[] { 0xAA },
            baseTime,
            baseTime.AddSeconds(10),
            new BsonDocument("state", "early"),
            Array.Empty<Guid>());

        var late = new AdvisoryStatementRecord(
            Guid.NewGuid(),
            vulnerabilityKey,
            vulnerabilityKey,
            new byte[] { 0xBB },
            baseTime.AddMinutes(5),
            baseTime.AddMinutes(5).AddSeconds(10),
            new BsonDocument("state", "late"),
            Array.Empty<Guid>());

        await store.InsertAsync(new[] { early, late }, CancellationToken.None);

        var results = await store.GetStatementsAsync(vulnerabilityKey, baseTime.AddMinutes(1), CancellationToken.None);

        Assert.Single(results);
        Assert.Equal("early", results[0].Payload["state"].AsString);
    }
}
@@ -0,0 +1,200 @@
using System.Diagnostics;
using System.Linq;
using System.Threading;
using Microsoft.Extensions.Logging.Abstractions;
using Microsoft.Extensions.Options;
using StellaOps.Concelier.Models;
using StellaOps.Concelier.Storage.Mongo;
using StellaOps.Concelier.Storage.Mongo.Advisories;
using StellaOps.Concelier.Storage.Mongo.Aliases;
using StellaOps.Concelier.Storage.Mongo.Migrations;
using Xunit;
using Xunit.Abstractions;

namespace StellaOps.Concelier.Storage.Mongo.Tests;

[Collection("mongo-fixture")]
public sealed class AdvisoryStorePerformanceTests : IClassFixture<MongoIntegrationFixture>
{
    private const int LargeAdvisoryCount = 30;
    private const int AliasesPerAdvisory = 24;
    private const int ReferencesPerAdvisory = 180;
    private const int AffectedPackagesPerAdvisory = 140;
    private const int VersionRangesPerPackage = 4;
    private const int CvssMetricsPerAdvisory = 24;
    private const int ProvenanceEntriesPerAdvisory = 16;
    private static readonly string LargeSummary = new('A', 128 * 1024);
    private static readonly DateTimeOffset BasePublished = new(2024, 1, 1, 0, 0, 0, TimeSpan.Zero);
    private static readonly DateTimeOffset BaseRecorded = new(2024, 1, 1, 0, 0, 0, TimeSpan.Zero);
    private static readonly TimeSpan TotalBudget = TimeSpan.FromSeconds(28);
    private const double UpsertBudgetPerAdvisoryMs = 500;
    private const double FetchBudgetPerAdvisoryMs = 200;
    private const double FindBudgetPerAdvisoryMs = 200;

    private readonly MongoIntegrationFixture _fixture;
    private readonly ITestOutputHelper _output;

    public AdvisoryStorePerformanceTests(MongoIntegrationFixture fixture, ITestOutputHelper output)
    {
        _fixture = fixture;
        _output = output;
    }

    [Fact]
    public async Task UpsertAndQueryLargeAdvisories_CompletesWithinBudget()
    {
        var databaseName = $"concelier-performance-{Guid.NewGuid():N}";
        var database = _fixture.Client.GetDatabase(databaseName);

        try
        {
            var migrationRunner = new MongoMigrationRunner(
                database,
                Array.Empty<IMongoMigration>(),
                NullLogger<MongoMigrationRunner>.Instance,
                TimeProvider.System);

            var bootstrapper = new MongoBootstrapper(
                database,
                Options.Create(new MongoStorageOptions()),
                NullLogger<MongoBootstrapper>.Instance,
                migrationRunner);
            await bootstrapper.InitializeAsync(CancellationToken.None);

            var aliasStore = new AliasStore(database, NullLogger<AliasStore>.Instance);
            var store = new AdvisoryStore(
                database,
                aliasStore,
                NullLogger<AdvisoryStore>.Instance,
                Options.Create(new MongoStorageOptions()),
                TimeProvider.System);
            using var cts = new CancellationTokenSource(TimeSpan.FromSeconds(45));

            // Warm up collections (indexes, serialization caches) so perf timings exclude one-time setup work.
            var warmup = CreateLargeAdvisory(-1);
            await store.UpsertAsync(warmup, cts.Token);
            _ = await store.FindAsync(warmup.AdvisoryKey, cts.Token);
            _ = await store.GetRecentAsync(1, cts.Token);

            var advisories = Enumerable.Range(0, LargeAdvisoryCount)
                .Select(CreateLargeAdvisory)
                .ToArray();

            var upsertWatch = Stopwatch.StartNew();
            foreach (var advisory in advisories)
            {
                await store.UpsertAsync(advisory, cts.Token);
            }

            upsertWatch.Stop();
            var upsertPerAdvisory = upsertWatch.Elapsed.TotalMilliseconds / LargeAdvisoryCount;

            var fetchWatch = Stopwatch.StartNew();
            var recent = await store.GetRecentAsync(LargeAdvisoryCount, cts.Token);
            fetchWatch.Stop();
            var fetchPerAdvisory = fetchWatch.Elapsed.TotalMilliseconds / LargeAdvisoryCount;

            Assert.Equal(LargeAdvisoryCount, recent.Count);

            var findWatch = Stopwatch.StartNew();
            foreach (var advisory in advisories)
            {
                var fetched = await store.FindAsync(advisory.AdvisoryKey, cts.Token);
                Assert.NotNull(fetched);
            }

            findWatch.Stop();
            var findPerAdvisory = findWatch.Elapsed.TotalMilliseconds / LargeAdvisoryCount;

            var totalElapsed = upsertWatch.Elapsed + fetchWatch.Elapsed + findWatch.Elapsed;

            _output.WriteLine($"Upserted {LargeAdvisoryCount} large advisories in {upsertWatch.Elapsed} ({upsertPerAdvisory:F2} ms/doc).");
            _output.WriteLine($"Fetched recent advisories in {fetchWatch.Elapsed} ({fetchPerAdvisory:F2} ms/doc).");
            _output.WriteLine($"Looked up advisories individually in {findWatch.Elapsed} ({findPerAdvisory:F2} ms/doc).");
            _output.WriteLine($"Total elapsed {totalElapsed}.");

            Assert.True(upsertPerAdvisory <= UpsertBudgetPerAdvisoryMs, $"Upsert exceeded {UpsertBudgetPerAdvisoryMs} ms per advisory: {upsertPerAdvisory:F2} ms.");
            Assert.True(fetchPerAdvisory <= FetchBudgetPerAdvisoryMs, $"GetRecent exceeded {FetchBudgetPerAdvisoryMs} ms per advisory: {fetchPerAdvisory:F2} ms.");
            Assert.True(findPerAdvisory <= FindBudgetPerAdvisoryMs, $"Find exceeded {FindBudgetPerAdvisoryMs} ms per advisory: {findPerAdvisory:F2} ms.");
            Assert.True(totalElapsed <= TotalBudget, $"Mongo advisory operations exceeded total budget {TotalBudget}: {totalElapsed}.");
        }
        finally
        {
            await _fixture.Client.DropDatabaseAsync(databaseName);
        }
    }

    private static Advisory CreateLargeAdvisory(int index)
    {
        var baseKey = $"ADV-LARGE-{index:D4}";
        var published = BasePublished.AddDays(index);
        var modified = published.AddHours(6);

        var aliases = Enumerable.Range(0, AliasesPerAdvisory)
            .Select(i => $"ALIAS-{baseKey}-{i:D4}")
            .ToArray();

        var provenance = Enumerable.Range(0, ProvenanceEntriesPerAdvisory)
            .Select(i => new AdvisoryProvenance(
                source: i % 2 == 0 ? "nvd" : "vendor",
                kind: i % 3 == 0 ? "normalized" : "enriched",
                value: $"prov-{baseKey}-{i:D3}",
                recordedAt: BaseRecorded.AddDays(i)))
            .ToArray();

        var references = Enumerable.Range(0, ReferencesPerAdvisory)
            .Select(i => new AdvisoryReference(
                url: $"https://vuln.example.com/{baseKey}/ref/{i:D4}",
                kind: i % 2 == 0 ? "advisory" : "article",
                sourceTag: $"tag-{i % 7}",
                summary: $"Reference {baseKey} #{i}",
                provenance: provenance[i % provenance.Length]))
            .ToArray();

        var affectedPackages = Enumerable.Range(0, AffectedPackagesPerAdvisory)
            .Select(i => new AffectedPackage(
                type: i % 3 == 0 ? AffectedPackageTypes.Rpm : AffectedPackageTypes.Deb,
                identifier: $"pkg/{baseKey}/{i:D4}",
                platform: i % 4 == 0 ? "linux/x86_64" : "linux/aarch64",
                versionRanges: Enumerable.Range(0, VersionRangesPerPackage)
                    .Select(r => new AffectedVersionRange(
                        rangeKind: r % 2 == 0 ? "semver" : "evr",
                        introducedVersion: $"1.{index}.{i}.{r}",
                        fixedVersion: $"2.{index}.{i}.{r}",
                        lastAffectedVersion: $"1.{index}.{i}.{r}",
                        rangeExpression: $">=1.{index}.{i}.{r} <2.{index}.{i}.{r}",
                        provenance: provenance[(i + r) % provenance.Length]))
                    .ToArray(),
                statuses: Array.Empty<AffectedPackageStatus>(),
                provenance: new[]
                {
                    provenance[i % provenance.Length],
                    provenance[(i + 3) % provenance.Length],
                }))
            .ToArray();

        var cvssMetrics = Enumerable.Range(0, CvssMetricsPerAdvisory)
            .Select(i => new CvssMetric(
                version: i % 2 == 0 ? "3.1" : "2.0",
                vector: $"CVSS:3.1/AV:N/AC:L/PR:N/UI:N/S:U/C:H/I:H/A:{(i % 3 == 0 ? "H" : "L")}",
                baseScore: Math.Max(0, 9.8 - i * 0.2),
                baseSeverity: i % 3 == 0 ? "critical" : "high",
                provenance: provenance[i % provenance.Length]))
            .ToArray();

        return new Advisory(
            advisoryKey: baseKey,
            title: $"Large advisory {baseKey}",
            summary: LargeSummary,
            language: "en",
            published: published,
            modified: modified,
            severity: "critical",
            exploitKnown: index % 2 == 0,
            aliases: aliases,
            references: references,
            affectedPackages: affectedPackages,
            cvssMetrics: cvssMetrics,
            provenance: provenance);
    }
}
@@ -0,0 +1,305 @@
using System;
using System.Collections.Generic;
using System.Linq;
using Microsoft.Extensions.Logging.Abstractions;
using Microsoft.Extensions.Options;
using MongoDB.Driver;
using StellaOps.Concelier.Models;
using StellaOps.Concelier.Storage.Mongo.Advisories;
using StellaOps.Concelier.Storage.Mongo.Aliases;

namespace StellaOps.Concelier.Storage.Mongo.Tests;

[Collection("mongo-fixture")]
public sealed class AdvisoryStoreTests : IClassFixture<MongoIntegrationFixture>
{
    private readonly MongoIntegrationFixture _fixture;

    public AdvisoryStoreTests(MongoIntegrationFixture fixture)
    {
        _fixture = fixture;
    }

    [Fact]
    public async Task UpsertAndFetchAdvisory()
    {
        await DropCollectionAsync(MongoStorageDefaults.Collections.Advisory);
        await DropCollectionAsync(MongoStorageDefaults.Collections.Alias);

        var aliasStore = new AliasStore(_fixture.Database, NullLogger<AliasStore>.Instance);
        var store = new AdvisoryStore(
            _fixture.Database,
            aliasStore,
            NullLogger<AdvisoryStore>.Instance,
            Options.Create(new MongoStorageOptions()),
            TimeProvider.System);
        var advisory = new Advisory(
            advisoryKey: "ADV-1",
            title: "Sample Advisory",
            summary: "Demo",
            language: "en",
            published: DateTimeOffset.UtcNow,
            modified: DateTimeOffset.UtcNow,
            severity: "medium",
            exploitKnown: false,
            aliases: new[] { "ALIAS-1" },
            references: Array.Empty<AdvisoryReference>(),
            affectedPackages: Array.Empty<AffectedPackage>(),
            cvssMetrics: Array.Empty<CvssMetric>(),
            provenance: Array.Empty<AdvisoryProvenance>());

        await store.UpsertAsync(advisory, CancellationToken.None);

        var fetched = await store.FindAsync("ADV-1", CancellationToken.None);
        Assert.NotNull(fetched);
        Assert.Equal(advisory.AdvisoryKey, fetched!.AdvisoryKey);

        var recent = await store.GetRecentAsync(5, CancellationToken.None);
        Assert.NotEmpty(recent);

        var aliases = await aliasStore.GetByAdvisoryAsync("ADV-1", CancellationToken.None);
        Assert.Contains(aliases, record => record.Scheme == AliasStoreConstants.PrimaryScheme && record.Value == "ADV-1");
        Assert.Contains(aliases, record => record.Value == "ALIAS-1");
    }

    [Fact]
    public async Task RangePrimitives_RoundTripThroughMongo()
    {
        await DropCollectionAsync(MongoStorageDefaults.Collections.Advisory);
        await DropCollectionAsync(MongoStorageDefaults.Collections.Alias);

        var aliasStore = new AliasStore(_fixture.Database, NullLogger<AliasStore>.Instance);
        var store = new AdvisoryStore(
            _fixture.Database,
            aliasStore,
            NullLogger<AdvisoryStore>.Instance,
            Options.Create(new MongoStorageOptions()),
            TimeProvider.System);

        var recordedAt = new DateTimeOffset(2025, 1, 1, 0, 0, 0, TimeSpan.Zero);
        var provenance = new AdvisoryProvenance("source-x", "mapper", "payload-123", recordedAt);
        var rangePrimitives = new RangePrimitives(
            new SemVerPrimitive(
                Introduced: "1.0.0",
                IntroducedInclusive: true,
                Fixed: "1.2.0",
                FixedInclusive: false,
                LastAffected: "1.1.5",
                LastAffectedInclusive: true,
                ConstraintExpression: ">=1.0.0 <1.2.0"),
            new NevraPrimitive(
                Introduced: new NevraComponent("pkg", 0, "1.0.0", "1", "x86_64"),
                Fixed: new NevraComponent("pkg", 1, "1.2.0", "2", "x86_64"),
                LastAffected: null),
            new EvrPrimitive(
                Introduced: new EvrComponent(1, "1.0.0", "1"),
                Fixed: null,
                LastAffected: new EvrComponent(1, "1.1.5", null)),
            new Dictionary<string, string>(StringComparer.Ordinal)
            {
                ["channel"] = "stable",
                ["notesHash"] = "abc123",
            });

        var versionRange = new AffectedVersionRange(
            rangeKind: "semver",
            introducedVersion: "1.0.0",
            fixedVersion: "1.2.0",
            lastAffectedVersion: "1.1.5",
            rangeExpression: ">=1.0.0 <1.2.0",
            provenance,
            rangePrimitives);

        var affectedPackage = new AffectedPackage(
            type: "semver",
            identifier: "pkg@1.x",
            platform: "linux",
            versionRanges: new[] { versionRange },
            statuses: Array.Empty<AffectedPackageStatus>(),
            provenance: new[] { provenance });

        var advisory = new Advisory(
            advisoryKey: "ADV-RANGE-1",
            title: "Sample Range Primitive",
            summary: "Testing range primitive persistence.",
            language: "en",
            published: recordedAt,
            modified: recordedAt,
            severity: "medium",
            exploitKnown: false,
            aliases: new[] { "CVE-2025-0001" },
            references: Array.Empty<AdvisoryReference>(),
            affectedPackages: new[] { affectedPackage },
            cvssMetrics: Array.Empty<CvssMetric>(),
            provenance: new[] { provenance });

        await store.UpsertAsync(advisory, CancellationToken.None);

        var fetched = await store.FindAsync("ADV-RANGE-1", CancellationToken.None);
        Assert.NotNull(fetched);
        var fetchedPackage = Assert.Single(fetched!.AffectedPackages);
        var fetchedRange = Assert.Single(fetchedPackage.VersionRanges);

        Assert.Equal(versionRange.RangeKind, fetchedRange.RangeKind);
        Assert.Equal(versionRange.IntroducedVersion, fetchedRange.IntroducedVersion);
        Assert.Equal(versionRange.FixedVersion, fetchedRange.FixedVersion);
        Assert.Equal(versionRange.LastAffectedVersion, fetchedRange.LastAffectedVersion);
        Assert.Equal(versionRange.RangeExpression, fetchedRange.RangeExpression);
        Assert.Equal(versionRange.Provenance.Source, fetchedRange.Provenance.Source);
        Assert.Equal(versionRange.Provenance.Kind, fetchedRange.Provenance.Kind);
        Assert.Equal(versionRange.Provenance.Value, fetchedRange.Provenance.Value);
        Assert.Equal(versionRange.Provenance.DecisionReason, fetchedRange.Provenance.DecisionReason);
        Assert.Equal(versionRange.Provenance.RecordedAt, fetchedRange.Provenance.RecordedAt);
        Assert.True(versionRange.Provenance.FieldMask.SequenceEqual(fetchedRange.Provenance.FieldMask));

        Assert.NotNull(fetchedRange.Primitives);
        Assert.Equal(rangePrimitives.SemVer, fetchedRange.Primitives!.SemVer);
        Assert.Equal(rangePrimitives.Nevra, fetchedRange.Primitives.Nevra);
        Assert.Equal(rangePrimitives.Evr, fetchedRange.Primitives.Evr);
        Assert.Equal(rangePrimitives.VendorExtensions, fetchedRange.Primitives.VendorExtensions);
    }

    [Fact]
    public async Task UpsertAsync_SkipsNormalizedVersionsWhenFeatureDisabled()
    {
        await DropCollectionAsync(MongoStorageDefaults.Collections.Advisory);
        await DropCollectionAsync(MongoStorageDefaults.Collections.Alias);

        var aliasStore = new AliasStore(_fixture.Database, NullLogger<AliasStore>.Instance);
        var store = new AdvisoryStore(
            _fixture.Database,
            aliasStore,
            NullLogger<AdvisoryStore>.Instance,
            Options.Create(new MongoStorageOptions { EnableSemVerStyle = false }),
            TimeProvider.System);

        var advisory = CreateNormalizedAdvisory("ADV-NORM-DISABLED");
        await store.UpsertAsync(advisory, CancellationToken.None);

        var document = await _fixture.Database
            .GetCollection<AdvisoryDocument>(MongoStorageDefaults.Collections.Advisory)
            .Find(x => x.AdvisoryKey == advisory.AdvisoryKey)
            .FirstOrDefaultAsync();

        Assert.NotNull(document);
        Assert.True(document!.NormalizedVersions is null || document.NormalizedVersions.Count == 0);
    }

    [Fact]
    public async Task UpsertAsync_PopulatesNormalizedVersionsWhenFeatureEnabled()
    {
        await DropCollectionAsync(MongoStorageDefaults.Collections.Advisory);
        await DropCollectionAsync(MongoStorageDefaults.Collections.Alias);

        var aliasStore = new AliasStore(_fixture.Database, NullLogger<AliasStore>.Instance);
        var store = new AdvisoryStore(
            _fixture.Database,
            aliasStore,
            NullLogger<AdvisoryStore>.Instance,
            Options.Create(new MongoStorageOptions { EnableSemVerStyle = true }),
            TimeProvider.System);

        var advisory = CreateNormalizedAdvisory("ADV-NORM-ENABLED");
        await store.UpsertAsync(advisory, CancellationToken.None);

        var document = await _fixture.Database
            .GetCollection<AdvisoryDocument>(MongoStorageDefaults.Collections.Advisory)
            .Find(x => x.AdvisoryKey == advisory.AdvisoryKey)
            .FirstOrDefaultAsync();

        Assert.NotNull(document);
        var normalizedCollection = document!.NormalizedVersions;
        Assert.NotNull(normalizedCollection);
        var normalized = Assert.Single(normalizedCollection!);
        Assert.Equal("pkg:npm/example", normalized.PackageId);
        Assert.Equal(AffectedPackageTypes.SemVer, normalized.PackageType);
        Assert.Equal(NormalizedVersionSchemes.SemVer, normalized.Scheme);
        Assert.Equal(NormalizedVersionRuleTypes.Range, normalized.Type);
        Assert.Equal("range", normalized.Style);
        Assert.Equal("1.0.0", normalized.Min);
        Assert.True(normalized.MinInclusive);
        Assert.Equal("2.0.0", normalized.Max);
        Assert.False(normalized.MaxInclusive);
        Assert.Null(normalized.Value);
        Assert.Equal("ghsa:pkg:npm/example", normalized.Notes);
        Assert.Equal("range-decision", normalized.DecisionReason);
        Assert.Equal(">= 1.0.0 < 2.0.0", normalized.Constraint);
        Assert.Equal("ghsa", normalized.Source);
        Assert.Equal(new DateTime(2025, 10, 9, 0, 0, 0, DateTimeKind.Utc), normalized.RecordedAtUtc);
    }

    private static Advisory CreateNormalizedAdvisory(string advisoryKey)
    {
        var recordedAt = new DateTimeOffset(2025, 10, 9, 0, 0, 0, TimeSpan.Zero);
        var rangeProvenance = new AdvisoryProvenance(
            source: "ghsa",
            kind: "affected-range",
            value: "pkg:npm/example",
            recordedAt: recordedAt,
            fieldMask: new[] { "affectedpackages[].versionranges[]" },
            decisionReason: "range-decision");

        var semverPrimitive = new SemVerPrimitive(
            Introduced: "1.0.0",
            IntroducedInclusive: true,
            Fixed: "2.0.0",
            FixedInclusive: false,
            LastAffected: null,
            LastAffectedInclusive: false,
            ConstraintExpression: ">= 1.0.0 < 2.0.0");

        var normalizedRule = semverPrimitive.ToNormalizedVersionRule("ghsa:pkg:npm/example")!;
        var versionRange = new AffectedVersionRange(
            rangeKind: "semver",
            introducedVersion: "1.0.0",
            fixedVersion: "2.0.0",
            lastAffectedVersion: null,
            rangeExpression: ">= 1.0.0 < 2.0.0",
            provenance: rangeProvenance,
            primitives: new RangePrimitives(semverPrimitive, null, null, null));

        var package = new AffectedPackage(
            type: AffectedPackageTypes.SemVer,
            identifier: "pkg:npm/example",
            platform: "npm",
            versionRanges: new[] { versionRange },
            statuses: Array.Empty<AffectedPackageStatus>(),
            provenance: new[] { rangeProvenance },
            normalizedVersions: new[] { normalizedRule });

        var advisoryProvenance = new AdvisoryProvenance(
            source: "ghsa",
            kind: "document",
            value: advisoryKey,
            recordedAt: recordedAt,
            fieldMask: new[] { "advisory" },
            decisionReason: "document-decision");

        return new Advisory(
            advisoryKey: advisoryKey,
            title: "Normalized advisory",
            summary: "Contains normalized versions for storage testing.",
            language: "en",
            published: recordedAt,
            modified: recordedAt,
            severity: "medium",
            exploitKnown: false,
            aliases: new[] { $"{advisoryKey}-ALIAS" },
            references: Array.Empty<AdvisoryReference>(),
            affectedPackages: new[] { package },
            cvssMetrics: Array.Empty<CvssMetric>(),
            provenance: new[] { advisoryProvenance });
    }

    private async Task DropCollectionAsync(string collectionName)
    {
        try
        {
            await _fixture.Database.DropCollectionAsync(collectionName);
        }
        catch (MongoDB.Driver.MongoCommandException ex) when (ex.CodeName == "NamespaceNotFound" || ex.Message.Contains("ns not found", StringComparison.OrdinalIgnoreCase))
        {
            // ignore missing collection
        }
    }
}
@@ -0,0 +1,60 @@
using System;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.Extensions.Logging.Abstractions;
using MongoDB.Driver;
using StellaOps.Concelier.Storage.Mongo;
using StellaOps.Concelier.Storage.Mongo.Aliases;

namespace StellaOps.Concelier.Storage.Mongo.Tests;

[Collection("mongo-fixture")]
public sealed class AliasStoreTests : IClassFixture<MongoIntegrationFixture>
{
    private readonly MongoIntegrationFixture _fixture;

    public AliasStoreTests(MongoIntegrationFixture fixture)
    {
        _fixture = fixture;
    }

    [Fact]
    public async Task ReplaceAsync_UpsertsAliases_AndDetectsCollision()
    {
        await DropAliasCollectionAsync();
        var store = new AliasStore(_fixture.Database, NullLogger<AliasStore>.Instance);

        var timestamp = DateTimeOffset.UtcNow;
        await store.ReplaceAsync(
            "ADV-1",
            new[] { new AliasEntry("CVE", "CVE-2025-1234"), new AliasEntry(AliasStoreConstants.PrimaryScheme, "ADV-1") },
            timestamp,
            CancellationToken.None);

        var firstAliases = await store.GetByAdvisoryAsync("ADV-1", CancellationToken.None);
        Assert.Contains(firstAliases, record => record.Scheme == "CVE" && record.Value == "CVE-2025-1234");

        var result = await store.ReplaceAsync(
            "ADV-2",
            new[] { new AliasEntry("CVE", "CVE-2025-1234"), new AliasEntry(AliasStoreConstants.PrimaryScheme, "ADV-2") },
            timestamp.AddMinutes(1),
            CancellationToken.None);

        Assert.NotEmpty(result.Collisions);
        var collision = Assert.Single(result.Collisions);
        Assert.Equal("CVE", collision.Scheme);
        Assert.Contains("ADV-1", collision.AdvisoryKeys);
        Assert.Contains("ADV-2", collision.AdvisoryKeys);
    }

    private async Task DropAliasCollectionAsync()
    {
        try
        {
            await _fixture.Database.DropCollectionAsync(MongoStorageDefaults.Collections.Alias);
        }
        catch (MongoDB.Driver.MongoCommandException ex) when (ex.CodeName == "NamespaceNotFound" || ex.Message.Contains("ns not found", StringComparison.OrdinalIgnoreCase))
        {
            // ignore missing collection
        }
    }
}
@@ -0,0 +1,51 @@
using Microsoft.Extensions.Logging.Abstractions;
using StellaOps.Concelier.Storage.Mongo.Documents;

namespace StellaOps.Concelier.Storage.Mongo.Tests;

[Collection("mongo-fixture")]
public sealed class DocumentStoreTests : IClassFixture<MongoIntegrationFixture>
{
    private readonly MongoIntegrationFixture _fixture;

    public DocumentStoreTests(MongoIntegrationFixture fixture)
    {
        _fixture = fixture;
    }

    [Fact]
    public async Task UpsertAndLookupDocument()
    {
        var store = new DocumentStore(_fixture.Database, NullLogger<DocumentStore>.Instance);
        var id = Guid.NewGuid();
        var record = new DocumentRecord(
            id,
            "source",
            "https://example.com/advisory.json",
            DateTimeOffset.UtcNow,
            "sha123",
            "pending",
            "application/json",
            new Dictionary<string, string> { ["etag"] = "abc" },
            new Dictionary<string, string> { ["note"] = "test" },
            "etag-value",
            DateTimeOffset.UtcNow,
            null,
            DateTimeOffset.UtcNow.AddDays(30));

        var upserted = await store.UpsertAsync(record, CancellationToken.None);
        Assert.Equal(id, upserted.Id);

        var fetched = await store.FindBySourceAndUriAsync("source", "https://example.com/advisory.json", CancellationToken.None);
        Assert.NotNull(fetched);
        Assert.Equal("pending", fetched!.Status);
        Assert.Equal("test", fetched.Metadata!["note"]);

        var statusUpdated = await store.UpdateStatusAsync(id, "processed", CancellationToken.None);
        Assert.True(statusUpdated);

        var refreshed = await store.FindAsync(id, CancellationToken.None);
        Assert.NotNull(refreshed);
        Assert.Equal("processed", refreshed!.Status);
    }
}
src/StellaOps.Concelier.Storage.Mongo.Tests/DtoStoreTests.cs
@@ -0,0 +1,40 @@
using Microsoft.Extensions.Logging.Abstractions;
using MongoDB.Bson;
using StellaOps.Concelier.Storage.Mongo.Dtos;

namespace StellaOps.Concelier.Storage.Mongo.Tests;

[Collection("mongo-fixture")]
public sealed class DtoStoreTests : IClassFixture<MongoIntegrationFixture>
{
    private readonly MongoIntegrationFixture _fixture;

    public DtoStoreTests(MongoIntegrationFixture fixture)
    {
        _fixture = fixture;
    }

    [Fact]
    public async Task UpsertAndLookupDto()
    {
        var store = new DtoStore(_fixture.Database, NullLogger<DtoStore>.Instance);
        var record = new DtoRecord(
            Guid.NewGuid(),
            Guid.NewGuid(),
            "source",
            "1.0",
            new BsonDocument("value", 1),
            DateTimeOffset.UtcNow);

        var upserted = await store.UpsertAsync(record, CancellationToken.None);
        Assert.Equal(record.DocumentId, upserted.DocumentId);

        var fetched = await store.FindByDocumentIdAsync(record.DocumentId, CancellationToken.None);
        Assert.NotNull(fetched);
        Assert.Equal(1, fetched!.Payload["value"].AsInt32);

        var bySource = await store.GetBySourceAsync("source", 10, CancellationToken.None);
        Assert.Single(bySource);
        Assert.Equal(record.DocumentId, bySource[0].DocumentId);
    }
}
@@ -0,0 +1,208 @@
using System;
using System.Collections.Generic;
using System.Threading;
using System.Threading.Tasks;
using StellaOps.Concelier.Storage.Mongo.Exporting;

namespace StellaOps.Concelier.Storage.Mongo.Tests;

public sealed class ExportStateManagerTests
{
    [Fact]
    public async Task StoreFullExportInitializesBaseline()
    {
        var store = new InMemoryExportStateStore();
        var timeProvider = new TestTimeProvider(DateTimeOffset.Parse("2024-07-20T12:00:00Z"));
        var manager = new ExportStateManager(store, timeProvider);

        var record = await manager.StoreFullExportAsync(
            exporterId: "export:json",
            exportId: "20240720T120000Z",
            exportDigest: "sha256:abcd",
            cursor: "cursor-1",
            targetRepository: "registry.local/json",
            exporterVersion: "1.0.0",
            resetBaseline: true,
            manifest: Array.Empty<ExportFileRecord>(),
            cancellationToken: CancellationToken.None);

        Assert.Equal("export:json", record.Id);
        Assert.Equal("20240720T120000Z", record.BaseExportId);
        Assert.Equal("sha256:abcd", record.BaseDigest);
        Assert.Equal("sha256:abcd", record.LastFullDigest);
        Assert.Null(record.LastDeltaDigest);
        Assert.Equal("cursor-1", record.ExportCursor);
        Assert.Equal("registry.local/json", record.TargetRepository);
        Assert.Equal("1.0.0", record.ExporterVersion);
        Assert.Equal(timeProvider.Now, record.UpdatedAt);
    }

    [Fact]
    public async Task StoreFullExport_ResetBaselineOverridesExisting()
    {
        var store = new InMemoryExportStateStore();
        var timeProvider = new TestTimeProvider(DateTimeOffset.Parse("2024-07-20T12:00:00Z"));
        var manager = new ExportStateManager(store, timeProvider);

        await manager.StoreFullExportAsync(
            exporterId: "export:json",
            exportId: "20240720T120000Z",
            exportDigest: "sha256:base",
            cursor: "cursor-base",
            targetRepository: null,
            exporterVersion: "1.0.0",
            resetBaseline: true,
            manifest: Array.Empty<ExportFileRecord>(),
            cancellationToken: CancellationToken.None);

        timeProvider.Advance(TimeSpan.FromMinutes(5));
        var withoutReset = await manager.StoreFullExportAsync(
            exporterId: "export:json",
            exportId: "20240720T120500Z",
            exportDigest: "sha256:new",
            cursor: "cursor-new",
            targetRepository: null,
            exporterVersion: "1.0.1",
            resetBaseline: false,
            manifest: Array.Empty<ExportFileRecord>(),
            cancellationToken: CancellationToken.None);

        Assert.Equal("20240720T120000Z", withoutReset.BaseExportId);
        Assert.Equal("sha256:base", withoutReset.BaseDigest);
        Assert.Equal("sha256:new", withoutReset.LastFullDigest);
        Assert.Equal("cursor-new", withoutReset.ExportCursor);
        Assert.Equal(timeProvider.Now, withoutReset.UpdatedAt);

        timeProvider.Advance(TimeSpan.FromMinutes(5));
        var reset = await manager.StoreFullExportAsync(
            exporterId: "export:json",
            exportId: "20240720T121000Z",
            exportDigest: "sha256:final",
            cursor: "cursor-final",
            targetRepository: null,
            exporterVersion: "1.0.2",
            resetBaseline: true,
            manifest: Array.Empty<ExportFileRecord>(),
            cancellationToken: CancellationToken.None);

        Assert.Equal("20240720T121000Z", reset.BaseExportId);
        Assert.Equal("sha256:final", reset.BaseDigest);
        Assert.Equal("sha256:final", reset.LastFullDigest);
        Assert.Null(reset.LastDeltaDigest);
        Assert.Equal("cursor-final", reset.ExportCursor);
        Assert.Equal(timeProvider.Now, reset.UpdatedAt);
    }

    [Fact]
    public async Task StoreFullExport_ResetsBaselineWhenRepositoryChanges()
    {
        var store = new InMemoryExportStateStore();
        var timeProvider = new TestTimeProvider(DateTimeOffset.Parse("2024-07-21T08:00:00Z"));
        var manager = new ExportStateManager(store, timeProvider);

        await manager.StoreFullExportAsync(
            exporterId: "export:json",
            exportId: "20240721T080000Z",
            exportDigest: "sha256:base",
            cursor: "cursor-base",
            targetRepository: "registry/v1/json",
            exporterVersion: "1.0.0",
            resetBaseline: true,
            manifest: Array.Empty<ExportFileRecord>(),
            cancellationToken: CancellationToken.None);

        timeProvider.Advance(TimeSpan.FromMinutes(10));
        var updated = await manager.StoreFullExportAsync(
            exporterId: "export:json",
            exportId: "20240721T081000Z",
            exportDigest: "sha256:new",
            cursor: "cursor-new",
            targetRepository: "registry/v2/json",
            exporterVersion: "1.1.0",
            resetBaseline: false,
            manifest: Array.Empty<ExportFileRecord>(),
            cancellationToken: CancellationToken.None);

        Assert.Equal("20240721T081000Z", updated.BaseExportId);
        Assert.Equal("sha256:new", updated.BaseDigest);
        Assert.Equal("sha256:new", updated.LastFullDigest);
        Assert.Equal("registry/v2/json", updated.TargetRepository);
    }

    [Fact]
    public async Task StoreDeltaExportRequiresBaseline()
    {
        var store = new InMemoryExportStateStore();
        var manager = new ExportStateManager(store);

        await Assert.ThrowsAsync<InvalidOperationException>(() => manager.StoreDeltaExportAsync(
            exporterId: "export:json",
            deltaDigest: "sha256:def",
            cursor: null,
            exporterVersion: "1.0.1",
            manifest: Array.Empty<ExportFileRecord>(),
            cancellationToken: CancellationToken.None));
    }

    [Fact]
    public async Task StoreDeltaExportUpdatesExistingState()
    {
        var store = new InMemoryExportStateStore();
        var timeProvider = new TestTimeProvider(DateTimeOffset.Parse("2024-07-20T12:00:00Z"));
        var manager = new ExportStateManager(store, timeProvider);

        await manager.StoreFullExportAsync(
            exporterId: "export:json",
            exportId: "20240720T120000Z",
            exportDigest: "sha256:abcd",
            cursor: "cursor-1",
            targetRepository: null,
            exporterVersion: "1.0.0",
            resetBaseline: true,
            manifest: Array.Empty<ExportFileRecord>(),
            cancellationToken: CancellationToken.None);

        timeProvider.Advance(TimeSpan.FromMinutes(10));
        var delta = await manager.StoreDeltaExportAsync(
            exporterId: "export:json",
            deltaDigest: "sha256:ef01",
            cursor: "cursor-2",
            exporterVersion: "1.0.1",
            manifest: Array.Empty<ExportFileRecord>(),
            cancellationToken: CancellationToken.None);

        Assert.Equal("sha256:ef01", delta.LastDeltaDigest);
        Assert.Equal("cursor-2", delta.ExportCursor);
        Assert.Equal("1.0.1", delta.ExporterVersion);
        Assert.Equal(timeProvider.Now, delta.UpdatedAt);
        Assert.Equal("sha256:abcd", delta.LastFullDigest);
    }

    private sealed class InMemoryExportStateStore : IExportStateStore
    {
        private readonly Dictionary<string, ExportStateRecord> _records = new(StringComparer.Ordinal);

        public Task<ExportStateRecord?> FindAsync(string id, CancellationToken cancellationToken)
        {
            _records.TryGetValue(id, out var record);
            return Task.FromResult<ExportStateRecord?>(record);
        }

        public Task<ExportStateRecord> UpsertAsync(ExportStateRecord record, CancellationToken cancellationToken)
        {
            _records[record.Id] = record;
            return Task.FromResult(record);
        }
    }

    private sealed class TestTimeProvider : TimeProvider
    {
        public TestTimeProvider(DateTimeOffset start) => Now = start;

        public DateTimeOffset Now { get; private set; }

        public void Advance(TimeSpan delta) => Now = Now.Add(delta);

        public override DateTimeOffset GetUtcNow() => Now;
    }
}
@@ -0,0 +1,42 @@
using System;
using Microsoft.Extensions.Logging.Abstractions;
using StellaOps.Concelier.Storage.Mongo.Exporting;

namespace StellaOps.Concelier.Storage.Mongo.Tests;

[Collection("mongo-fixture")]
public sealed class ExportStateStoreTests : IClassFixture<MongoIntegrationFixture>
{
    private readonly MongoIntegrationFixture _fixture;

    public ExportStateStoreTests(MongoIntegrationFixture fixture)
    {
        _fixture = fixture;
    }

    [Fact]
    public async Task UpsertAndFetchExportState()
    {
        var store = new ExportStateStore(_fixture.Database, NullLogger<ExportStateStore>.Instance);
        var record = new ExportStateRecord(
            Id: "json",
            BaseExportId: "base",
            BaseDigest: "sha-base",
            LastFullDigest: "sha-full",
            LastDeltaDigest: null,
            ExportCursor: "cursor",
            TargetRepository: "repo",
            ExporterVersion: "1.0",
            UpdatedAt: DateTimeOffset.UtcNow,
            Files: Array.Empty<ExportFileRecord>());

        var saved = await store.UpsertAsync(record, CancellationToken.None);
        Assert.Equal("json", saved.Id);
        Assert.Empty(saved.Files);

        var fetched = await store.FindAsync("json", CancellationToken.None);
        Assert.NotNull(fetched);
        Assert.Equal("sha-full", fetched!.LastFullDigest);
        Assert.Empty(fetched.Files);
    }
}
@@ -0,0 +1,35 @@
using Microsoft.Extensions.Logging.Abstractions;
using StellaOps.Concelier.Storage.Mongo.MergeEvents;

namespace StellaOps.Concelier.Storage.Mongo.Tests;

[Collection("mongo-fixture")]
public sealed class MergeEventStoreTests : IClassFixture<MongoIntegrationFixture>
{
    private readonly MongoIntegrationFixture _fixture;

    public MergeEventStoreTests(MongoIntegrationFixture fixture)
    {
        _fixture = fixture;
    }

    [Fact]
    public async Task AppendAndReadMergeEvents()
    {
        var store = new MergeEventStore(_fixture.Database, NullLogger<MergeEventStore>.Instance);
        var record = new MergeEventRecord(
            Guid.NewGuid(),
            "ADV-1",
            new byte[] { 0x01 },
            new byte[] { 0x02 },
            DateTimeOffset.UtcNow,
            new List<Guid> { Guid.NewGuid() },
            Array.Empty<MergeFieldDecision>());

        await store.AppendAsync(record, CancellationToken.None);

        var recent = await store.GetRecentAsync("ADV-1", 10, CancellationToken.None);
        Assert.Single(recent);
        Assert.Equal(record.AfterHash, recent[0].AfterHash);
    }
}
@@ -0,0 +1,282 @@
using System;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.Extensions.Logging.Abstractions;
using Microsoft.Extensions.Options;
using MongoDB.Bson;
using MongoDB.Driver;
using StellaOps.Concelier.Storage.Mongo;
using StellaOps.Concelier.Storage.Mongo.Migrations;
using Xunit;

namespace StellaOps.Concelier.Storage.Mongo.Tests.Migrations;

[Collection("mongo-fixture")]
public sealed class MongoMigrationRunnerTests
{
    private readonly MongoIntegrationFixture _fixture;

    public MongoMigrationRunnerTests(MongoIntegrationFixture fixture)
    {
        _fixture = fixture;
    }

    [Fact]
    public async Task RunAsync_AppliesPendingMigrationsOnce()
    {
        var databaseName = $"concelier-migrations-{Guid.NewGuid():N}";
        var database = _fixture.Client.GetDatabase(databaseName);
        await database.CreateCollectionAsync(MongoStorageDefaults.Collections.Migrations);

        try
        {
            var migration = new TestMigration();
            var runner = new MongoMigrationRunner(
                database,
                new IMongoMigration[] { migration },
                NullLogger<MongoMigrationRunner>.Instance,
                TimeProvider.System);

            await runner.RunAsync(CancellationToken.None);
            await runner.RunAsync(CancellationToken.None);

            Assert.Equal(1, migration.ApplyCount);

            var count = await database
                .GetCollection<BsonDocument>(MongoStorageDefaults.Collections.Migrations)
                .CountDocumentsAsync(FilterDefinition<BsonDocument>.Empty);
            Assert.Equal(1, count);
        }
        finally
        {
            await _fixture.Client.DropDatabaseAsync(databaseName);
        }
    }

    [Fact]
    public async Task EnsureDocumentExpiryIndexesMigration_CreatesTtlIndexWhenRetentionEnabled()
    {
        var databaseName = $"concelier-doc-ttl-{Guid.NewGuid():N}";
        var database = _fixture.Client.GetDatabase(databaseName);
        await database.CreateCollectionAsync(MongoStorageDefaults.Collections.Document);
        await database.CreateCollectionAsync(MongoStorageDefaults.Collections.Migrations);

        try
        {
            var options = Options.Create(new MongoStorageOptions
            {
                RawDocumentRetention = TimeSpan.FromDays(45),
                RawDocumentRetentionTtlGrace = TimeSpan.FromHours(12),
            });

            var migration = new EnsureDocumentExpiryIndexesMigration(options);
            var runner = new MongoMigrationRunner(
                database,
                new IMongoMigration[] { migration },
                NullLogger<MongoMigrationRunner>.Instance,
                TimeProvider.System);

            await runner.RunAsync(CancellationToken.None);

            var indexes = await database
                .GetCollection<BsonDocument>(MongoStorageDefaults.Collections.Document)
                .Indexes.ListAsync();
            var indexList = await indexes.ToListAsync();

            var ttlIndex = indexList.Single(x => x["name"].AsString == "document_expiresAt_ttl");
            Assert.Equal(0, ttlIndex["expireAfterSeconds"].ToDouble());
            Assert.True(ttlIndex["partialFilterExpression"].AsBsonDocument["expiresAt"].AsBsonDocument["$exists"].ToBoolean());
        }
        finally
        {
            await _fixture.Client.DropDatabaseAsync(databaseName);
        }
    }

    [Fact]
    public async Task EnsureDocumentExpiryIndexesMigration_DropsTtlIndexWhenRetentionDisabled()
    {
        var databaseName = $"concelier-doc-notl-{Guid.NewGuid():N}";
        var database = _fixture.Client.GetDatabase(databaseName);
        await database.CreateCollectionAsync(MongoStorageDefaults.Collections.Document);
        await database.CreateCollectionAsync(MongoStorageDefaults.Collections.Migrations);

        try
        {
            var collection = database.GetCollection<BsonDocument>(MongoStorageDefaults.Collections.Document);
            var keys = Builders<BsonDocument>.IndexKeys.Ascending("expiresAt");
            var options = new CreateIndexOptions<BsonDocument>
            {
                Name = "document_expiresAt_ttl",
                ExpireAfter = TimeSpan.Zero,
                PartialFilterExpression = Builders<BsonDocument>.Filter.Exists("expiresAt", true),
            };

            await collection.Indexes.CreateOneAsync(new CreateIndexModel<BsonDocument>(keys, options));

            var migration = new EnsureDocumentExpiryIndexesMigration(Options.Create(new MongoStorageOptions
            {
                RawDocumentRetention = TimeSpan.Zero,
            }));

            var runner = new MongoMigrationRunner(
                database,
                new IMongoMigration[] { migration },
                NullLogger<MongoMigrationRunner>.Instance,
                TimeProvider.System);

            await runner.RunAsync(CancellationToken.None);

            var indexes = await collection.Indexes.ListAsync();
            var indexList = await indexes.ToListAsync();

            Assert.DoesNotContain(indexList, x => x["name"].AsString == "document_expiresAt_ttl");
            var nonTtl = indexList.Single(x => x["name"].AsString == "document_expiresAt");
            Assert.False(nonTtl.Contains("expireAfterSeconds"));
        }
        finally
        {
            await _fixture.Client.DropDatabaseAsync(databaseName);
        }
    }

    [Fact]
    public async Task EnsureGridFsExpiryIndexesMigration_CreatesTtlIndexWhenRetentionEnabled()
    {
        var databaseName = $"concelier-gridfs-ttl-{Guid.NewGuid():N}";
        var database = _fixture.Client.GetDatabase(databaseName);
        await database.CreateCollectionAsync("documents.files");
        await database.CreateCollectionAsync(MongoStorageDefaults.Collections.Migrations);

        try
        {
            var migration = new EnsureGridFsExpiryIndexesMigration(Options.Create(new MongoStorageOptions
            {
                RawDocumentRetention = TimeSpan.FromDays(30),
            }));

            var runner = new MongoMigrationRunner(
                database,
                new IMongoMigration[] { migration },
                NullLogger<MongoMigrationRunner>.Instance,
                TimeProvider.System);

            await runner.RunAsync(CancellationToken.None);

            var indexes = await database.GetCollection<BsonDocument>("documents.files").Indexes.ListAsync();
            var indexList = await indexes.ToListAsync();

            var ttlIndex = indexList.Single(x => x["name"].AsString == "gridfs_files_expiresAt_ttl");
            Assert.Equal(0, ttlIndex["expireAfterSeconds"].ToDouble());
        }
        finally
        {
            await _fixture.Client.DropDatabaseAsync(databaseName);
        }
    }

    [Fact]
    public async Task EnsureGridFsExpiryIndexesMigration_DropsTtlIndexWhenRetentionDisabled()
    {
        var databaseName = $"concelier-gridfs-notl-{Guid.NewGuid():N}";
        var database = _fixture.Client.GetDatabase(databaseName);
        await database.CreateCollectionAsync("documents.files");
        await database.CreateCollectionAsync(MongoStorageDefaults.Collections.Migrations);

        try
        {
            var collection = database.GetCollection<BsonDocument>("documents.files");
            var keys = Builders<BsonDocument>.IndexKeys.Ascending("metadata.expiresAt");
            var options = new CreateIndexOptions<BsonDocument>
            {
                Name = "gridfs_files_expiresAt_ttl",
                ExpireAfter = TimeSpan.Zero,
                PartialFilterExpression = Builders<BsonDocument>.Filter.Exists("metadata.expiresAt", true),
            };

            await collection.Indexes.CreateOneAsync(new CreateIndexModel<BsonDocument>(keys, options));

            var migration = new EnsureGridFsExpiryIndexesMigration(Options.Create(new MongoStorageOptions
            {
                RawDocumentRetention = TimeSpan.Zero,
            }));

            var runner = new MongoMigrationRunner(
                database,
                new IMongoMigration[] { migration },
                NullLogger<MongoMigrationRunner>.Instance,
                TimeProvider.System);

            await runner.RunAsync(CancellationToken.None);

            var indexes = await collection.Indexes.ListAsync();
            var indexList = await indexes.ToListAsync();

            Assert.DoesNotContain(indexList, x => x["name"].AsString == "gridfs_files_expiresAt_ttl");
        }
        finally
        {
            await _fixture.Client.DropDatabaseAsync(databaseName);
        }
    }

    [Fact]
    public async Task EnsureAdvisoryEventCollectionsMigration_CreatesIndexes()
    {
        var databaseName = $"concelier-advisory-events-{Guid.NewGuid():N}";
        var database = _fixture.Client.GetDatabase(databaseName);
        await database.CreateCollectionAsync(MongoStorageDefaults.Collections.AdvisoryStatements);
        await database.CreateCollectionAsync(MongoStorageDefaults.Collections.AdvisoryConflicts);
        await database.CreateCollectionAsync(MongoStorageDefaults.Collections.Migrations);

        try
        {
            var migration = new EnsureAdvisoryEventCollectionsMigration();
            var runner = new MongoMigrationRunner(
                database,
                new IMongoMigration[] { migration },
                NullLogger<MongoMigrationRunner>.Instance,
                TimeProvider.System);

            await runner.RunAsync(CancellationToken.None);

            var statementIndexes = await database
                .GetCollection<BsonDocument>(MongoStorageDefaults.Collections.AdvisoryStatements)
                .Indexes
                .ListAsync();
            var statementIndexNames = (await statementIndexes.ToListAsync()).Select(x => x["name"].AsString).ToArray();

            Assert.Contains("advisory_statements_vulnerability_asof_desc", statementIndexNames);
            Assert.Contains("advisory_statements_statementHash_unique", statementIndexNames);

            var conflictIndexes = await database
                .GetCollection<BsonDocument>(MongoStorageDefaults.Collections.AdvisoryConflicts)
                .Indexes
                .ListAsync();
            var conflictIndexNames = (await conflictIndexes.ToListAsync()).Select(x => x["name"].AsString).ToArray();

            Assert.Contains("advisory_conflicts_vulnerability_asof_desc", conflictIndexNames);
            Assert.Contains("advisory_conflicts_conflictHash_unique", conflictIndexNames);
        }
        finally
        {
            await _fixture.Client.DropDatabaseAsync(databaseName);
        }
    }

    private sealed class TestMigration : IMongoMigration
    {
        public int ApplyCount { get; private set; }

        public string Id => "999_test";

        public string Description => "test migration";

        public Task ApplyAsync(IMongoDatabase database, CancellationToken cancellationToken)
        {
            ApplyCount++;
            return Task.CompletedTask;
        }
    }
}
@@ -0,0 +1,107 @@
using System;
using System.Collections.Immutable;
using System.Linq;
using System.Security.Cryptography;
using System.Text;
using System.Collections.Generic;
using System.Threading;
using System.Threading.Tasks;
using MongoDB.Driver;
using StellaOps.Concelier.Core.Events;
using StellaOps.Concelier.Models;
using StellaOps.Concelier.Storage.Mongo.Conflicts;
using StellaOps.Concelier.Storage.Mongo.Events;
using StellaOps.Concelier.Storage.Mongo.Statements;
using StellaOps.Concelier.Testing;
using Xunit;

namespace StellaOps.Concelier.Storage.Mongo.Tests;

[Collection("mongo-fixture")]
public sealed class MongoAdvisoryEventRepositoryTests
{
    private readonly IMongoDatabase _database;
    private readonly MongoAdvisoryEventRepository _repository;

    public MongoAdvisoryEventRepositoryTests(MongoIntegrationFixture fixture)
    {
        _database = fixture.Database ?? throw new ArgumentNullException(nameof(fixture.Database));
        var statementStore = new AdvisoryStatementStore(_database);
        var conflictStore = new AdvisoryConflictStore(_database);
        _repository = new MongoAdvisoryEventRepository(statementStore, conflictStore);
    }

    [Fact]
    public async Task InsertAndFetchStatements_RoundTripsCanonicalPayload()
    {
        var advisory = CreateSampleAdvisory("CVE-2025-7777", "Sample advisory");
        var canonicalJson = CanonicalJsonSerializer.Serialize(advisory);
        var hash = ImmutableArray.Create(SHA256.HashData(Encoding.UTF8.GetBytes(canonicalJson)));

        var entry = new AdvisoryStatementEntry(
            Guid.NewGuid(),
            "CVE-2025-7777",
            "CVE-2025-7777",
            canonicalJson,
            hash,
            DateTimeOffset.Parse("2025-10-19T14:00:00Z"),
            DateTimeOffset.Parse("2025-10-19T14:05:00Z"),
            ImmutableArray<Guid>.Empty);

        await _repository.InsertStatementsAsync(new[] { entry }, CancellationToken.None);

        var results = await _repository.GetStatementsAsync("CVE-2025-7777", null, CancellationToken.None);

        var snapshot = Assert.Single(results);
        Assert.Equal(entry.StatementId, snapshot.StatementId);
        Assert.Equal(entry.CanonicalJson, snapshot.CanonicalJson);
        Assert.True(entry.StatementHash.SequenceEqual(snapshot.StatementHash));
    }

    [Fact]
    public async Task InsertAndFetchConflicts_PreservesDetails()
    {
        var detailJson = CanonicalJsonSerializer.Serialize(new ConflictPayload("severity", "mismatch"));
        var hash = ImmutableArray.Create(SHA256.HashData(Encoding.UTF8.GetBytes(detailJson)));
        var statementIds = ImmutableArray.Create(Guid.NewGuid(), Guid.NewGuid());

        var entry = new AdvisoryConflictEntry(
            Guid.NewGuid(),
            "CVE-2025-4242",
            detailJson,
            hash,
            DateTimeOffset.Parse("2025-10-19T15:00:00Z"),
            DateTimeOffset.Parse("2025-10-19T15:05:00Z"),
            statementIds);

        await _repository.InsertConflictsAsync(new[] { entry }, CancellationToken.None);

        var results = await _repository.GetConflictsAsync("CVE-2025-4242", null, CancellationToken.None);

        var conflict = Assert.Single(results);
        Assert.Equal(entry.CanonicalJson, conflict.CanonicalJson);
        Assert.True(entry.StatementIds.SequenceEqual(conflict.StatementIds));
        Assert.True(entry.ConflictHash.SequenceEqual(conflict.ConflictHash));
    }

    private static Advisory CreateSampleAdvisory(string key, string summary)
    {
        var provenance = new AdvisoryProvenance("nvd", "document", key, DateTimeOffset.Parse("2025-10-18T00:00:00Z"), new[] { ProvenanceFieldMasks.Advisory });
        return new Advisory(
            key,
            key,
            summary,
            "en",
            DateTimeOffset.Parse("2025-10-17T00:00:00Z"),
            DateTimeOffset.Parse("2025-10-18T00:00:00Z"),
            "medium",
            exploitKnown: false,
            aliases: new[] { key },
            references: Array.Empty<AdvisoryReference>(),
            affectedPackages: Array.Empty<AffectedPackage>(),
            cvssMetrics: Array.Empty<CvssMetric>(),
            provenance: new[] { provenance });
    }

    private sealed record ConflictPayload(string Type, string Reason);
}
@@ -0,0 +1,143 @@
using System;
using System.Linq;
using System.Threading;
using Microsoft.Extensions.Logging.Abstractions;
using Microsoft.Extensions.Options;
using MongoDB.Bson;
using MongoDB.Driver;
using StellaOps.Concelier.Storage.Mongo;
using StellaOps.Concelier.Storage.Mongo.Migrations;
using Xunit;

namespace StellaOps.Concelier.Storage.Mongo.Tests;

[Collection("mongo-fixture")]
public sealed class MongoBootstrapperTests : IClassFixture<MongoIntegrationFixture>
{
    private readonly MongoIntegrationFixture _fixture;

    public MongoBootstrapperTests(MongoIntegrationFixture fixture)
    {
        _fixture = fixture;
    }

    [Fact]
    public async Task InitializeAsync_CreatesNormalizedIndexesWhenSemVerStyleEnabled()
    {
        var databaseName = $"concelier-bootstrap-semver-{Guid.NewGuid():N}";
        var database = _fixture.Client.GetDatabase(databaseName);

        try
        {
            var runner = new MongoMigrationRunner(
                database,
                Array.Empty<IMongoMigration>(),
                NullLogger<MongoMigrationRunner>.Instance,
                TimeProvider.System);

            var bootstrapper = new MongoBootstrapper(
                database,
                Options.Create(new MongoStorageOptions { EnableSemVerStyle = true }),
                NullLogger<MongoBootstrapper>.Instance,
                runner);

            await bootstrapper.InitializeAsync(CancellationToken.None);

            var indexCursor = await database
                .GetCollection<BsonDocument>(MongoStorageDefaults.Collections.Advisory)
                .Indexes
                .ListAsync();
            var indexNames = (await indexCursor.ToListAsync()).Select(x => x["name"].AsString).ToArray();

            Assert.Contains("advisory_normalizedVersions_pkg_scheme_type", indexNames);
            Assert.Contains("advisory_normalizedVersions_value", indexNames);
        }
        finally
        {
            await _fixture.Client.DropDatabaseAsync(databaseName);
        }
    }

    [Fact]
    public async Task InitializeAsync_DoesNotCreateNormalizedIndexesWhenFeatureDisabled()
    {
        var databaseName = $"concelier-bootstrap-no-semver-{Guid.NewGuid():N}";
        var database = _fixture.Client.GetDatabase(databaseName);

        try
        {
            var runner = new MongoMigrationRunner(
                database,
                Array.Empty<IMongoMigration>(),
                NullLogger<MongoMigrationRunner>.Instance,
                TimeProvider.System);

            var bootstrapper = new MongoBootstrapper(
                database,
                Options.Create(new MongoStorageOptions { EnableSemVerStyle = false }),
                NullLogger<MongoBootstrapper>.Instance,
                runner);

            await bootstrapper.InitializeAsync(CancellationToken.None);

            var indexCursor = await database
                .GetCollection<BsonDocument>(MongoStorageDefaults.Collections.Advisory)
                .Indexes
                .ListAsync();
            var indexNames = (await indexCursor.ToListAsync()).Select(x => x["name"].AsString).ToArray();

            Assert.DoesNotContain("advisory_normalizedVersions_pkg_scheme_type", indexNames);
            Assert.DoesNotContain("advisory_normalizedVersions_value", indexNames);
        }
        finally
        {
            await _fixture.Client.DropDatabaseAsync(databaseName);
        }
    }

    [Fact]
    public async Task InitializeAsync_CreatesAdvisoryEventIndexes()
    {
        var databaseName = $"concelier-bootstrap-events-{Guid.NewGuid():N}";
        var database = _fixture.Client.GetDatabase(databaseName);

        try
        {
            var runner = new MongoMigrationRunner(
                database,
                Array.Empty<IMongoMigration>(),
                NullLogger<MongoMigrationRunner>.Instance,
                TimeProvider.System);

            var bootstrapper = new MongoBootstrapper(
                database,
                Options.Create(new MongoStorageOptions()),
                NullLogger<MongoBootstrapper>.Instance,
                runner);

            await bootstrapper.InitializeAsync(CancellationToken.None);

            var statementIndexes = await database
                .GetCollection<BsonDocument>(MongoStorageDefaults.Collections.AdvisoryStatements)
                .Indexes
                .ListAsync();
            var statementIndexNames = (await statementIndexes.ToListAsync()).Select(x => x["name"].AsString).ToArray();

            Assert.Contains("advisory_statements_vulnerability_asof_desc", statementIndexNames);
            Assert.Contains("advisory_statements_statementHash_unique", statementIndexNames);

            var conflictIndexes = await database
                .GetCollection<BsonDocument>(MongoStorageDefaults.Collections.AdvisoryConflicts)
                .Indexes
                .ListAsync();
            var conflictIndexNames = (await conflictIndexes.ToListAsync()).Select(x => x["name"].AsString).ToArray();

            Assert.Contains("advisory_conflicts_vulnerability_asof_desc", conflictIndexNames);
            Assert.Contains("advisory_conflicts_conflictHash_unique", conflictIndexNames);
        }
        finally
        {
            await _fixture.Client.DropDatabaseAsync(databaseName);
        }
    }
}
@@ -0,0 +1,113 @@
using Microsoft.Extensions.Logging.Abstractions;
using MongoDB.Driver;
using StellaOps.Concelier.Core.Jobs;
using StellaOps.Concelier.Storage.Mongo;

namespace StellaOps.Concelier.Storage.Mongo.Tests;

[Collection("mongo-fixture")]
public sealed class MongoJobStoreTests : IClassFixture<MongoIntegrationFixture>
{
    private readonly MongoIntegrationFixture _fixture;

    public MongoJobStoreTests(MongoIntegrationFixture fixture)
    {
        _fixture = fixture;
    }

    [Fact]
    public async Task CreateStartCompleteLifecycle()
    {
        await ResetCollectionAsync();
        var collection = _fixture.Database.GetCollection<JobRunDocument>(MongoStorageDefaults.Collections.Jobs);
        var store = new MongoJobStore(collection, NullLogger<MongoJobStore>.Instance);

        var request = new JobRunCreateRequest(
            Kind: "mongo:test",
            Trigger: "unit",
            Parameters: new Dictionary<string, object?> { ["scope"] = "lifecycle" },
            ParametersHash: "abc",
            Timeout: TimeSpan.FromSeconds(5),
            LeaseDuration: TimeSpan.FromSeconds(2),
            CreatedAt: DateTimeOffset.UtcNow);

        var created = await store.CreateAsync(request, CancellationToken.None);
        Assert.Equal(JobRunStatus.Pending, created.Status);

        var started = await store.TryStartAsync(created.RunId, DateTimeOffset.UtcNow, CancellationToken.None);
        Assert.NotNull(started);
        Assert.Equal(JobRunStatus.Running, started!.Status);

        var completed = await store.TryCompleteAsync(created.RunId, new JobRunCompletion(JobRunStatus.Succeeded, DateTimeOffset.UtcNow, null), CancellationToken.None);
        Assert.NotNull(completed);
        Assert.Equal(JobRunStatus.Succeeded, completed!.Status);

        var recent = await store.GetRecentRunsAsync("mongo:test", 10, CancellationToken.None);
        var snapshot = Assert.Single(recent);
        Assert.Equal(JobRunStatus.Succeeded, snapshot.Status);

        var active = await store.GetActiveRunsAsync(CancellationToken.None);
        Assert.Empty(active);

        var last = await store.GetLastRunAsync("mongo:test", CancellationToken.None);
        Assert.NotNull(last);
        Assert.Equal(completed.RunId, last!.RunId);
    }

    [Fact]
    public async Task StartAndFailRunHonorsStateTransitions()
    {
        await ResetCollectionAsync();
        var collection = _fixture.Database.GetCollection<JobRunDocument>(MongoStorageDefaults.Collections.Jobs);
        var store = new MongoJobStore(collection, NullLogger<MongoJobStore>.Instance);

        var request = new JobRunCreateRequest(
            Kind: "mongo:failure",
            Trigger: "unit",
            Parameters: new Dictionary<string, object?>(),
            ParametersHash: null,
            Timeout: null,
            LeaseDuration: null,
            CreatedAt: DateTimeOffset.UtcNow);

        var created = await store.CreateAsync(request, CancellationToken.None);
        var firstStart = await store.TryStartAsync(created.RunId, DateTimeOffset.UtcNow, CancellationToken.None);
        Assert.NotNull(firstStart);

        // Second start attempt should be rejected once running.
        var secondStart = await store.TryStartAsync(created.RunId, DateTimeOffset.UtcNow.AddSeconds(1), CancellationToken.None);
        Assert.Null(secondStart);

        var failure = await store.TryCompleteAsync(
            created.RunId,
            new JobRunCompletion(JobRunStatus.Failed, DateTimeOffset.UtcNow.AddSeconds(2), "boom"),
            CancellationToken.None);

        Assert.NotNull(failure);
        Assert.Equal("boom", failure!.Error);
        Assert.Equal(JobRunStatus.Failed, failure.Status);
    }

    [Fact]
    public async Task CompletingUnknownRunReturnsNull()
    {
        await ResetCollectionAsync();
        var collection = _fixture.Database.GetCollection<JobRunDocument>(MongoStorageDefaults.Collections.Jobs);
        var store = new MongoJobStore(collection, NullLogger<MongoJobStore>.Instance);

        var result = await store.TryCompleteAsync(Guid.NewGuid(), new JobRunCompletion(JobRunStatus.Succeeded, DateTimeOffset.UtcNow, null), CancellationToken.None);

        Assert.Null(result);
    }

    private async Task ResetCollectionAsync()
    {
        try
        {
            await _fixture.Database.DropCollectionAsync(MongoStorageDefaults.Collections.Jobs);
        }
        catch (MongoCommandException ex) when (ex.CodeName == "NamespaceNotFound" || ex.Message.Contains("ns not found", StringComparison.OrdinalIgnoreCase))
        {
        }
    }
}
@@ -0,0 +1,55 @@
using Microsoft.Extensions.Logging.Abstractions;
using MongoDB.Bson;
using StellaOps.Concelier.Storage.Mongo;

namespace StellaOps.Concelier.Storage.Mongo.Tests;

[Collection("mongo-fixture")]
public sealed class MongoSourceStateRepositoryTests : IClassFixture<MongoIntegrationFixture>
{
    private readonly MongoIntegrationFixture _fixture;

    public MongoSourceStateRepositoryTests(MongoIntegrationFixture fixture)
    {
        _fixture = fixture;
    }

    [Fact]
    public async Task UpsertAndUpdateCursorFlow()
    {
        var repository = new MongoSourceStateRepository(_fixture.Database, NullLogger<MongoSourceStateRepository>.Instance);
        var sourceName = "nvd";

        var record = new SourceStateRecord(
            SourceName: sourceName,
            Enabled: true,
            Paused: false,
            Cursor: new BsonDocument("page", 1),
            LastSuccess: null,
            LastFailure: null,
            FailCount: 0,
            BackoffUntil: null,
            UpdatedAt: DateTimeOffset.UtcNow,
            LastFailureReason: null);

        var upserted = await repository.UpsertAsync(record, CancellationToken.None);
        Assert.True(upserted.Enabled);

        var cursor = new BsonDocument("page", 2);
        var updated = await repository.UpdateCursorAsync(sourceName, cursor, DateTimeOffset.UtcNow, CancellationToken.None);
        Assert.NotNull(updated);
        Assert.Equal(0, updated!.FailCount);
        Assert.Equal(2, updated.Cursor["page"].AsInt32);

        var failure = await repository.MarkFailureAsync(sourceName, DateTimeOffset.UtcNow, TimeSpan.FromMinutes(5), "network timeout", CancellationToken.None);
        Assert.NotNull(failure);
        Assert.Equal(1, failure!.FailCount);
        Assert.NotNull(failure.BackoffUntil);
        Assert.Equal("network timeout", failure.LastFailureReason);

        var fetched = await repository.TryGetAsync(sourceName, CancellationToken.None);
        Assert.NotNull(fetched);
        Assert.Equal(failure.BackoffUntil, fetched!.BackoffUntil);
        Assert.Equal("network timeout", fetched.LastFailureReason);
    }
}
@@ -0,0 +1,93 @@
using Microsoft.Extensions.Logging.Abstractions;
using Microsoft.Extensions.Options;
using Microsoft.Extensions.Time.Testing;
using MongoDB.Bson;
using MongoDB.Driver;
using MongoDB.Driver.GridFS;
using StellaOps.Concelier.Storage.Mongo;
using StellaOps.Concelier.Storage.Mongo.Documents;
using StellaOps.Concelier.Storage.Mongo.Dtos;

namespace StellaOps.Concelier.Storage.Mongo.Tests;

[Collection("mongo-fixture")]
public sealed class RawDocumentRetentionServiceTests : IClassFixture<MongoIntegrationFixture>
{
    private readonly MongoIntegrationFixture _fixture;

    public RawDocumentRetentionServiceTests(MongoIntegrationFixture fixture)
    {
        _fixture = fixture;
    }

    [Fact]
    public async Task SweepExpiredDocumentsAsync_RemovesExpiredRawDocuments()
    {
        var database = _fixture.Database;
        var documents = database.GetCollection<DocumentDocument>(MongoStorageDefaults.Collections.Document);
        var dtos = database.GetCollection<DtoDocument>(MongoStorageDefaults.Collections.Dto);
        var bucket = new GridFSBucket(database, new GridFSBucketOptions { BucketName = "documents" });

        var now = new DateTimeOffset(2024, 10, 1, 12, 0, 0, TimeSpan.Zero);
        var fakeTime = new FakeTimeProvider(now);

        var options = Options.Create(new MongoStorageOptions
        {
            ConnectionString = _fixture.Runner.ConnectionString,
            DatabaseName = database.DatabaseNamespace.DatabaseName,
            RawDocumentRetention = TimeSpan.FromDays(1),
            RawDocumentRetentionTtlGrace = TimeSpan.Zero,
            RawDocumentRetentionSweepInterval = TimeSpan.FromMinutes(5),
        });

        var expiredId = Guid.NewGuid().ToString();
        var gridFsId = await bucket.UploadFromBytesAsync("expired", new byte[] { 1, 2, 3 });
        await documents.InsertOneAsync(new DocumentDocument
        {
            Id = expiredId,
            SourceName = "nvd",
            Uri = "https://example.test/cve",
            FetchedAt = now.AddDays(-2).UtcDateTime,
            Sha256 = "abc",
            Status = "pending",
            ExpiresAt = now.AddMinutes(-5).UtcDateTime,
            GridFsId = gridFsId,
        });

        await dtos.InsertOneAsync(new DtoDocument
        {
            Id = Guid.NewGuid().ToString(),
            DocumentId = expiredId,
            SourceName = "nvd",
            SchemaVersion = "schema",
            Payload = new BsonDocument("value", 1),
            ValidatedAt = now.UtcDateTime,
        });

        var freshId = Guid.NewGuid().ToString();
        await documents.InsertOneAsync(new DocumentDocument
        {
            Id = freshId,
            SourceName = "nvd",
            Uri = "https://example.test/future",
            FetchedAt = now.UtcDateTime,
            Sha256 = "def",
            Status = "pending",
            ExpiresAt = now.AddHours(1).UtcDateTime,
            GridFsId = null,
        });

        var service = new RawDocumentRetentionService(database, options, NullLogger<RawDocumentRetentionService>.Instance, fakeTime);

        var removed = await service.SweepExpiredDocumentsAsync(CancellationToken.None);

        Assert.Equal(1, removed);
        Assert.Equal(0, await documents.CountDocumentsAsync(d => d.Id == expiredId));
        Assert.Equal(0, await dtos.CountDocumentsAsync(d => d.DocumentId == expiredId));
        Assert.Equal(1, await documents.CountDocumentsAsync(d => d.Id == freshId));

        var filter = Builders<GridFSFileInfo>.Filter.Eq("_id", gridFsId);
        using var cursor = await bucket.FindAsync(filter);
        Assert.Empty(await cursor.ToListAsync());
    }
}
@@ -0,0 +1,12 @@
<Project Sdk="Microsoft.NET.Sdk">
  <PropertyGroup>
    <TargetFramework>net10.0</TargetFramework>
    <ImplicitUsings>enable</ImplicitUsings>
    <Nullable>enable</Nullable>
  </PropertyGroup>
  <ItemGroup>
    <ProjectReference Include="../StellaOps.Concelier.Core/StellaOps.Concelier.Core.csproj" />
    <ProjectReference Include="../StellaOps.Concelier.Models/StellaOps.Concelier.Models.csproj" />
    <ProjectReference Include="../StellaOps.Concelier.Storage.Mongo/StellaOps.Concelier.Storage.Mongo.csproj" />
  </ItemGroup>
</Project>