Initial commit (history squashed)
Some checks failed
Build Test Deploy / authority-container (push) Has been cancelled
Build Test Deploy / docs (push) Has been cancelled
Build Test Deploy / deploy (push) Has been cancelled
Build Test Deploy / build-test (push) Has been cancelled
Docs CI / lint-and-preview (push) Has been cancelled

This commit is contained in:
2025-10-07 10:14:21 +03:00
commit b97fc7685a
1132 changed files with 117842 additions and 0 deletions

View File

@@ -0,0 +1,195 @@
using System.Diagnostics;
using System.Linq;
using System.Threading;
using Microsoft.Extensions.Logging.Abstractions;
using Microsoft.Extensions.Options;
using StellaOps.Feedser.Models;
using StellaOps.Feedser.Storage.Mongo;
using StellaOps.Feedser.Storage.Mongo.Advisories;
using StellaOps.Feedser.Storage.Mongo.Aliases;
using StellaOps.Feedser.Storage.Mongo.Migrations;
using Xunit;
using Xunit.Abstractions;
namespace StellaOps.Feedser.Storage.Mongo.Tests;
// Performance smoke test: builds deliberately oversized advisory documents, writes them to a
// throwaway Mongo database, and asserts per-document latency budgets for upsert, the
// recent-advisories listing, and individual key lookups. Uses the shared mongo fixture's client
// but its own database name so timings are not polluted by other tests' data.
[Collection("mongo-fixture")]
public sealed class AdvisoryStorePerformanceTests : IClassFixture<MongoIntegrationFixture>
{
// Sizing knobs for the synthetic advisories; together they produce documents large enough to
// exercise serialization and index maintenance without blowing the total budget below.
private const int LargeAdvisoryCount = 30;
private const int AliasesPerAdvisory = 24;
private const int ReferencesPerAdvisory = 180;
private const int AffectedPackagesPerAdvisory = 140;
private const int VersionRangesPerPackage = 4;
private const int CvssMetricsPerAdvisory = 24;
private const int ProvenanceEntriesPerAdvisory = 16;
// 128 KiB summary pushes each document well past trivial size.
private static readonly string LargeSummary = new('A', 128 * 1024);
private static readonly DateTimeOffset BasePublished = new(2024, 1, 1, 0, 0, 0, TimeSpan.Zero);
private static readonly DateTimeOffset BaseRecorded = new(2024, 1, 1, 0, 0, 0, TimeSpan.Zero);
// Budgets: per-operation averages plus an overall wall-clock ceiling for all three phases.
private static readonly TimeSpan TotalBudget = TimeSpan.FromSeconds(28);
private const double UpsertBudgetPerAdvisoryMs = 500;
private const double FetchBudgetPerAdvisoryMs = 200;
private const double FindBudgetPerAdvisoryMs = 200;
private readonly MongoIntegrationFixture _fixture;
private readonly ITestOutputHelper _output;
public AdvisoryStorePerformanceTests(MongoIntegrationFixture fixture, ITestOutputHelper output)
{
_fixture = fixture;
_output = output;
}
[Fact]
public async Task UpsertAndQueryLargeAdvisories_CompletesWithinBudget()
{
// Fresh database per run; dropped in the finally block so reruns start clean.
var databaseName = $"feedser-performance-{Guid.NewGuid():N}";
var database = _fixture.Client.GetDatabase(databaseName);
try
{
// Bootstrap collections/indexes exactly as production would (no pending migrations here).
var migrationRunner = new MongoMigrationRunner(
database,
Array.Empty<IMongoMigration>(),
NullLogger<MongoMigrationRunner>.Instance,
TimeProvider.System);
var bootstrapper = new MongoBootstrapper(
database,
Options.Create(new MongoStorageOptions()),
NullLogger<MongoBootstrapper>.Instance,
migrationRunner);
await bootstrapper.InitializeAsync(CancellationToken.None);
var aliasStore = new AliasStore(database, NullLogger<AliasStore>.Instance);
var store = new AdvisoryStore(database, aliasStore, NullLogger<AdvisoryStore>.Instance, TimeProvider.System);
// Hard 45s ceiling on the whole scenario, independent of the soft budgets asserted below.
using var cts = new CancellationTokenSource(TimeSpan.FromSeconds(45));
// Warm up collections (indexes, serialization caches) so perf timings exclude one-time setup work.
// NOTE(review): the warmup advisory (index -1) stays in the database; the recent-count
// assertion below still holds because GetRecentAsync is limited to LargeAdvisoryCount.
var warmup = CreateLargeAdvisory(-1);
await store.UpsertAsync(warmup, cts.Token);
_ = await store.FindAsync(warmup.AdvisoryKey, cts.Token);
_ = await store.GetRecentAsync(1, cts.Token);
var advisories = Enumerable.Range(0, LargeAdvisoryCount)
.Select(CreateLargeAdvisory)
.ToArray();
// Phase 1: sequential upserts.
var upsertWatch = Stopwatch.StartNew();
foreach (var advisory in advisories)
{
await store.UpsertAsync(advisory, cts.Token);
}
upsertWatch.Stop();
var upsertPerAdvisory = upsertWatch.Elapsed.TotalMilliseconds / LargeAdvisoryCount;
// Phase 2: one bulk "recent" read covering the whole batch.
var fetchWatch = Stopwatch.StartNew();
var recent = await store.GetRecentAsync(LargeAdvisoryCount, cts.Token);
fetchWatch.Stop();
var fetchPerAdvisory = fetchWatch.Elapsed.TotalMilliseconds / LargeAdvisoryCount;
Assert.Equal(LargeAdvisoryCount, recent.Count);
// Phase 3: individual point lookups by advisory key.
var findWatch = Stopwatch.StartNew();
foreach (var advisory in advisories)
{
var fetched = await store.FindAsync(advisory.AdvisoryKey, cts.Token);
Assert.NotNull(fetched);
}
findWatch.Stop();
var findPerAdvisory = findWatch.Elapsed.TotalMilliseconds / LargeAdvisoryCount;
var totalElapsed = upsertWatch.Elapsed + fetchWatch.Elapsed + findWatch.Elapsed;
// Always log raw numbers so budget failures on CI are diagnosable from the test output.
_output.WriteLine($"Upserted {LargeAdvisoryCount} large advisories in {upsertWatch.Elapsed} ({upsertPerAdvisory:F2} ms/doc).");
_output.WriteLine($"Fetched recent advisories in {fetchWatch.Elapsed} ({fetchPerAdvisory:F2} ms/doc).");
_output.WriteLine($"Looked up advisories individually in {findWatch.Elapsed} ({findPerAdvisory:F2} ms/doc).");
_output.WriteLine($"Total elapsed {totalElapsed}.");
Assert.True(upsertPerAdvisory <= UpsertBudgetPerAdvisoryMs, $"Upsert exceeded {UpsertBudgetPerAdvisoryMs} ms per advisory: {upsertPerAdvisory:F2} ms.");
Assert.True(fetchPerAdvisory <= FetchBudgetPerAdvisoryMs, $"GetRecent exceeded {FetchBudgetPerAdvisoryMs} ms per advisory: {fetchPerAdvisory:F2} ms.");
Assert.True(findPerAdvisory <= FindBudgetPerAdvisoryMs, $"Find exceeded {FindBudgetPerAdvisoryMs} ms per advisory: {findPerAdvisory:F2} ms.");
Assert.True(totalElapsed <= TotalBudget, $"Mongo advisory operations exceeded total budget {TotalBudget}: {totalElapsed}.");
}
finally
{
await _fixture.Client.DropDatabaseAsync(databaseName);
}
}
// Builds one synthetic advisory keyed by index, with deterministic aliases, provenance,
// references, affected packages (each with multiple version ranges), and CVSS metrics sized
// by the class constants. index -1 is reserved for the warmup document.
private static Advisory CreateLargeAdvisory(int index)
{
var baseKey = $"ADV-LARGE-{index:D4}";
var published = BasePublished.AddDays(index);
var modified = published.AddHours(6);
var aliases = Enumerable.Range(0, AliasesPerAdvisory)
.Select(i => $"ALIAS-{baseKey}-{i:D4}")
.ToArray();
var provenance = Enumerable.Range(0, ProvenanceEntriesPerAdvisory)
.Select(i => new AdvisoryProvenance(
source: i % 2 == 0 ? "nvd" : "vendor",
kind: i % 3 == 0 ? "normalized" : "enriched",
value: $"prov-{baseKey}-{i:D3}",
recordedAt: BaseRecorded.AddDays(i)))
.ToArray();
var references = Enumerable.Range(0, ReferencesPerAdvisory)
.Select(i => new AdvisoryReference(
url: $"https://vuln.example.com/{baseKey}/ref/{i:D4}",
kind: i % 2 == 0 ? "advisory" : "article",
sourceTag: $"tag-{i % 7}",
summary: $"Reference {baseKey} #{i}",
provenance: provenance[i % provenance.Length]))
.ToArray();
var affectedPackages = Enumerable.Range(0, AffectedPackagesPerAdvisory)
.Select(i => new AffectedPackage(
type: i % 3 == 0 ? AffectedPackageTypes.Rpm : AffectedPackageTypes.Deb,
identifier: $"pkg/{baseKey}/{i:D4}",
platform: i % 4 == 0 ? "linux/x86_64" : "linux/aarch64",
versionRanges: Enumerable.Range(0, VersionRangesPerPackage)
.Select(r => new AffectedVersionRange(
rangeKind: r % 2 == 0 ? "semver" : "evr",
introducedVersion: $"1.{index}.{i}.{r}",
fixedVersion: $"2.{index}.{i}.{r}",
lastAffectedVersion: $"1.{index}.{i}.{r}",
rangeExpression: $">=1.{index}.{i}.{r} <2.{index}.{i}.{r}",
provenance: provenance[(i + r) % provenance.Length]))
.ToArray(),
statuses: Array.Empty<AffectedPackageStatus>(),
provenance: new[]
{
provenance[i % provenance.Length],
provenance[(i + 3) % provenance.Length],
}))
.ToArray();
// NOTE(review): the vector string is always CVSS:3.1-shaped even when version is "2.0";
// presumably irrelevant for a size/latency test, but confirm the store does not validate it.
var cvssMetrics = Enumerable.Range(0, CvssMetricsPerAdvisory)
.Select(i => new CvssMetric(
version: i % 2 == 0 ? "3.1" : "2.0",
vector: $"CVSS:3.1/AV:N/AC:L/PR:N/UI:N/S:U/C:H/I:H/A:{(i % 3 == 0 ? "H" : "L")}",
baseScore: Math.Max(0, 9.8 - i * 0.2),
baseSeverity: i % 3 == 0 ? "critical" : "high",
provenance: provenance[i % provenance.Length]))
.ToArray();
return new Advisory(
advisoryKey: baseKey,
title: $"Large advisory {baseKey}",
summary: LargeSummary,
language: "en",
published: published,
modified: modified,
severity: "critical",
exploitKnown: index % 2 == 0,
aliases: aliases,
references: references,
affectedPackages: affectedPackages,
cvssMetrics: cvssMetrics,
provenance: provenance);
}
}

View File

@@ -0,0 +1,162 @@
using System;
using System.Collections.Generic;
using System.Linq;
using Microsoft.Extensions.Logging.Abstractions;
using MongoDB.Driver;
using StellaOps.Feedser.Models;
using StellaOps.Feedser.Storage.Mongo.Advisories;
using StellaOps.Feedser.Storage.Mongo.Aliases;
namespace StellaOps.Feedser.Storage.Mongo.Tests;
// Integration tests for AdvisoryStore against the shared Mongo fixture. Each test drops the
// advisory and alias collections first so it starts from an empty namespace.
[Collection("mongo-fixture")]
public sealed class AdvisoryStoreTests : IClassFixture<MongoIntegrationFixture>
{
private readonly MongoIntegrationFixture _fixture;
public AdvisoryStoreTests(MongoIntegrationFixture fixture)
{
_fixture = fixture;
}
// Upserting an advisory makes it retrievable by key, visible in the recent list, and
// registers both the primary-scheme self alias and the declared alias in the alias store.
[Fact]
public async Task UpsertAndFetchAdvisory()
{
await DropCollectionAsync(MongoStorageDefaults.Collections.Advisory);
await DropCollectionAsync(MongoStorageDefaults.Collections.Alias);
var aliasStore = new AliasStore(_fixture.Database, NullLogger<AliasStore>.Instance);
var store = new AdvisoryStore(_fixture.Database, aliasStore, NullLogger<AdvisoryStore>.Instance, TimeProvider.System);
// Minimal advisory: empty references/packages/metrics, a single declared alias.
var advisory = new Advisory(
advisoryKey: "ADV-1",
title: "Sample Advisory",
summary: "Demo",
language: "en",
published: DateTimeOffset.UtcNow,
modified: DateTimeOffset.UtcNow,
severity: "medium",
exploitKnown: false,
aliases: new[] { "ALIAS-1" },
references: Array.Empty<AdvisoryReference>(),
affectedPackages: Array.Empty<AffectedPackage>(),
cvssMetrics: Array.Empty<CvssMetric>(),
provenance: Array.Empty<AdvisoryProvenance>());
await store.UpsertAsync(advisory, CancellationToken.None);
var fetched = await store.FindAsync("ADV-1", CancellationToken.None);
Assert.NotNull(fetched);
Assert.Equal(advisory.AdvisoryKey, fetched!.AdvisoryKey);
var recent = await store.GetRecentAsync(5, CancellationToken.None);
Assert.NotEmpty(recent);
// The store is expected to mirror aliases into the alias store on upsert.
var aliases = await aliasStore.GetByAdvisoryAsync("ADV-1", CancellationToken.None);
Assert.Contains(aliases, record => record.Scheme == AliasStoreConstants.PrimaryScheme && record.Value == "ADV-1");
Assert.Contains(aliases, record => record.Value == "ALIAS-1");
}
// Verifies that every field of a fully-populated version range — including the SemVer,
// NEVRA, EVR, and vendor-extension range primitives — survives a Mongo round trip intact.
[Fact]
public async Task RangePrimitives_RoundTripThroughMongo()
{
await DropCollectionAsync(MongoStorageDefaults.Collections.Advisory);
await DropCollectionAsync(MongoStorageDefaults.Collections.Alias);
var aliasStore = new AliasStore(_fixture.Database, NullLogger<AliasStore>.Instance);
var store = new AdvisoryStore(_fixture.Database, aliasStore, NullLogger<AdvisoryStore>.Instance, TimeProvider.System);
var recordedAt = new DateTimeOffset(2025, 1, 1, 0, 0, 0, TimeSpan.Zero);
var provenance = new AdvisoryProvenance("source-x", "mapper", "payload-123", recordedAt);
// One of each primitive flavor plus ordinal-keyed vendor extensions.
var rangePrimitives = new RangePrimitives(
new SemVerPrimitive(
Introduced: "1.0.0",
IntroducedInclusive: true,
Fixed: "1.2.0",
FixedInclusive: false,
LastAffected: "1.1.5",
LastAffectedInclusive: true,
ConstraintExpression: ">=1.0.0 <1.2.0"),
new NevraPrimitive(
Introduced: new NevraComponent("pkg", 0, "1.0.0", "1", "x86_64"),
Fixed: new NevraComponent("pkg", 1, "1.2.0", "2", "x86_64"),
LastAffected: null),
new EvrPrimitive(
Introduced: new EvrComponent(1, "1.0.0", "1"),
Fixed: null,
LastAffected: new EvrComponent(1, "1.1.5", null)),
new Dictionary<string, string>(StringComparer.Ordinal)
{
["channel"] = "stable",
["notesHash"] = "abc123",
});
var versionRange = new AffectedVersionRange(
rangeKind: "semver",
introducedVersion: "1.0.0",
fixedVersion: "1.2.0",
lastAffectedVersion: "1.1.5",
rangeExpression: ">=1.0.0 <1.2.0",
provenance,
rangePrimitives);
var affectedPackage = new AffectedPackage(
type: "semver",
identifier: "pkg@1.x",
platform: "linux",
versionRanges: new[] { versionRange },
statuses: Array.Empty<AffectedPackageStatus>(),
provenance: new[] { provenance });
var advisory = new Advisory(
advisoryKey: "ADV-RANGE-1",
title: "Sample Range Primitive",
summary: "Testing range primitive persistence.",
language: "en",
published: recordedAt,
modified: recordedAt,
severity: "medium",
exploitKnown: false,
aliases: new[] { "CVE-2025-0001" },
references: Array.Empty<AdvisoryReference>(),
affectedPackages: new[] { affectedPackage },
cvssMetrics: Array.Empty<CvssMetric>(),
provenance: new[] { provenance });
await store.UpsertAsync(advisory, CancellationToken.None);
var fetched = await store.FindAsync("ADV-RANGE-1", CancellationToken.None);
Assert.NotNull(fetched);
// Field-by-field comparison of the single package/range rather than whole-object equality,
// so a serialization regression points at the exact lost field.
var fetchedPackage = Assert.Single(fetched!.AffectedPackages);
var fetchedRange = Assert.Single(fetchedPackage.VersionRanges);
Assert.Equal(versionRange.RangeKind, fetchedRange.RangeKind);
Assert.Equal(versionRange.IntroducedVersion, fetchedRange.IntroducedVersion);
Assert.Equal(versionRange.FixedVersion, fetchedRange.FixedVersion);
Assert.Equal(versionRange.LastAffectedVersion, fetchedRange.LastAffectedVersion);
Assert.Equal(versionRange.RangeExpression, fetchedRange.RangeExpression);
Assert.Equal(versionRange.Provenance.Source, fetchedRange.Provenance.Source);
Assert.Equal(versionRange.Provenance.Kind, fetchedRange.Provenance.Kind);
Assert.Equal(versionRange.Provenance.Value, fetchedRange.Provenance.Value);
Assert.Equal(versionRange.Provenance.DecisionReason, fetchedRange.Provenance.DecisionReason);
Assert.Equal(versionRange.Provenance.RecordedAt, fetchedRange.Provenance.RecordedAt);
Assert.True(versionRange.Provenance.FieldMask.SequenceEqual(fetchedRange.Provenance.FieldMask));
Assert.NotNull(fetchedRange.Primitives);
// Primitive records are compared by value equality (they are presumably C# records — confirm).
Assert.Equal(rangePrimitives.SemVer, fetchedRange.Primitives!.SemVer);
Assert.Equal(rangePrimitives.Nevra, fetchedRange.Primitives.Nevra);
Assert.Equal(rangePrimitives.Evr, fetchedRange.Primitives.Evr);
Assert.Equal(rangePrimitives.VendorExtensions, fetchedRange.Primitives.VendorExtensions);
}
// Drops a collection, tolerating the "namespace not found" error a fresh database raises.
private async Task DropCollectionAsync(string collectionName)
{
try
{
await _fixture.Database.DropCollectionAsync(collectionName);
}
catch (MongoDB.Driver.MongoCommandException ex) when (ex.CodeName == "NamespaceNotFound" || ex.Message.Contains("ns not found", StringComparison.OrdinalIgnoreCase))
{
// ignore missing collection
}
}
}

View File

@@ -0,0 +1,60 @@
using System;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.Extensions.Logging.Abstractions;
using MongoDB.Driver;
using StellaOps.Feedser.Storage.Mongo;
using StellaOps.Feedser.Storage.Mongo.Aliases;
namespace StellaOps.Feedser.Storage.Mongo.Tests;
// Integration tests for AliasStore: alias replacement semantics and cross-advisory
// collision detection, run against the shared Mongo fixture.
[Collection("mongo-fixture")]
public sealed class AliasStoreTests : IClassFixture<MongoIntegrationFixture>
{
    private readonly MongoIntegrationFixture _fixture;

    public AliasStoreTests(MongoIntegrationFixture fixture) => _fixture = fixture;

    // ReplaceAsync should persist the supplied alias set, and a second advisory claiming an
    // already-registered alias value must surface a collision naming both advisories.
    [Fact]
    public async Task ReplaceAsync_UpsertsAliases_AndDetectsCollision()
    {
        await DropAliasCollectionAsync();

        var store = new AliasStore(_fixture.Database, NullLogger<AliasStore>.Instance);
        var baseline = DateTimeOffset.UtcNow;

        // ADV-1 registers a CVE alias plus its primary-scheme self reference.
        await store.ReplaceAsync(
            "ADV-1",
            new[]
            {
                new AliasEntry("CVE", "CVE-2025-1234"),
                new AliasEntry(AliasStoreConstants.PrimaryScheme, "ADV-1"),
            },
            baseline,
            CancellationToken.None);

        var storedForFirst = await store.GetByAdvisoryAsync("ADV-1", CancellationToken.None);
        Assert.Contains(storedForFirst, record => record.Scheme == "CVE" && record.Value == "CVE-2025-1234");

        // ADV-2 reuses the same CVE value; the store must report the overlap.
        var result = await store.ReplaceAsync(
            "ADV-2",
            new[]
            {
                new AliasEntry("CVE", "CVE-2025-1234"),
                new AliasEntry(AliasStoreConstants.PrimaryScheme, "ADV-2"),
            },
            baseline.AddMinutes(1),
            CancellationToken.None);

        Assert.NotEmpty(result.Collisions);
        var collision = Assert.Single(result.Collisions);
        Assert.Equal("CVE", collision.Scheme);
        Assert.Contains("ADV-1", collision.AdvisoryKeys);
        Assert.Contains("ADV-2", collision.AdvisoryKeys);
    }

    // Starts each run from an empty alias collection; a collection that was never created is
    // not an error.
    private async Task DropAliasCollectionAsync()
    {
        try
        {
            await _fixture.Database.DropCollectionAsync(MongoStorageDefaults.Collections.Alias);
        }
        catch (MongoCommandException ex) when (ex.CodeName == "NamespaceNotFound" || ex.Message.Contains("ns not found", StringComparison.OrdinalIgnoreCase))
        {
            // Nothing to clean up.
        }
    }
}

View File

@@ -0,0 +1,51 @@
using Microsoft.Extensions.Logging.Abstractions;
using StellaOps.Feedser.Storage.Mongo.Documents;
namespace StellaOps.Feedser.Storage.Mongo.Tests;
// Integration tests for DocumentStore: round-trips a raw document record and verifies a
// status transition, using the shared Mongo fixture.
[Collection("mongo-fixture")]
public sealed class DocumentStoreTests : IClassFixture<MongoIntegrationFixture>
{
    private readonly MongoIntegrationFixture _fixture;

    public DocumentStoreTests(MongoIntegrationFixture fixture) => _fixture = fixture;

    // Upsert a record, find it by (source, uri), flip its status, then re-read it by id.
    [Fact]
    public async Task UpsertAndLookupDocument()
    {
        var store = new DocumentStore(_fixture.Database, NullLogger<DocumentStore>.Instance);
        var documentId = Guid.NewGuid();
        var now = DateTimeOffset.UtcNow;

        var record = new DocumentRecord(
            documentId,
            "source",
            "https://example.com/advisory.json",
            now,
            "sha123",
            "pending",
            "application/json",
            new Dictionary<string, string> { ["etag"] = "abc" },
            new Dictionary<string, string> { ["note"] = "test" },
            "etag-value",
            now,
            null,
            now.AddDays(30));

        var upserted = await store.UpsertAsync(record, CancellationToken.None);
        Assert.Equal(documentId, upserted.Id);

        var fetched = await store.FindBySourceAndUriAsync("source", "https://example.com/advisory.json", CancellationToken.None);
        Assert.NotNull(fetched);
        Assert.Equal("pending", fetched!.Status);
        Assert.Equal("test", fetched.Metadata!["note"]);

        // Status transition must be persisted and visible through a point lookup.
        var statusUpdated = await store.UpdateStatusAsync(documentId, "processed", CancellationToken.None);
        Assert.True(statusUpdated);

        var refreshed = await store.FindAsync(documentId, CancellationToken.None);
        Assert.NotNull(refreshed);
        Assert.Equal("processed", refreshed!.Status);
    }
}

View File

@@ -0,0 +1,40 @@
using Microsoft.Extensions.Logging.Abstractions;
using MongoDB.Bson;
using StellaOps.Feedser.Storage.Mongo.Dtos;
namespace StellaOps.Feedser.Storage.Mongo.Tests;
// Integration tests for DtoStore: persists a BSON DTO payload and reads it back both by
// document id and by source feed, using the shared Mongo fixture.
[Collection("mongo-fixture")]
public sealed class DtoStoreTests : IClassFixture<MongoIntegrationFixture>
{
    private readonly MongoIntegrationFixture _fixture;

    public DtoStoreTests(MongoIntegrationFixture fixture) => _fixture = fixture;

    [Fact]
    public async Task UpsertAndLookupDto()
    {
        var store = new DtoStore(_fixture.Database, NullLogger<DtoStore>.Instance);

        var record = new DtoRecord(
            Guid.NewGuid(),
            Guid.NewGuid(),
            "source",
            "1.0",
            new BsonDocument("value", 1),
            DateTimeOffset.UtcNow);

        var upserted = await store.UpsertAsync(record, CancellationToken.None);
        Assert.Equal(record.DocumentId, upserted.DocumentId);

        // Lookup by document id must return the stored payload intact.
        var byId = await store.FindByDocumentIdAsync(record.DocumentId, CancellationToken.None);
        Assert.NotNull(byId);
        Assert.Equal(1, byId!.Payload["value"].AsInt32);

        // Source query returns exactly the one record we wrote.
        var bySource = await store.GetBySourceAsync("source", 10, CancellationToken.None);
        var single = Assert.Single(bySource);
        Assert.Equal(record.DocumentId, single.DocumentId);
    }
}

View File

@@ -0,0 +1,208 @@
using System;
using System.Collections.Generic;
using System.Threading;
using System.Threading.Tasks;
using StellaOps.Feedser.Storage.Mongo.Exporting;
namespace StellaOps.Feedser.Storage.Mongo.Tests;
// Pure unit tests for ExportStateManager baseline/delta bookkeeping. Uses an in-memory state
// store and a controllable TimeProvider, so no Mongo fixture or [Collection] is needed here.
public sealed class ExportStateManagerTests
{
// A first full export establishes the baseline: base id/digest, last-full digest, cursor,
// repository, version, and a timestamp taken from the injected clock.
[Fact]
public async Task StoreFullExportInitializesBaseline()
{
var store = new InMemoryExportStateStore();
var timeProvider = new TestTimeProvider(DateTimeOffset.Parse("2024-07-20T12:00:00Z"));
var manager = new ExportStateManager(store, timeProvider);
var record = await manager.StoreFullExportAsync(
exporterId: "export:json",
exportId: "20240720T120000Z",
exportDigest: "sha256:abcd",
cursor: "cursor-1",
targetRepository: "registry.local/json",
exporterVersion: "1.0.0",
resetBaseline: true,
manifest: Array.Empty<ExportFileRecord>(),
cancellationToken: CancellationToken.None);
Assert.Equal("export:json", record.Id);
Assert.Equal("20240720T120000Z", record.BaseExportId);
Assert.Equal("sha256:abcd", record.BaseDigest);
Assert.Equal("sha256:abcd", record.LastFullDigest);
Assert.Null(record.LastDeltaDigest);
Assert.Equal("cursor-1", record.ExportCursor);
Assert.Equal("registry.local/json", record.TargetRepository);
Assert.Equal("1.0.0", record.ExporterVersion);
Assert.Equal(timeProvider.Now, record.UpdatedAt);
}
// A subsequent full export without resetBaseline keeps the original baseline while advancing
// last-full digest and cursor; with resetBaseline the new export becomes the baseline and the
// delta digest is cleared.
[Fact]
public async Task StoreFullExport_ResetBaselineOverridesExisting()
{
var store = new InMemoryExportStateStore();
var timeProvider = new TestTimeProvider(DateTimeOffset.Parse("2024-07-20T12:00:00Z"));
var manager = new ExportStateManager(store, timeProvider);
await manager.StoreFullExportAsync(
exporterId: "export:json",
exportId: "20240720T120000Z",
exportDigest: "sha256:base",
cursor: "cursor-base",
targetRepository: null,
exporterVersion: "1.0.0",
resetBaseline: true,
manifest: Array.Empty<ExportFileRecord>(),
cancellationToken: CancellationToken.None);
timeProvider.Advance(TimeSpan.FromMinutes(5));
var withoutReset = await manager.StoreFullExportAsync(
exporterId: "export:json",
exportId: "20240720T120500Z",
exportDigest: "sha256:new",
cursor: "cursor-new",
targetRepository: null,
exporterVersion: "1.0.1",
resetBaseline: false,
manifest: Array.Empty<ExportFileRecord>(),
cancellationToken: CancellationToken.None);
// Baseline untouched; only the "last full" bookkeeping moved forward.
Assert.Equal("20240720T120000Z", withoutReset.BaseExportId);
Assert.Equal("sha256:base", withoutReset.BaseDigest);
Assert.Equal("sha256:new", withoutReset.LastFullDigest);
Assert.Equal("cursor-new", withoutReset.ExportCursor);
Assert.Equal(timeProvider.Now, withoutReset.UpdatedAt);
timeProvider.Advance(TimeSpan.FromMinutes(5));
var reset = await manager.StoreFullExportAsync(
exporterId: "export:json",
exportId: "20240720T121000Z",
exportDigest: "sha256:final",
cursor: "cursor-final",
targetRepository: null,
exporterVersion: "1.0.2",
resetBaseline: true,
manifest: Array.Empty<ExportFileRecord>(),
cancellationToken: CancellationToken.None);
Assert.Equal("20240720T121000Z", reset.BaseExportId);
Assert.Equal("sha256:final", reset.BaseDigest);
Assert.Equal("sha256:final", reset.LastFullDigest);
Assert.Null(reset.LastDeltaDigest);
Assert.Equal("cursor-final", reset.ExportCursor);
Assert.Equal(timeProvider.Now, reset.UpdatedAt);
}
// Changing the target repository implicitly resets the baseline even when resetBaseline is
// false — deltas against a different repository would be meaningless.
[Fact]
public async Task StoreFullExport_ResetsBaselineWhenRepositoryChanges()
{
var store = new InMemoryExportStateStore();
var timeProvider = new TestTimeProvider(DateTimeOffset.Parse("2024-07-21T08:00:00Z"));
var manager = new ExportStateManager(store, timeProvider);
await manager.StoreFullExportAsync(
exporterId: "export:json",
exportId: "20240721T080000Z",
exportDigest: "sha256:base",
cursor: "cursor-base",
targetRepository: "registry/v1/json",
exporterVersion: "1.0.0",
resetBaseline: true,
manifest: Array.Empty<ExportFileRecord>(),
cancellationToken: CancellationToken.None);
timeProvider.Advance(TimeSpan.FromMinutes(10));
var updated = await manager.StoreFullExportAsync(
exporterId: "export:json",
exportId: "20240721T081000Z",
exportDigest: "sha256:new",
cursor: "cursor-new",
targetRepository: "registry/v2/json",
exporterVersion: "1.1.0",
resetBaseline: false,
manifest: Array.Empty<ExportFileRecord>(),
cancellationToken: CancellationToken.None);
Assert.Equal("20240721T081000Z", updated.BaseExportId);
Assert.Equal("sha256:new", updated.BaseDigest);
Assert.Equal("sha256:new", updated.LastFullDigest);
Assert.Equal("registry/v2/json", updated.TargetRepository);
}
// A delta without a prior full export has no baseline to build on and must throw.
[Fact]
public async Task StoreDeltaExportRequiresBaseline()
{
var store = new InMemoryExportStateStore();
var manager = new ExportStateManager(store);
await Assert.ThrowsAsync<InvalidOperationException>(() => manager.StoreDeltaExportAsync(
exporterId: "export:json",
deltaDigest: "sha256:def",
cursor: null,
exporterVersion: "1.0.1",
manifest: Array.Empty<ExportFileRecord>(),
cancellationToken: CancellationToken.None));
}
// A delta after a full export updates delta digest, cursor, version, and timestamp while
// leaving the last-full digest intact.
[Fact]
public async Task StoreDeltaExportUpdatesExistingState()
{
var store = new InMemoryExportStateStore();
var timeProvider = new TestTimeProvider(DateTimeOffset.Parse("2024-07-20T12:00:00Z"));
var manager = new ExportStateManager(store, timeProvider);
await manager.StoreFullExportAsync(
exporterId: "export:json",
exportId: "20240720T120000Z",
exportDigest: "sha256:abcd",
cursor: "cursor-1",
targetRepository: null,
exporterVersion: "1.0.0",
resetBaseline: true,
manifest: Array.Empty<ExportFileRecord>(),
cancellationToken: CancellationToken.None);
timeProvider.Advance(TimeSpan.FromMinutes(10));
var delta = await manager.StoreDeltaExportAsync(
exporterId: "export:json",
deltaDigest: "sha256:ef01",
cursor: "cursor-2",
exporterVersion: "1.0.1",
manifest: Array.Empty<ExportFileRecord>(),
cancellationToken: CancellationToken.None);
Assert.Equal("sha256:ef01", delta.LastDeltaDigest);
Assert.Equal("cursor-2", delta.ExportCursor);
Assert.Equal("1.0.1", delta.ExporterVersion);
Assert.Equal(timeProvider.Now, delta.UpdatedAt);
Assert.Equal("sha256:abcd", delta.LastFullDigest);
}
// Dictionary-backed test double for IExportStateStore; keys are exporter ids (ordinal).
private sealed class InMemoryExportStateStore : IExportStateStore
{
private readonly Dictionary<string, ExportStateRecord> _records = new(StringComparer.Ordinal);
public Task<ExportStateRecord?> FindAsync(string id, CancellationToken cancellationToken)
{
_records.TryGetValue(id, out var record);
return Task.FromResult<ExportStateRecord?>(record);
}
public Task<ExportStateRecord> UpsertAsync(ExportStateRecord record, CancellationToken cancellationToken)
{
_records[record.Id] = record;
return Task.FromResult(record);
}
}
// Manually-advanced clock so UpdatedAt assertions are deterministic.
private sealed class TestTimeProvider : TimeProvider
{
public TestTimeProvider(DateTimeOffset start) => Now = start;
public DateTimeOffset Now { get; private set; }
public void Advance(TimeSpan delta) => Now = Now.Add(delta);
public override DateTimeOffset GetUtcNow() => Now;
}
}

View File

@@ -0,0 +1,42 @@
using System;
using Microsoft.Extensions.Logging.Abstractions;
using StellaOps.Feedser.Storage.Mongo.Exporting;
namespace StellaOps.Feedser.Storage.Mongo.Tests;
// Integration tests for the Mongo-backed ExportStateStore: a single upsert/find round trip.
[Collection("mongo-fixture")]
public sealed class ExportStateStoreTests : IClassFixture<MongoIntegrationFixture>
{
    private readonly MongoIntegrationFixture _fixture;

    public ExportStateStoreTests(MongoIntegrationFixture fixture) => _fixture = fixture;

    [Fact]
    public async Task UpsertAndFetchExportState()
    {
        var store = new ExportStateStore(_fixture.Database, NullLogger<ExportStateStore>.Instance);
        const string exporterId = "json";

        var record = new ExportStateRecord(
            Id: exporterId,
            BaseExportId: "base",
            BaseDigest: "sha-base",
            LastFullDigest: "sha-full",
            LastDeltaDigest: null,
            ExportCursor: "cursor",
            TargetRepository: "repo",
            ExporterVersion: "1.0",
            UpdatedAt: DateTimeOffset.UtcNow,
            Files: Array.Empty<ExportFileRecord>());

        // Upsert echoes the stored record back, including the empty manifest.
        var saved = await store.UpsertAsync(record, CancellationToken.None);
        Assert.Equal(exporterId, saved.Id);
        Assert.Empty(saved.Files);

        // And a point lookup returns the same state.
        var fetched = await store.FindAsync(exporterId, CancellationToken.None);
        Assert.NotNull(fetched);
        Assert.Equal("sha-full", fetched!.LastFullDigest);
        Assert.Empty(fetched.Files);
    }
}

View File

@@ -0,0 +1,35 @@
using Microsoft.Extensions.Logging.Abstractions;
using StellaOps.Feedser.Storage.Mongo.MergeEvents;
namespace StellaOps.Feedser.Storage.Mongo.Tests;
// Integration tests for MergeEventStore: appends one merge event and reads it back via the
// per-advisory recent-events query.
[Collection("mongo-fixture")]
public sealed class MergeEventStoreTests : IClassFixture<MongoIntegrationFixture>
{
    private readonly MongoIntegrationFixture _fixture;

    public MergeEventStoreTests(MongoIntegrationFixture fixture) => _fixture = fixture;

    [Fact]
    public async Task AppendAndReadMergeEvents()
    {
        var store = new MergeEventStore(_fixture.Database, NullLogger<MergeEventStore>.Instance);

        var beforeHash = new byte[] { 0x01 };
        var afterHash = new byte[] { 0x02 };
        var record = new MergeEventRecord(
            Guid.NewGuid(),
            "ADV-1",
            beforeHash,
            afterHash,
            DateTimeOffset.UtcNow,
            new List<Guid> { Guid.NewGuid() },
            Array.Empty<MergeFieldDecision>());

        await store.AppendAsync(record, CancellationToken.None);

        // Exactly the appended event should come back for this advisory key.
        var recent = await store.GetRecentAsync("ADV-1", 10, CancellationToken.None);
        var only = Assert.Single(recent);
        Assert.Equal(record.AfterHash, only.AfterHash);
    }
}

View File

@@ -0,0 +1,238 @@
using System;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.Extensions.Logging.Abstractions;
using Microsoft.Extensions.Options;
using MongoDB.Bson;
using MongoDB.Driver;
using StellaOps.Feedser.Storage.Mongo;
using StellaOps.Feedser.Storage.Mongo.Migrations;
using Xunit;
namespace StellaOps.Feedser.Storage.Mongo.Tests.Migrations;
[Collection("mongo-fixture")]
public sealed class MongoMigrationRunnerTests
{
private readonly MongoIntegrationFixture _fixture;
// NOTE(review): unlike the sibling test classes, this class does not implement
// IClassFixture<MongoIntegrationFixture>; presumably the "mongo-fixture" collection definition
// supplies the fixture via ICollectionFixture — confirm against the collection class.
public MongoMigrationRunnerTests(MongoIntegrationFixture fixture)
{
_fixture = fixture;
}
// Running the migration runner twice must apply a pending migration exactly once and record
// exactly one ledger entry in the migrations collection (idempotent re-runs).
[Fact]
public async Task RunAsync_AppliesPendingMigrationsOnce()
{
// Isolated database per test; dropped in the finally block.
var databaseName = $"feedser-migrations-{Guid.NewGuid():N}";
var database = _fixture.Client.GetDatabase(databaseName);
await database.CreateCollectionAsync(MongoStorageDefaults.Collections.Migrations);
try
{
var migration = new TestMigration();
var runner = new MongoMigrationRunner(
database,
new IMongoMigration[] { migration },
NullLogger<MongoMigrationRunner>.Instance,
TimeProvider.System);
await runner.RunAsync(CancellationToken.None);
await runner.RunAsync(CancellationToken.None);
Assert.Equal(1, migration.ApplyCount);
// The ledger must show a single applied-migration document.
var count = await database
.GetCollection<BsonDocument>(MongoStorageDefaults.Collections.Migrations)
.CountDocumentsAsync(FilterDefinition<BsonDocument>.Empty);
Assert.Equal(1, count);
}
finally
{
await _fixture.Client.DropDatabaseAsync(databaseName);
}
}
// With raw-document retention configured, the migration must create the TTL index on
// "expiresAt" (expireAfterSeconds = 0, i.e. expire at the stamped time) with a partial filter
// so only documents that actually carry an expiresAt field are eligible.
[Fact]
public async Task EnsureDocumentExpiryIndexesMigration_CreatesTtlIndexWhenRetentionEnabled()
{
var databaseName = $"feedser-doc-ttl-{Guid.NewGuid():N}";
var database = _fixture.Client.GetDatabase(databaseName);
await database.CreateCollectionAsync(MongoStorageDefaults.Collections.Document);
await database.CreateCollectionAsync(MongoStorageDefaults.Collections.Migrations);
try
{
var options = Options.Create(new MongoStorageOptions
{
RawDocumentRetention = TimeSpan.FromDays(45),
RawDocumentRetentionTtlGrace = TimeSpan.FromHours(12),
});
var migration = new EnsureDocumentExpiryIndexesMigration(options);
var runner = new MongoMigrationRunner(
database,
new IMongoMigration[] { migration },
NullLogger<MongoMigrationRunner>.Instance,
TimeProvider.System);
await runner.RunAsync(CancellationToken.None);
// Inspect the raw index list to verify TTL and partial-filter settings.
var indexes = await database
.GetCollection<BsonDocument>(MongoStorageDefaults.Collections.Document)
.Indexes.ListAsync();
var indexList = await indexes.ToListAsync();
var ttlIndex = indexList.Single(x => x["name"].AsString == "document_expiresAt_ttl");
Assert.Equal(0, ttlIndex["expireAfterSeconds"].ToDouble());
Assert.True(ttlIndex["partialFilterExpression"].AsBsonDocument["expiresAt"].AsBsonDocument["$exists"].ToBoolean());
}
finally
{
await _fixture.Client.DropDatabaseAsync(databaseName);
}
}
// With retention disabled (TimeSpan.Zero), a pre-existing TTL index must be dropped and
// replaced by a plain (non-expiring) index on "expiresAt".
[Fact]
public async Task EnsureDocumentExpiryIndexesMigration_DropsTtlIndexWhenRetentionDisabled()
{
var databaseName = $"feedser-doc-notl-{Guid.NewGuid():N}";
var database = _fixture.Client.GetDatabase(databaseName);
await database.CreateCollectionAsync(MongoStorageDefaults.Collections.Document);
await database.CreateCollectionAsync(MongoStorageDefaults.Collections.Migrations);
try
{
// Pre-create the TTL index the migration is expected to remove.
var collection = database.GetCollection<BsonDocument>(MongoStorageDefaults.Collections.Document);
var keys = Builders<BsonDocument>.IndexKeys.Ascending("expiresAt");
var options = new CreateIndexOptions<BsonDocument>
{
Name = "document_expiresAt_ttl",
ExpireAfter = TimeSpan.Zero,
PartialFilterExpression = Builders<BsonDocument>.Filter.Exists("expiresAt", true),
};
await collection.Indexes.CreateOneAsync(new CreateIndexModel<BsonDocument>(keys, options));
var migration = new EnsureDocumentExpiryIndexesMigration(Options.Create(new MongoStorageOptions
{
RawDocumentRetention = TimeSpan.Zero,
}));
var runner = new MongoMigrationRunner(
database,
new IMongoMigration[] { migration },
NullLogger<MongoMigrationRunner>.Instance,
TimeProvider.System);
await runner.RunAsync(CancellationToken.None);
var indexes = await collection.Indexes.ListAsync();
var indexList = await indexes.ToListAsync();
// TTL index gone; replacement index exists and carries no expireAfterSeconds.
Assert.DoesNotContain(indexList, x => x["name"].AsString == "document_expiresAt_ttl");
var nonTtl = indexList.Single(x => x["name"].AsString == "document_expiresAt");
Assert.False(nonTtl.Contains("expireAfterSeconds"));
}
finally
{
await _fixture.Client.DropDatabaseAsync(databaseName);
}
}
// GridFS counterpart of the document-expiry test: with retention enabled, the migration must
// create the TTL index on the "documents.files" collection.
[Fact]
public async Task EnsureGridFsExpiryIndexesMigration_CreatesTtlIndexWhenRetentionEnabled()
{
var databaseName = $"feedser-gridfs-ttl-{Guid.NewGuid():N}";
var database = _fixture.Client.GetDatabase(databaseName);
await database.CreateCollectionAsync("documents.files");
await database.CreateCollectionAsync(MongoStorageDefaults.Collections.Migrations);
try
{
var migration = new EnsureGridFsExpiryIndexesMigration(Options.Create(new MongoStorageOptions
{
RawDocumentRetention = TimeSpan.FromDays(30),
}));
var runner = new MongoMigrationRunner(
database,
new IMongoMigration[] { migration },
NullLogger<MongoMigrationRunner>.Instance,
TimeProvider.System);
await runner.RunAsync(CancellationToken.None);
// expireAfterSeconds = 0 means documents expire at their stamped expiry time.
var indexes = await database.GetCollection<BsonDocument>("documents.files").Indexes.ListAsync();
var indexList = await indexes.ToListAsync();
var ttlIndex = indexList.Single(x => x["name"].AsString == "gridfs_files_expiresAt_ttl");
Assert.Equal(0, ttlIndex["expireAfterSeconds"].ToDouble());
}
finally
{
await _fixture.Client.DropDatabaseAsync(databaseName);
}
}
[Fact]
public async Task EnsureGridFsExpiryIndexesMigration_DropsTtlIndexWhenRetentionDisabled()
{
    // Arrange: isolated throw-away database with a pre-existing TTL index,
    // simulating a deployment where retention used to be enabled.
    var databaseName = $"feedser-gridfs-notl-{Guid.NewGuid():N}";
    var database = _fixture.Client.GetDatabase(databaseName);
    await database.CreateCollectionAsync("documents.files");
    await database.CreateCollectionAsync(MongoStorageDefaults.Collections.Migrations);
    try
    {
        var filesCollection = database.GetCollection<BsonDocument>("documents.files");
        var ttlKeys = Builders<BsonDocument>.IndexKeys.Ascending("metadata.expiresAt");
        var ttlOptions = new CreateIndexOptions<BsonDocument>
        {
            Name = "gridfs_files_expiresAt_ttl",
            ExpireAfter = TimeSpan.Zero,
            PartialFilterExpression = Builders<BsonDocument>.Filter.Exists("metadata.expiresAt", true),
        };
        await filesCollection.Indexes.CreateOneAsync(new CreateIndexModel<BsonDocument>(ttlKeys, ttlOptions));

        // Retention disabled (TimeSpan.Zero) should cause the migration to drop the TTL index.
        var storageOptions = new MongoStorageOptions
        {
            RawDocumentRetention = TimeSpan.Zero,
        };
        var migration = new EnsureGridFsExpiryIndexesMigration(Options.Create(storageOptions));
        var runner = new MongoMigrationRunner(
            database,
            new IMongoMigration[] { migration },
            NullLogger<MongoMigrationRunner>.Instance,
            TimeProvider.System);

        // Act
        await runner.RunAsync(CancellationToken.None);

        // Assert: the TTL index is gone.
        var indexCursor = await filesCollection.Indexes.ListAsync();
        var allIndexes = await indexCursor.ToListAsync();
        Assert.DoesNotContain(allIndexes, x => x["name"].AsString == "gridfs_files_expiresAt_ttl");
    }
    finally
    {
        await _fixture.Client.DropDatabaseAsync(databaseName);
    }
}
// Minimal IMongoMigration stub whose only behavior is counting invocations,
// so tests can verify how many times the runner applied it.
private sealed class TestMigration : IMongoMigration
{
    // Number of times ApplyAsync has been invoked.
    public int ApplyCount { get; private set; }

    public string Id => "999_test";

    public string Description => "test migration";

    public Task ApplyAsync(IMongoDatabase database, CancellationToken cancellationToken)
    {
        ApplyCount += 1;
        return Task.CompletedTask;
    }
}
}

View File

@@ -0,0 +1,113 @@
using Microsoft.Extensions.Logging.Abstractions;
using MongoDB.Driver;
using StellaOps.Feedser.Core.Jobs;
using StellaOps.Feedser.Storage.Mongo;
namespace StellaOps.Feedser.Storage.Mongo.Tests;
[Collection("mongo-fixture")]
public sealed class MongoJobStoreTests : IClassFixture<MongoIntegrationFixture>
{
    private readonly MongoIntegrationFixture _fixture;

    public MongoJobStoreTests(MongoIntegrationFixture fixture)
    {
        _fixture = fixture;
    }

    /// <summary>
    /// Happy-path lifecycle: create a run, start it, complete it successfully,
    /// then verify the recent/active/last-run query surfaces agree.
    /// </summary>
    [Fact]
    public async Task CreateStartCompleteLifecycle()
    {
        var store = await CreateStoreAsync();
        var request = new JobRunCreateRequest(
            Kind: "mongo:test",
            Trigger: "unit",
            Parameters: new Dictionary<string, object?> { ["scope"] = "lifecycle" },
            ParametersHash: "abc",
            Timeout: TimeSpan.FromSeconds(5),
            LeaseDuration: TimeSpan.FromSeconds(2),
            CreatedAt: DateTimeOffset.UtcNow);

        var created = await store.CreateAsync(request, CancellationToken.None);
        Assert.Equal(JobRunStatus.Pending, created.Status);

        var started = await store.TryStartAsync(created.RunId, DateTimeOffset.UtcNow, CancellationToken.None);
        Assert.NotNull(started);
        Assert.Equal(JobRunStatus.Running, started!.Status);

        var completed = await store.TryCompleteAsync(created.RunId, new JobRunCompletion(JobRunStatus.Succeeded, DateTimeOffset.UtcNow, null), CancellationToken.None);
        Assert.NotNull(completed);
        Assert.Equal(JobRunStatus.Succeeded, completed!.Status);

        var recent = await store.GetRecentRunsAsync("mongo:test", 10, CancellationToken.None);
        var snapshot = Assert.Single(recent);
        Assert.Equal(JobRunStatus.Succeeded, snapshot.Status);

        // A completed run must no longer appear as active.
        var active = await store.GetActiveRunsAsync(CancellationToken.None);
        Assert.Empty(active);

        var last = await store.GetLastRunAsync("mongo:test", CancellationToken.None);
        Assert.NotNull(last);
        Assert.Equal(completed.RunId, last!.RunId);
    }

    /// <summary>
    /// A run can only be started once, and a failed completion records both the
    /// failure status and the error message.
    /// </summary>
    [Fact]
    public async Task StartAndFailRunHonorsStateTransitions()
    {
        var store = await CreateStoreAsync();
        var request = new JobRunCreateRequest(
            Kind: "mongo:failure",
            Trigger: "unit",
            Parameters: new Dictionary<string, object?>(),
            ParametersHash: null,
            Timeout: null,
            LeaseDuration: null,
            CreatedAt: DateTimeOffset.UtcNow);

        var created = await store.CreateAsync(request, CancellationToken.None);
        var firstStart = await store.TryStartAsync(created.RunId, DateTimeOffset.UtcNow, CancellationToken.None);
        Assert.NotNull(firstStart);

        // Second start attempt should be rejected once running.
        var secondStart = await store.TryStartAsync(created.RunId, DateTimeOffset.UtcNow.AddSeconds(1), CancellationToken.None);
        Assert.Null(secondStart);

        var failure = await store.TryCompleteAsync(
            created.RunId,
            new JobRunCompletion(JobRunStatus.Failed, DateTimeOffset.UtcNow.AddSeconds(2), "boom"),
            CancellationToken.None);
        Assert.NotNull(failure);
        Assert.Equal("boom", failure!.Error);
        Assert.Equal(JobRunStatus.Failed, failure.Status);
    }

    /// <summary>Completing a run id that was never created returns null rather than throwing.</summary>
    [Fact]
    public async Task CompletingUnknownRunReturnsNull()
    {
        var store = await CreateStoreAsync();
        var result = await store.TryCompleteAsync(Guid.NewGuid(), new JobRunCompletion(JobRunStatus.Succeeded, DateTimeOffset.UtcNow, null), CancellationToken.None);
        Assert.Null(result);
    }

    // Drops the jobs collection and builds a fresh store so each test starts from an
    // empty, isolated state on the shared fixture database.
    private async Task<MongoJobStore> CreateStoreAsync()
    {
        await ResetCollectionAsync();
        var collection = _fixture.Database.GetCollection<JobRunDocument>(MongoStorageDefaults.Collections.Jobs);
        return new MongoJobStore(collection, NullLogger<MongoJobStore>.Instance);
    }

    // Dropping a collection that does not exist raises NamespaceNotFound on some server
    // versions; that case is expected here and safely ignored.
    private async Task ResetCollectionAsync()
    {
        try
        {
            await _fixture.Database.DropCollectionAsync(MongoStorageDefaults.Collections.Jobs);
        }
        catch (MongoCommandException ex) when (ex.CodeName == "NamespaceNotFound" || ex.Message.Contains("ns not found", StringComparison.OrdinalIgnoreCase))
        {
        }
    }
}

View File

@@ -0,0 +1,55 @@
using Microsoft.Extensions.Logging.Abstractions;
using MongoDB.Bson;
using StellaOps.Feedser.Storage.Mongo;
namespace StellaOps.Feedser.Storage.Mongo.Tests;
[Collection("mongo-fixture")]
public sealed class MongoSourceStateRepositoryTests : IClassFixture<MongoIntegrationFixture>
{
    private readonly MongoIntegrationFixture _fixture;

    public MongoSourceStateRepositoryTests(MongoIntegrationFixture fixture)
    {
        _fixture = fixture;
    }

    /// <summary>
    /// Walks a source-state record through upsert, cursor update, failure marking,
    /// and read-back, checking the persisted fields at each step.
    /// </summary>
    [Fact]
    public async Task UpsertAndUpdateCursorFlow()
    {
        var repository = new MongoSourceStateRepository(_fixture.Database, NullLogger<MongoSourceStateRepository>.Instance);
        var sourceName = "nvd";

        var initialState = new SourceStateRecord(
            SourceName: sourceName,
            Enabled: true,
            Paused: false,
            Cursor: new BsonDocument("page", 1),
            LastSuccess: null,
            LastFailure: null,
            FailCount: 0,
            BackoffUntil: null,
            UpdatedAt: DateTimeOffset.UtcNow,
            LastFailureReason: null);

        var upserted = await repository.UpsertAsync(initialState, CancellationToken.None);
        Assert.True(upserted.Enabled);

        // Advancing the cursor keeps the state healthy (fail count stays at zero).
        var nextCursor = new BsonDocument("page", 2);
        var afterCursorUpdate = await repository.UpdateCursorAsync(sourceName, nextCursor, DateTimeOffset.UtcNow, CancellationToken.None);
        Assert.NotNull(afterCursorUpdate);
        Assert.Equal(0, afterCursorUpdate!.FailCount);
        Assert.Equal(2, afterCursorUpdate.Cursor["page"].AsInt32);

        // Marking a failure bumps the fail count, schedules backoff, and records the reason.
        var afterFailure = await repository.MarkFailureAsync(sourceName, DateTimeOffset.UtcNow, TimeSpan.FromMinutes(5), "network timeout", CancellationToken.None);
        Assert.NotNull(afterFailure);
        Assert.Equal(1, afterFailure!.FailCount);
        Assert.NotNull(afterFailure.BackoffUntil);
        Assert.Equal("network timeout", afterFailure.LastFailureReason);

        // Reading the record back returns the same failure details.
        var reloaded = await repository.TryGetAsync(sourceName, CancellationToken.None);
        Assert.NotNull(reloaded);
        Assert.Equal(afterFailure.BackoffUntil, reloaded!.BackoffUntil);
        Assert.Equal("network timeout", reloaded.LastFailureReason);
    }
}

View File

@@ -0,0 +1,93 @@
using Microsoft.Extensions.Logging.Abstractions;
using Microsoft.Extensions.Options;
using Microsoft.Extensions.Time.Testing;
using MongoDB.Bson;
using MongoDB.Driver;
using MongoDB.Driver.GridFS;
using StellaOps.Feedser.Storage.Mongo;
using StellaOps.Feedser.Storage.Mongo.Documents;
using StellaOps.Feedser.Storage.Mongo.Dtos;
namespace StellaOps.Feedser.Storage.Mongo.Tests;
[Collection("mongo-fixture")]
public sealed class RawDocumentRetentionServiceTests : IClassFixture<MongoIntegrationFixture>
{
    private readonly MongoIntegrationFixture _fixture;

    public RawDocumentRetentionServiceTests(MongoIntegrationFixture fixture)
    {
        _fixture = fixture;
    }

    /// <summary>
    /// The sweep removes an expired raw document together with its DTO and GridFS payload,
    /// while leaving a not-yet-expired document untouched.
    /// </summary>
    [Fact]
    public async Task SweepExpiredDocumentsAsync_RemovesExpiredRawDocuments()
    {
        var database = _fixture.Database;

        // The fixture database is shared across test classes; drop the collections this
        // sweep touches so leftover documents from other tests (or prior runs) cannot
        // skew the removed-count assertion below.
        await database.DropCollectionAsync(MongoStorageDefaults.Collections.Document);
        await database.DropCollectionAsync(MongoStorageDefaults.Collections.Dto);
        await database.DropCollectionAsync("documents.files");
        await database.DropCollectionAsync("documents.chunks");

        var documents = database.GetCollection<DocumentDocument>(MongoStorageDefaults.Collections.Document);
        var dtos = database.GetCollection<DtoDocument>(MongoStorageDefaults.Collections.Dto);
        var bucket = new GridFSBucket(database, new GridFSBucketOptions { BucketName = "documents" });

        // Fixed clock so expiry comparisons are deterministic.
        var now = new DateTimeOffset(2024, 10, 1, 12, 0, 0, TimeSpan.Zero);
        var fakeTime = new FakeTimeProvider(now);
        var options = Options.Create(new MongoStorageOptions
        {
            ConnectionString = _fixture.Runner.ConnectionString,
            DatabaseName = database.DatabaseNamespace.DatabaseName,
            RawDocumentRetention = TimeSpan.FromDays(1),
            RawDocumentRetentionTtlGrace = TimeSpan.Zero,
            RawDocumentRetentionSweepInterval = TimeSpan.FromMinutes(5),
        });

        // Expired document: past ExpiresAt, with a DTO and a GridFS payload attached.
        var expiredId = Guid.NewGuid().ToString();
        var gridFsId = await bucket.UploadFromBytesAsync("expired", new byte[] { 1, 2, 3 });
        await documents.InsertOneAsync(new DocumentDocument
        {
            Id = expiredId,
            SourceName = "nvd",
            Uri = "https://example.test/cve",
            FetchedAt = now.AddDays(-2).UtcDateTime,
            Sha256 = "abc",
            Status = "pending",
            ExpiresAt = now.AddMinutes(-5).UtcDateTime,
            GridFsId = gridFsId,
        });
        await dtos.InsertOneAsync(new DtoDocument
        {
            Id = Guid.NewGuid().ToString(),
            DocumentId = expiredId,
            SourceName = "nvd",
            SchemaVersion = "schema",
            Payload = new BsonDocument("value", 1),
            ValidatedAt = now.UtcDateTime,
        });

        // Fresh document: expires in the future, so the sweep must keep it.
        var freshId = Guid.NewGuid().ToString();
        await documents.InsertOneAsync(new DocumentDocument
        {
            Id = freshId,
            SourceName = "nvd",
            Uri = "https://example.test/future",
            FetchedAt = now.UtcDateTime,
            Sha256 = "def",
            Status = "pending",
            ExpiresAt = now.AddHours(1).UtcDateTime,
            GridFsId = null,
        });

        var service = new RawDocumentRetentionService(database, options, NullLogger<RawDocumentRetentionService>.Instance, fakeTime);
        var removed = await service.SweepExpiredDocumentsAsync(CancellationToken.None);

        // Exactly the expired document was removed, cascading to its DTO and GridFS file.
        Assert.Equal(1, removed);
        Assert.Equal(0, await documents.CountDocumentsAsync(d => d.Id == expiredId));
        Assert.Equal(0, await dtos.CountDocumentsAsync(d => d.DocumentId == expiredId));
        Assert.Equal(1, await documents.CountDocumentsAsync(d => d.Id == freshId));

        var filter = Builders<GridFSFileInfo>.Filter.Eq("_id", gridFsId);
        using var cursor = await bucket.FindAsync(filter);
        Assert.Empty(await cursor.ToListAsync());
    }
}

View File

@@ -0,0 +1,12 @@
<Project Sdk="Microsoft.NET.Sdk">
  <PropertyGroup>
    <TargetFramework>net10.0</TargetFramework>
    <ImplicitUsings>enable</ImplicitUsings>
    <Nullable>enable</Nullable>
    <!-- Test assemblies should never be packed as NuGet packages. -->
    <IsPackable>false</IsPackable>
  </PropertyGroup>
  <ItemGroup>
    <!-- xunit test framework + VSTest adapter: required for `dotnet test` to discover
         the [Fact]/[Theory] tests in this project. NOTE(review): if the repository uses
         central package management (Directory.Packages.props) or a shared test props
         import, move/remove these versions accordingly. -->
    <PackageReference Include="Microsoft.NET.Test.Sdk" Version="17.11.1" />
    <PackageReference Include="xunit" Version="2.9.2" />
    <PackageReference Include="xunit.runner.visualstudio" Version="2.8.2" />
  </ItemGroup>
  <ItemGroup>
    <ProjectReference Include="../StellaOps.Feedser.Core/StellaOps.Feedser.Core.csproj" />
    <ProjectReference Include="../StellaOps.Feedser.Models/StellaOps.Feedser.Models.csproj" />
    <ProjectReference Include="../StellaOps.Feedser.Storage.Mongo/StellaOps.Feedser.Storage.Mongo.csproj" />
  </ItemGroup>
</Project>