
StellaOps Bot
2025-11-28 00:45:16 +02:00
parent 3b96b2e3ea
commit 1c6730a1d2
95 changed files with 14504 additions and 463 deletions

View File

@@ -0,0 +1,59 @@
using Microsoft.Extensions.Logging;
using StellaOps.Concelier.Models.Observations;
namespace StellaOps.Concelier.Core.Aoc;
/// <summary>
/// Enforces append-only semantics for advisory observations per LNM-21-004.
/// </summary>
/// <remarks>
/// The Aggregation-Only Contract (AOC) requires that observations are never mutated after creation.
/// This guard allows:
/// - New observations (no existing record)
/// - Idempotent re-inserts (existing record with identical content hash)
///
/// It rejects:
/// - Mutations (existing record with different content hash)
/// </remarks>
public sealed class AdvisoryObservationWriteGuard : IAdvisoryObservationWriteGuard
{
private readonly ILogger<AdvisoryObservationWriteGuard> _logger;
public AdvisoryObservationWriteGuard(ILogger<AdvisoryObservationWriteGuard> logger)
{
_logger = logger ?? throw new ArgumentNullException(nameof(logger));
}
public ObservationWriteDisposition ValidateWrite(AdvisoryObservation observation, string? existingContentHash)
{
ArgumentNullException.ThrowIfNull(observation);
var newContentHash = observation.Upstream.ContentHash;
if (string.IsNullOrWhiteSpace(existingContentHash))
{
_logger.LogDebug(
"Observation {ObservationId} is new, allowing write",
observation.ObservationId);
return ObservationWriteDisposition.Proceed;
}
if (string.Equals(existingContentHash, newContentHash, StringComparison.OrdinalIgnoreCase))
{
_logger.LogDebug(
"Observation {ObservationId} has identical content hash {ContentHash}, skipping as idempotent",
observation.ObservationId,
newContentHash);
return ObservationWriteDisposition.SkipIdentical;
}
_logger.LogWarning(
"Observation {ObservationId} mutation detected: existing hash {ExistingHash} differs from new hash {NewHash}. " +
"Append-only contract violation.",
observation.ObservationId,
existingContentHash,
newContentHash);
return ObservationWriteDisposition.RejectMutation;
}
}
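For orientation, a storage-layer write path would consume this guard roughly as sketched below before persisting an observation. This is a minimal sketch, not part of the commit: the IObservationStore abstraction and its FindContentHashAsync/InsertAsync methods are hypothetical stand-ins for whatever repository the module actually uses, and the exception type referenced is the AppendOnlyViolationException introduced further down.

// Minimal sketch (hypothetical types): mapping the dispositions to write actions.
public sealed class ObservationWritePath
{
    private readonly IAdvisoryObservationWriteGuard _guard;
    private readonly IObservationStore _store; // hypothetical repository abstraction

    public ObservationWritePath(IAdvisoryObservationWriteGuard guard, IObservationStore store)
    {
        _guard = guard;
        _store = store;
    }

    public async Task WriteAsync(AdvisoryObservation observation, CancellationToken cancellationToken)
    {
        // Content hash of the stored record, or null when the observation is new (lookup is hypothetical).
        var existingHash = await _store.FindContentHashAsync(observation.ObservationId, cancellationToken);

        switch (_guard.ValidateWrite(observation, existingHash))
        {
            case ObservationWriteDisposition.Proceed:
                await _store.InsertAsync(observation, cancellationToken);
                break;
            case ObservationWriteDisposition.SkipIdentical:
                break; // idempotent re-insert; nothing to write
            case ObservationWriteDisposition.RejectMutation:
                throw new AppendOnlyViolationException(
                    observation.ObservationId,
                    existingHash!,
                    observation.Upstream.ContentHash);
        }
    }
}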

View File

@@ -35,6 +35,9 @@ public static class AocServiceCollectionExtensions
return new AdvisoryRawWriteGuard(guard, options);
});
// Append-only write guard for observations (LNM-21-004)
services.TryAddSingleton<IAdvisoryObservationWriteGuard, AdvisoryObservationWriteGuard>();
return services;
}
}

View File

@@ -0,0 +1,43 @@
namespace StellaOps.Concelier.Core.Aoc;
/// <summary>
/// Exception thrown when an append-only contract violation is detected.
/// </summary>
/// <remarks>
/// Per LNM-21-004, observations must not be mutated after creation.
/// This exception is thrown when attempting to update an existing observation
/// with different content.
/// </remarks>
[Serializable]
public sealed class AppendOnlyViolationException : Exception
{
public AppendOnlyViolationException(
string observationId,
string existingContentHash,
string newContentHash)
: base($"Append-only violation for observation '{observationId}': " +
$"existing content hash '{existingContentHash}' differs from new hash '{newContentHash}'.")
{
ObservationId = observationId;
ExistingContentHash = existingContentHash;
NewContentHash = newContentHash;
}
public AppendOnlyViolationException(string message) : base(message)
{
}
public AppendOnlyViolationException(string message, Exception innerException) : base(message, innerException)
{
}
public AppendOnlyViolationException()
{
}
public string? ObservationId { get; }
public string? ExistingContentHash { get; }
public string? NewContentHash { get; }
}

View File

@@ -0,0 +1,39 @@
using StellaOps.Concelier.Models.Observations;
namespace StellaOps.Concelier.Core.Aoc;
/// <summary>
/// Guard that enforces append-only semantics for advisory observations.
/// Prevents mutation of existing observations while allowing idempotent re-inserts.
/// </summary>
public interface IAdvisoryObservationWriteGuard
{
/// <summary>
/// Validates an observation write operation for append-only compliance.
/// </summary>
/// <param name="observation">The observation to validate.</param>
/// <param name="existingContentHash">Content hash of existing observation if any, null if new.</param>
/// <returns>Write disposition indicating whether to proceed, skip, or reject.</returns>
ObservationWriteDisposition ValidateWrite(AdvisoryObservation observation, string? existingContentHash);
}
/// <summary>
/// Result of append-only write validation.
/// </summary>
public enum ObservationWriteDisposition
{
/// <summary>
/// Observation is new (no existing record); proceed with write.
/// </summary>
Proceed,
/// <summary>
/// Observation is identical to existing - skip write (idempotent).
/// </summary>
SkipIdentical,
/// <summary>
/// Observation differs from existing - reject mutation (append-only violation).
/// </summary>
RejectMutation
}

View File

@@ -0,0 +1,37 @@
namespace StellaOps.Concelier.Core.Linksets;
/// <summary>
/// Configuration options for the <c>advisory.linkset.updated@1</c> event publisher.
/// </summary>
public sealed class AdvisoryLinksetEventPublisherOptions
{
/// <summary>
/// NATS subject for linkset events. Default: concelier.advisory.linkset.updated.v1
/// </summary>
public string NatsSubject { get; set; } = "concelier.advisory.linkset.updated.v1";
/// <summary>
/// Redis stream key for fallback transport. Default: concelier:advisory.linkset.updated:v1
/// </summary>
public string RedisStreamKey { get; set; } = "concelier:advisory.linkset.updated:v1";
/// <summary>
/// Enable NATS transport. When false, events are stored in outbox only.
/// </summary>
public bool NatsEnabled { get; set; }
/// <summary>
/// Maximum retry attempts for transport failures.
/// </summary>
public int MaxRetries { get; set; } = 5;
/// <summary>
/// Backoff cap in seconds for retry attempts.
/// </summary>
public int BackoffCapSeconds { get; set; } = 30;
/// <summary>
/// Batch size for outbox processing.
/// </summary>
public int OutboxBatchSize { get; set; } = 100;
}
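A host that enables the NATS transport would typically bind these options at startup. The snippet below is a minimal sketch rather than the module's actual wiring: `services` is an IServiceCollection assumed to be in scope, and the specific values are examples only.

// Sketch: configuring the linkset event publisher options (values are examples).
services.Configure<AdvisoryLinksetEventPublisherOptions>(options =>
{
    options.NatsEnabled = true;                                   // publish to NATS instead of outbox-only
    options.NatsSubject = "concelier.advisory.linkset.updated.v1";
    options.MaxRetries = 5;                                       // transport retry budget
    options.BackoffCapSeconds = 30;                               // exponential backoff ceiling
    options.OutboxBatchSize = 100;                                // events drained per outbox pass
});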

View File

@@ -0,0 +1,168 @@
using System;
using System.Collections.Generic;
using System.Collections.Immutable;
using System.Linq;
using StellaOps.Concelier.Models;
namespace StellaOps.Concelier.Core.Linksets;
/// <summary>
/// Contract-matching payload for <c>advisory.linkset.updated@1</c> events.
/// Per LNM-21-005, the event emits delta descriptions and observation ids, carrying tenant and provenance metadata only (no raw advisory content).
/// </summary>
public sealed record AdvisoryLinksetUpdatedEvent(
Guid EventId,
string TenantId,
string LinksetId,
string AdvisoryId,
string Source,
ImmutableArray<string> ObservationIds,
AdvisoryLinksetDelta Delta,
double? Confidence,
ImmutableArray<AdvisoryLinksetConflictSummary> Conflicts,
AdvisoryLinksetProvenanceSummary Provenance,
DateTimeOffset CreatedAt,
string ReplayCursor,
string? BuiltByJobId = null,
string? TraceId = null)
{
public static AdvisoryLinksetUpdatedEvent FromLinkset(
AdvisoryLinkset linkset,
AdvisoryLinkset? previousLinkset,
string linksetId,
string? traceId,
string? replayCursor = null)
{
ArgumentNullException.ThrowIfNull(linkset);
var tenantUrn = linkset.TenantId.StartsWith("urn:tenant:", StringComparison.Ordinal)
? linkset.TenantId
: $"urn:tenant:{linkset.TenantId}";
var delta = ComputeDelta(linkset, previousLinkset);
var conflicts = BuildConflictSummaries(linkset.Conflicts);
var provenance = BuildProvenance(linkset.Provenance);
return new AdvisoryLinksetUpdatedEvent(
EventId: Guid.NewGuid(),
TenantId: tenantUrn,
LinksetId: linksetId,
AdvisoryId: linkset.AdvisoryId,
Source: linkset.Source,
ObservationIds: linkset.ObservationIds,
Delta: delta,
Confidence: linkset.Confidence,
Conflicts: conflicts,
Provenance: provenance,
CreatedAt: linkset.CreatedAt,
ReplayCursor: replayCursor ?? linkset.CreatedAt.ToUniversalTime().Ticks.ToString(),
BuiltByJobId: linkset.BuiltByJobId,
TraceId: traceId);
}
private static AdvisoryLinksetDelta ComputeDelta(AdvisoryLinkset current, AdvisoryLinkset? previous)
{
if (previous is null)
{
return new AdvisoryLinksetDelta(
Type: "created",
ObservationsAdded: current.ObservationIds,
ObservationsRemoved: ImmutableArray<string>.Empty,
ConfidenceChanged: current.Confidence is not null,
ConflictsChanged: current.Conflicts is not null && current.Conflicts.Count > 0);
}
var currentSet = current.ObservationIds.ToHashSet(StringComparer.Ordinal);
var previousSet = previous.ObservationIds.ToHashSet(StringComparer.Ordinal);
var added = current.ObservationIds.Where(id => !previousSet.Contains(id)).ToImmutableArray();
var removed = previous.ObservationIds.Where(id => !currentSet.Contains(id)).ToImmutableArray();
var confidenceChanged = !Equals(current.Confidence, previous.Confidence);
var conflictsChanged = !ConflictsEqual(current.Conflicts, previous.Conflicts);
return new AdvisoryLinksetDelta(
Type: "updated",
ObservationsAdded: added,
ObservationsRemoved: removed,
ConfidenceChanged: confidenceChanged,
ConflictsChanged: conflictsChanged);
}
private static bool ConflictsEqual(IReadOnlyList<AdvisoryLinksetConflict>? a, IReadOnlyList<AdvisoryLinksetConflict>? b)
{
if (a is null && b is null) return true;
if (a is null || b is null) return false;
if (a.Count != b.Count) return false;
for (var i = 0; i < a.Count; i++)
{
if (a[i].Field != b[i].Field || a[i].Reason != b[i].Reason)
{
return false;
}
}
return true;
}
private static ImmutableArray<AdvisoryLinksetConflictSummary> BuildConflictSummaries(
IReadOnlyList<AdvisoryLinksetConflict>? conflicts)
{
if (conflicts is null || conflicts.Count == 0)
{
return ImmutableArray<AdvisoryLinksetConflictSummary>.Empty;
}
return conflicts
.Select(c => new AdvisoryLinksetConflictSummary(c.Field, c.Reason, c.SourceIds?.ToImmutableArray() ?? ImmutableArray<string>.Empty))
.OrderBy(c => c.Field, StringComparer.Ordinal)
.ThenBy(c => c.Reason, StringComparer.Ordinal)
.ToImmutableArray();
}
private static AdvisoryLinksetProvenanceSummary BuildProvenance(AdvisoryLinksetProvenance? provenance)
{
if (provenance is null)
{
return new AdvisoryLinksetProvenanceSummary(
ObservationHashes: ImmutableArray<string>.Empty,
ToolVersion: null,
PolicyHash: null);
}
var hashes = provenance.ObservationHashes?.ToImmutableArray() ?? ImmutableArray<string>.Empty;
return new AdvisoryLinksetProvenanceSummary(
ObservationHashes: hashes,
ToolVersion: provenance.ToolVersion,
PolicyHash: provenance.PolicyHash);
}
}
/// <summary>
/// Delta description for linkset changes.
/// </summary>
public sealed record AdvisoryLinksetDelta(
string Type,
ImmutableArray<string> ObservationsAdded,
ImmutableArray<string> ObservationsRemoved,
bool ConfidenceChanged,
bool ConflictsChanged);
/// <summary>
/// Conflict summary for event payload.
/// </summary>
public sealed record AdvisoryLinksetConflictSummary(
string Field,
string Reason,
ImmutableArray<string> SourceIds);
/// <summary>
/// Provenance summary for event payload.
/// </summary>
public sealed record AdvisoryLinksetProvenanceSummary(
ImmutableArray<string> ObservationHashes,
string? ToolVersion,
string? PolicyHash);
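To make the delta semantics concrete, the snippet below builds an event from a current and previous linkset. It is an illustrative sketch: the AdvisoryLinkset constructor arguments mirror the shape exercised by the tests further down in this commit, and the literal ids and values are made up.

// Sketch: a rebuilt linkset that drops obs-1 and adds obs-3 yields an "updated" delta.
var previous = new AdvisoryLinkset(
    TenantId: "tenant-1",
    Source: "nvd",
    AdvisoryId: "CVE-2024-1234",
    ObservationIds: ImmutableArray.Create("obs-1", "obs-2"),
    Normalized: null,
    Provenance: null,
    Confidence: null,
    Conflicts: null,
    CreatedAt: DateTimeOffset.UtcNow,
    BuiltByJobId: null);

var current = new AdvisoryLinkset(
    TenantId: "tenant-1",
    Source: "nvd",
    AdvisoryId: "CVE-2024-1234",
    ObservationIds: ImmutableArray.Create("obs-2", "obs-3"),
    Normalized: null,
    Provenance: null,
    Confidence: null,
    Conflicts: null,
    CreatedAt: DateTimeOffset.UtcNow,
    BuiltByJobId: null);

var @event = AdvisoryLinksetUpdatedEvent.FromLinkset(current, previous, linksetId: "linkset-1", traceId: null);
// @event.Delta.Type == "updated"
// @event.Delta.ObservationsAdded contains "obs-3"; @event.Delta.ObservationsRemoved contains "obs-1"
// @event.TenantId == "urn:tenant:tenant-1" (the URN prefix is applied when missing)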

View File

@@ -0,0 +1,26 @@
using System.Collections.Generic;
using System.Threading;
using System.Threading.Tasks;
namespace StellaOps.Concelier.Core.Linksets;
/// <summary>
/// Outbox for storing linkset events before transport.
/// </summary>
public interface IAdvisoryLinksetEventOutbox
{
/// <summary>
/// Enqueues a linkset event for later publishing.
/// </summary>
Task EnqueueAsync(AdvisoryLinksetUpdatedEvent @event, CancellationToken cancellationToken);
/// <summary>
/// Retrieves unpublished events up to the specified limit.
/// </summary>
Task<IReadOnlyList<AdvisoryLinksetUpdatedEvent>> GetPendingAsync(int limit, CancellationToken cancellationToken);
/// <summary>
/// Marks an event as published.
/// </summary>
Task MarkPublishedAsync(AdvisoryLinksetUpdatedEvent @event, CancellationToken cancellationToken);
}

View File

@@ -0,0 +1,12 @@
using System.Threading;
using System.Threading.Tasks;
namespace StellaOps.Concelier.Core.Linksets;
/// <summary>
/// Publishes <c>advisory.linkset.updated@1</c> events.
/// </summary>
public interface IAdvisoryLinksetEventPublisher
{
Task PublishAsync(AdvisoryLinksetUpdatedEvent @event, CancellationToken cancellationToken);
}

View File

@@ -29,6 +29,8 @@ This module owns the persistent shape of Concelier's MongoDB database. Upgrades
| `20251104_advisory_observations_raw_linkset` | Backfills `rawLinkset` on `advisory_observations` using stored `advisory_raw` documents so canonical and raw projections co-exist for downstream policy joins. |
| `20251120_advisory_observation_events` | Creates `advisory_observation_events` collection with tenant/hash indexes for observation event fan-out (advisory.observation.updated@1). Includes optional `publishedAt` marker for transport outbox. |
| `20251117_advisory_linksets_tenant_lower` | Lowercases `advisory_linksets.tenantId` to align writes with lookup filters. |
| `20251116_link_not_merge_collections` | Ensures `advisory_observations` and `advisory_linksets` collections exist with JSON schema validators and baseline indexes for LNM. |
| `20251127_lnm_sharding_and_ttl` | Adds hashed shard key indexes on `tenantId` for horizontal scaling and optional TTL indexes on `ingestedAt`/`createdAt` for storage retention. Creates `advisory_linkset_events` collection for linkset event outbox (LNM-21-101-DEV). |
## Operator Runbook

View File

@@ -0,0 +1,548 @@
using System;
using System.Collections.Generic;
using System.Collections.Immutable;
using System.Globalization;
using System.Linq;
using System.Text.Json;
using System.Text.Json.Nodes;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Options;
using MongoDB.Bson;
using MongoDB.Bson.IO;
using MongoDB.Driver;
using StellaOps.Concelier.Storage.Mongo.Observations;
namespace StellaOps.Concelier.Storage.Mongo.Migrations;
/// <summary>
/// Backfills advisory_observations and advisory_linksets from existing advisory_raw documents.
/// Per LNM-21-102-DEV: Creates immutable observations from raw documents and groups them into linksets.
/// Also seeds tombstones for rollback tracking (backfill_marker field) to support Offline Kit rollback.
/// </summary>
internal sealed class EnsureLegacyAdvisoriesBackfillMigration : IMongoMigration
{
private const int BulkBatchSize = 250;
private const string BackfillMarkerField = "backfill_marker";
private const string BackfillMarkerValue = "lnm_21_102_dev";
private static readonly JsonWriterSettings JsonSettings = new() { OutputMode = JsonOutputMode.RelaxedExtendedJson };
private readonly MongoStorageOptions _options;
private readonly ILogger<EnsureLegacyAdvisoriesBackfillMigration> _logger;
public EnsureLegacyAdvisoriesBackfillMigration(
IOptions<MongoStorageOptions> options,
ILogger<EnsureLegacyAdvisoriesBackfillMigration> logger)
{
ArgumentNullException.ThrowIfNull(options);
ArgumentNullException.ThrowIfNull(logger);
_options = options.Value;
_logger = logger;
}
public string Id => "20251127_lnm_legacy_backfill";
public string Description => "Backfill advisory_observations and advisory_linksets from advisory_raw; seed tombstones for rollback (LNM-21-102-DEV)";
public async Task ApplyAsync(IMongoDatabase database, CancellationToken cancellationToken)
{
ArgumentNullException.ThrowIfNull(database);
var rawCollection = database.GetCollection<BsonDocument>(MongoStorageDefaults.Collections.AdvisoryRaw);
var observationsCollection = database.GetCollection<BsonDocument>(MongoStorageDefaults.Collections.AdvisoryObservations);
var linksetsCollection = database.GetCollection<BsonDocument>(MongoStorageDefaults.Collections.AdvisoryLinksets);
_logger.LogInformation("Starting legacy advisory backfill migration {MigrationId}", Id);
var backfilledObservations = await BackfillObservationsAsync(
rawCollection,
observationsCollection,
cancellationToken).ConfigureAwait(false);
_logger.LogInformation(
"Backfilled {Count} observations from advisory_raw",
backfilledObservations.Count);
if (backfilledObservations.Count > 0)
{
var linksetsCreated = await CreateLinksetsFromObservationsAsync(
observationsCollection,
linksetsCollection,
backfilledObservations,
cancellationToken).ConfigureAwait(false);
_logger.LogInformation(
"Created/updated {Count} linksets from backfilled observations",
linksetsCreated);
}
await SeedTombstonesAsync(rawCollection, cancellationToken).ConfigureAwait(false);
_logger.LogInformation("Completed legacy advisory backfill migration {MigrationId}", Id);
}
private async Task<IReadOnlyList<string>> BackfillObservationsAsync(
IMongoCollection<BsonDocument> rawCollection,
IMongoCollection<BsonDocument> observationsCollection,
CancellationToken ct)
{
var backfilledIds = new List<string>();
var batchSize = Math.Max(25, _options.BackfillBatchSize);
string? lastId = null;
while (true)
{
var filter = Builders<BsonDocument>.Filter.Empty;
if (!string.IsNullOrEmpty(lastId))
{
filter = Builders<BsonDocument>.Filter.Gt("_id", lastId);
}
var rawDocs = await rawCollection
.Find(filter)
.Sort(Builders<BsonDocument>.Sort.Ascending("_id"))
.Limit(batchSize)
.ToListAsync(ct)
.ConfigureAwait(false);
if (rawDocs.Count == 0)
{
break;
}
lastId = rawDocs[^1]["_id"].AsString;
var rawDocIds = rawDocs
.Select(d => BuildObservationIdFromRaw(d))
.Where(id => !string.IsNullOrEmpty(id))
.ToArray();
var existingFilter = Builders<BsonDocument>.Filter.In("_id", rawDocIds);
var existingObservations = await observationsCollection
.Find(existingFilter)
.Project(Builders<BsonDocument>.Projection.Include("_id"))
.ToListAsync(ct)
.ConfigureAwait(false);
var existingIds = existingObservations
.Select(d => d["_id"].AsString)
.ToHashSet(StringComparer.Ordinal);
var newObservations = new List<BsonDocument>();
foreach (var rawDoc in rawDocs)
{
var observationId = BuildObservationIdFromRaw(rawDoc);
if (string.IsNullOrEmpty(observationId) || existingIds.Contains(observationId))
{
continue;
}
var observation = MapRawToObservation(rawDoc, observationId);
if (observation is not null)
{
newObservations.Add(observation);
backfilledIds.Add(observationId);
}
}
if (newObservations.Count > 0)
{
try
{
await observationsCollection.InsertManyAsync(
newObservations,
new InsertManyOptions { IsOrdered = false },
ct).ConfigureAwait(false);
}
catch (MongoBulkWriteException ex) when (ex.WriteErrors.All(e => e.Category == ServerErrorCategory.DuplicateKey))
{
_logger.LogDebug(
"Some observations already exist during backfill batch; continuing with {Inserted} inserted",
newObservations.Count - ex.WriteErrors.Count);
}
}
}
return backfilledIds;
}
private async Task<int> CreateLinksetsFromObservationsAsync(
IMongoCollection<BsonDocument> observationsCollection,
IMongoCollection<BsonDocument> linksetsCollection,
IReadOnlyList<string> observationIds,
CancellationToken ct)
{
var filter = Builders<BsonDocument>.Filter.In("_id", observationIds);
var pipeline = new EmptyPipelineDefinition<BsonDocument>()
.Match(filter)
.Group(new BsonDocument
{
{
"_id",
new BsonDocument
{
{ "tenant", "$tenant" },
{ "advisoryKey", new BsonDocument("$ifNull", new BsonArray { "$advisoryKey", "$linkset.aliases" }) },
{ "vendor", "$source.vendor" }
}
},
{ "observationIds", new BsonDocument("$push", "$_id") },
{ "latestCreatedAt", new BsonDocument("$max", "$createdAt") },
{
"purls",
new BsonDocument("$push", new BsonDocument("$ifNull", new BsonArray { "$linkset.purls", new BsonArray() }))
},
{
"cpes",
new BsonDocument("$push", new BsonDocument("$ifNull", new BsonArray { "$linkset.cpes", new BsonArray() }))
}
});
using var cursor = await observationsCollection
.AggregateAsync(pipeline, cancellationToken: ct)
.ConfigureAwait(false);
var linksetUpdates = new List<WriteModel<BsonDocument>>();
var createdCount = 0;
while (await cursor.MoveNextAsync(ct).ConfigureAwait(false))
{
foreach (var group in cursor.Current)
{
var groupId = group["_id"].AsBsonDocument;
var tenant = groupId.GetValue("tenant", BsonString.Empty).AsString;
var advisoryKey = ExtractAdvisoryKeyFromGroup(groupId);
var vendor = groupId.GetValue("vendor", BsonString.Empty).AsString;
var observations = group["observationIds"].AsBsonArray.Select(v => v.AsString).ToList();
var latestCreatedAt = group["latestCreatedAt"].ToUniversalTime();
if (string.IsNullOrWhiteSpace(tenant) || string.IsNullOrWhiteSpace(advisoryKey) || observations.Count == 0)
{
continue;
}
var purls = FlattenArrayOfArrays(group["purls"].AsBsonArray);
var cpes = FlattenArrayOfArrays(group["cpes"].AsBsonArray);
var linksetFilter = Builders<BsonDocument>.Filter.And(
Builders<BsonDocument>.Filter.Eq("tenantId", tenant.ToLowerInvariant()),
Builders<BsonDocument>.Filter.Eq("source", vendor),
Builders<BsonDocument>.Filter.Eq("advisoryId", advisoryKey));
var linksetUpdate = new BsonDocument
{
{ "$setOnInsert", new BsonDocument
{
{ "tenantId", tenant.ToLowerInvariant() },
{ "source", vendor },
{ "advisoryId", advisoryKey },
{ "createdAt", latestCreatedAt },
{ BackfillMarkerField, BackfillMarkerValue }
}
},
{ "$addToSet", new BsonDocument
{
{ "observations", new BsonDocument("$each", new BsonArray(observations)) }
}
},
{ "$set", new BsonDocument
{
{ "normalized.purls", new BsonArray(purls.Distinct(StringComparer.Ordinal)) },
{ "normalized.cpes", new BsonArray(cpes.Distinct(StringComparer.Ordinal)) }
}
}
};
linksetUpdates.Add(new UpdateOneModel<BsonDocument>(linksetFilter, linksetUpdate)
{
IsUpsert = true
});
createdCount++;
if (linksetUpdates.Count >= BulkBatchSize)
{
await linksetsCollection.BulkWriteAsync(linksetUpdates, cancellationToken: ct).ConfigureAwait(false);
linksetUpdates.Clear();
}
}
}
if (linksetUpdates.Count > 0)
{
await linksetsCollection.BulkWriteAsync(linksetUpdates, cancellationToken: ct).ConfigureAwait(false);
}
return createdCount;
}
private async Task SeedTombstonesAsync(
IMongoCollection<BsonDocument> rawCollection,
CancellationToken ct)
{
var filter = Builders<BsonDocument>.Filter.Exists(BackfillMarkerField, false);
var update = Builders<BsonDocument>.Update.Set(BackfillMarkerField, BackfillMarkerValue);
var result = await rawCollection
.UpdateManyAsync(filter, update, cancellationToken: ct)
.ConfigureAwait(false);
_logger.LogInformation(
"Seeded tombstone markers on {Count} advisory_raw documents for rollback tracking",
result.ModifiedCount);
}
private static string BuildObservationIdFromRaw(BsonDocument rawDoc)
{
var tenant = rawDoc.GetValue("tenant", BsonString.Empty).AsString;
var sourceDoc = rawDoc.GetValue("source", BsonNull.Value);
var upstreamDoc = rawDoc.GetValue("upstream", BsonNull.Value);
if (sourceDoc.IsBsonNull || upstreamDoc.IsBsonNull)
{
return string.Empty;
}
var vendor = sourceDoc.AsBsonDocument.GetValue("vendor", BsonString.Empty).AsString;
var upstreamId = upstreamDoc.AsBsonDocument.GetValue("upstream_id", BsonString.Empty).AsString;
var contentHash = upstreamDoc.AsBsonDocument.GetValue("content_hash", BsonString.Empty).AsString;
if (string.IsNullOrWhiteSpace(tenant) || string.IsNullOrWhiteSpace(vendor) ||
string.IsNullOrWhiteSpace(upstreamId) || string.IsNullOrWhiteSpace(contentHash))
{
return string.Empty;
}
return $"obs:{tenant}:{vendor}:{SanitizeIdSegment(upstreamId)}:{ShortenHash(contentHash)}";
}
private static BsonDocument? MapRawToObservation(BsonDocument rawDoc, string observationId)
{
try
{
var tenant = rawDoc.GetValue("tenant", BsonString.Empty).AsString;
var sourceDoc = rawDoc["source"].AsBsonDocument;
var upstreamDoc = rawDoc["upstream"].AsBsonDocument;
var contentDoc = rawDoc["content"].AsBsonDocument;
var linksetDoc = rawDoc.GetValue("linkset", new BsonDocument()).AsBsonDocument;
var advisoryKey = rawDoc.GetValue("advisory_key", BsonString.Empty).AsString;
var ingestedAt = GetDateTime(rawDoc, "ingested_at");
var retrievedAt = GetDateTime(upstreamDoc, "retrieved_at");
var observation = new BsonDocument
{
{ "_id", observationId },
{ "tenant", tenant },
{ "advisoryKey", advisoryKey },
{
"source", new BsonDocument
{
{ "vendor", sourceDoc.GetValue("vendor", BsonString.Empty).AsString },
{ "stream", sourceDoc.GetValue("stream", BsonString.Empty).AsString },
{ "api", sourceDoc.GetValue("connector", BsonString.Empty).AsString },
{ "collectorVersion", sourceDoc.GetValue("version", BsonNull.Value) }
}
},
{
"upstream", new BsonDocument
{
{ "upstream_id", upstreamDoc.GetValue("upstream_id", BsonString.Empty).AsString },
{ "document_version", upstreamDoc.GetValue("document_version", BsonNull.Value) },
{ "fetchedAt", retrievedAt },
{ "receivedAt", ingestedAt },
{ "contentHash", upstreamDoc.GetValue("content_hash", BsonString.Empty).AsString },
{
"signature", MapSignature(upstreamDoc.GetValue("signature", new BsonDocument()).AsBsonDocument)
},
{ "metadata", upstreamDoc.GetValue("provenance", new BsonDocument()) }
}
},
{
"content", new BsonDocument
{
{ "format", contentDoc.GetValue("format", BsonString.Empty).AsString },
{ "specVersion", contentDoc.GetValue("spec_version", BsonNull.Value) },
{ "raw", contentDoc.GetValue("raw", new BsonDocument()) },
{ "metadata", new BsonDocument() }
}
},
{ "linkset", MapLinkset(linksetDoc) },
{ "rawLinkset", MapRawLinkset(linksetDoc, rawDoc.GetValue("identifiers", new BsonDocument()).AsBsonDocument) },
{ "createdAt", ingestedAt },
{ "ingestedAt", ingestedAt },
{ BackfillMarkerField, BackfillMarkerValue }
};
return observation;
}
// Skip raw documents that do not match the expected observation shape so the backfill can continue.
catch (Exception)
{
return null;
}
}
private static BsonDocument MapSignature(BsonDocument signatureDoc)
{
return new BsonDocument
{
{ "present", signatureDoc.GetValue("present", BsonBoolean.False).AsBoolean },
{ "format", signatureDoc.GetValue("format", BsonNull.Value) },
{ "keyId", signatureDoc.GetValue("key_id", BsonNull.Value) },
{ "signature", signatureDoc.GetValue("sig", BsonNull.Value) }
};
}
private static BsonDocument MapLinkset(BsonDocument linksetDoc)
{
return new BsonDocument
{
{ "aliases", linksetDoc.GetValue("aliases", new BsonArray()) },
{ "purls", linksetDoc.GetValue("purls", new BsonArray()) },
{ "cpes", linksetDoc.GetValue("cpes", new BsonArray()) },
{ "references", MapReferences(linksetDoc.GetValue("references", new BsonArray()).AsBsonArray) }
};
}
private static BsonArray MapReferences(BsonArray referencesArray)
{
var result = new BsonArray();
foreach (var refValue in referencesArray)
{
if (!refValue.IsBsonDocument)
{
continue;
}
var refDoc = refValue.AsBsonDocument;
result.Add(new BsonDocument
{
{ "type", refDoc.GetValue("type", BsonString.Empty).AsString },
{ "url", refDoc.GetValue("url", BsonString.Empty).AsString }
});
}
return result;
}
private static BsonDocument MapRawLinkset(BsonDocument linksetDoc, BsonDocument identifiersDoc)
{
var aliases = new BsonArray();
if (identifiersDoc.TryGetValue("primary", out var primary) && !primary.IsBsonNull)
{
aliases.Add(primary);
}
if (identifiersDoc.TryGetValue("aliases", out var idAliases) && idAliases.IsBsonArray)
{
foreach (var alias in idAliases.AsBsonArray)
{
aliases.Add(alias);
}
}
if (linksetDoc.TryGetValue("aliases", out var linkAliases) && linkAliases.IsBsonArray)
{
foreach (var alias in linkAliases.AsBsonArray)
{
aliases.Add(alias);
}
}
return new BsonDocument
{
{ "aliases", aliases },
{ "scopes", new BsonArray() },
{ "relationships", new BsonArray() },
{ "purls", linksetDoc.GetValue("purls", new BsonArray()) },
{ "cpes", linksetDoc.GetValue("cpes", new BsonArray()) },
{ "references", linksetDoc.GetValue("references", new BsonArray()) },
{ "reconciled_from", linksetDoc.GetValue("reconciled_from", new BsonArray()) },
{ "notes", linksetDoc.GetValue("notes", new BsonDocument()) }
};
}
private static string ExtractAdvisoryKeyFromGroup(BsonDocument groupId)
{
var advisoryKeyValue = groupId.GetValue("advisoryKey", BsonNull.Value);
if (advisoryKeyValue.IsBsonArray)
{
var array = advisoryKeyValue.AsBsonArray;
return array.Count > 0 ? array[0].AsString : string.Empty;
}
return advisoryKeyValue.IsBsonNull ? string.Empty : advisoryKeyValue.AsString;
}
private static IReadOnlyList<string> FlattenArrayOfArrays(BsonArray arrayOfArrays)
{
var result = new List<string>();
foreach (var item in arrayOfArrays)
{
if (item.IsBsonArray)
{
foreach (var subItem in item.AsBsonArray)
{
if (subItem.IsString && !string.IsNullOrWhiteSpace(subItem.AsString))
{
result.Add(subItem.AsString);
}
}
}
else if (item.IsString && !string.IsNullOrWhiteSpace(item.AsString))
{
result.Add(item.AsString);
}
}
return result;
}
private static DateTime GetDateTime(BsonDocument doc, string field)
{
if (!doc.TryGetValue(field, out var value) || value.IsBsonNull)
{
return DateTime.UtcNow;
}
return value.BsonType switch
{
BsonType.DateTime => value.ToUniversalTime(),
BsonType.String when DateTime.TryParse(value.AsString, CultureInfo.InvariantCulture, DateTimeStyles.AssumeUniversal, out var parsed)
=> parsed.ToUniversalTime(),
BsonType.Int64 => DateTimeOffset.FromUnixTimeMilliseconds(value.AsInt64).UtcDateTime,
_ => DateTime.UtcNow
};
}
private static string SanitizeIdSegment(string value)
{
if (string.IsNullOrWhiteSpace(value))
{
return "unknown";
}
var sanitized = string.Concat(value.Select(c =>
char.IsLetterOrDigit(c) ? char.ToLowerInvariant(c) : (c is '-' or '.' ? c : '-')));
sanitized = sanitized.Trim('-');
if (string.IsNullOrEmpty(sanitized))
{
return "unknown";
}
return sanitized.Length > 48 ? sanitized[..48] : sanitized;
}
private static string ShortenHash(string hash)
{
if (string.IsNullOrWhiteSpace(hash))
{
return "0";
}
var clean = hash.Replace(":", "-");
return clean.Length > 12 ? clean[..12] : clean;
}
}
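For reference, the deterministic observation id produced by BuildObservationIdFromRaw is composed as obs:{tenant}:{vendor}:{sanitized upstream id}:{first 12 characters of the content hash}. The trace below uses made-up inputs purely to illustrate the shape.

// Illustrative (hypothetical) inputs and the id the backfill would derive from them:
//   tenant       = "acme"
//   vendor       = "nvd"
//   upstream_id  = "CVE-2024-1234"           -> sanitized to "cve-2024-1234"
//   content_hash = "sha256:0123456789abcdef" -> shortened to "sha256-01234"
// Resulting observation _id:
//   "obs:acme:nvd:cve-2024-1234:sha256-01234"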

View File

@@ -0,0 +1,203 @@
using System;
using System.Collections.Generic;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.Extensions.Options;
using MongoDB.Bson;
using MongoDB.Driver;
namespace StellaOps.Concelier.Storage.Mongo.Migrations;
/// <summary>
/// Adds hashed shard key indexes and TTL indexes for LNM collections.
/// Per LNM-21-101-DEV: hashed shard keys for horizontal scaling, tenant indexes, TTL for ingest metadata.
/// </summary>
internal sealed class EnsureLinkNotMergeShardingAndTtlMigration : IMongoMigration
{
private readonly MongoStorageOptions _options;
public EnsureLinkNotMergeShardingAndTtlMigration(IOptions<MongoStorageOptions> options)
{
ArgumentNullException.ThrowIfNull(options);
_options = options.Value;
}
public string Id => "20251127_lnm_sharding_and_ttl";
public string Description => "Add hashed shard key indexes and TTL indexes for advisory_observations and advisory_linksets (LNM-21-101-DEV)";
public async Task ApplyAsync(IMongoDatabase database, CancellationToken cancellationToken)
{
ArgumentNullException.ThrowIfNull(database);
await EnsureObservationShardingAndTtlAsync(database, cancellationToken).ConfigureAwait(false);
await EnsureLinksetShardingAndTtlAsync(database, cancellationToken).ConfigureAwait(false);
await EnsureLinksetEventShardingAndTtlAsync(database, cancellationToken).ConfigureAwait(false);
}
private async Task EnsureObservationShardingAndTtlAsync(IMongoDatabase database, CancellationToken ct)
{
var collection = database.GetCollection<BsonDocument>(MongoStorageDefaults.Collections.AdvisoryObservations);
var indexes = new List<CreateIndexModel<BsonDocument>>();
// Hashed shard key on tenantId for horizontal scaling
indexes.Add(new CreateIndexModel<BsonDocument>(
new BsonDocument("tenantId", "hashed"),
new CreateIndexOptions { Name = "obs_tenantId_hashed", Background = true }));
// TTL index on ingestedAt if retention is configured
var needsTtl = _options.ObservationRetention > TimeSpan.Zero;
if (needsTtl)
{
await EnsureTtlIndexAsync(
collection,
"ingestedAt",
"obs_ingestedAt_ttl",
_options.ObservationRetention,
ct).ConfigureAwait(false);
}
await collection.Indexes.CreateManyAsync(indexes, cancellationToken: ct).ConfigureAwait(false);
}
private async Task EnsureLinksetShardingAndTtlAsync(IMongoDatabase database, CancellationToken ct)
{
var collection = database.GetCollection<BsonDocument>(MongoStorageDefaults.Collections.AdvisoryLinksets);
var indexes = new List<CreateIndexModel<BsonDocument>>();
// Hashed shard key on tenantId for horizontal scaling
indexes.Add(new CreateIndexModel<BsonDocument>(
new BsonDocument("tenantId", "hashed"),
new CreateIndexOptions { Name = "linkset_tenantId_hashed", Background = true }));
await collection.Indexes.CreateManyAsync(indexes, cancellationToken: ct).ConfigureAwait(false);
// TTL index on createdAt if retention is configured
var needsTtl = _options.LinksetRetention > TimeSpan.Zero;
if (needsTtl)
{
await EnsureTtlIndexAsync(
collection,
"createdAt",
"linkset_createdAt_ttl",
_options.LinksetRetention,
ct).ConfigureAwait(false);
}
}
private async Task EnsureLinksetEventShardingAndTtlAsync(IMongoDatabase database, CancellationToken ct)
{
// Check if linkset events collection exists (future-proofing for event outbox)
var collectionName = "advisory_linkset_events";
var filter = new BsonDocument("name", collectionName);
using var cursor = await database.ListCollectionsAsync(new ListCollectionsOptions { Filter = filter }, ct).ConfigureAwait(false);
var exists = await cursor.AnyAsync(ct).ConfigureAwait(false);
if (!exists)
{
// Create the collection for linkset events with basic schema
var validator = new BsonDocument("$jsonSchema", new BsonDocument
{
{ "bsonType", "object" },
{ "required", new BsonArray { "_id", "tenantId", "eventId", "linksetId", "createdAt" } },
{ "properties", new BsonDocument
{
{ "_id", new BsonDocument("bsonType", "objectId") },
{ "tenantId", new BsonDocument("bsonType", "string") },
{ "eventId", new BsonDocument("bsonType", "string") },
{ "linksetId", new BsonDocument("bsonType", "string") },
{ "advisoryId", new BsonDocument("bsonType", "string") },
{ "payload", new BsonDocument("bsonType", "object") },
{ "createdAt", new BsonDocument("bsonType", "date") },
{ "publishedAt", new BsonDocument("bsonType", new BsonArray { "date", "null" }) }
}
}
});
var createOptions = new CreateCollectionOptions<BsonDocument>
{
Validator = validator,
ValidationLevel = DocumentValidationLevel.Moderate,
ValidationAction = DocumentValidationAction.Error,
};
await database.CreateCollectionAsync(collectionName, createOptions, ct).ConfigureAwait(false);
}
var collection = database.GetCollection<BsonDocument>(collectionName);
var indexes = new List<CreateIndexModel<BsonDocument>>
{
// Hashed shard key
new(new BsonDocument("tenantId", "hashed"),
new CreateIndexOptions { Name = "linkset_event_tenantId_hashed", Background = true }),
// Unique event ID index
new(new BsonDocument("eventId", 1),
new CreateIndexOptions { Name = "linkset_event_eventId_unique", Unique = true, Background = true }),
// Outbox processing index (unpublished events)
new(new BsonDocument { { "publishedAt", 1 }, { "createdAt", 1 } },
new CreateIndexOptions { Name = "linkset_event_outbox", Background = true })
};
await collection.Indexes.CreateManyAsync(indexes, cancellationToken: ct).ConfigureAwait(false);
// TTL for event cleanup
var needsTtl = _options.EventRetention > TimeSpan.Zero;
if (needsTtl)
{
await EnsureTtlIndexAsync(
collection,
"createdAt",
"linkset_event_createdAt_ttl",
_options.EventRetention,
ct).ConfigureAwait(false);
}
}
private static async Task EnsureTtlIndexAsync(
IMongoCollection<BsonDocument> collection,
string field,
string indexName,
TimeSpan expiration,
CancellationToken ct)
{
using var cursor = await collection.Indexes.ListAsync(ct).ConfigureAwait(false);
var indexes = await cursor.ToListAsync(ct).ConfigureAwait(false);
var existing = indexes.FirstOrDefault(x =>
x.TryGetValue("name", out var name) &&
name.IsString &&
name.AsString == indexName);
if (existing is not null)
{
// Check if TTL value matches expected
if (existing.TryGetValue("expireAfterSeconds", out var expireAfter))
{
var expectedSeconds = (long)expiration.TotalSeconds;
if (expireAfter.ToInt64() == expectedSeconds)
{
return; // Index already correct
}
}
// Drop and recreate with correct TTL
await collection.Indexes.DropOneAsync(indexName, ct).ConfigureAwait(false);
}
var options = new CreateIndexOptions<BsonDocument>
{
Name = indexName,
ExpireAfter = expiration,
Background = true
};
var keys = Builders<BsonDocument>.IndexKeys.Ascending(field);
await collection.Indexes.CreateOneAsync(new CreateIndexModel<BsonDocument>(keys, options), cancellationToken: ct).ConfigureAwait(false);
}
}
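The TTL indexes above are only created when the corresponding retention options are non-zero. A host opting into retention might configure them roughly as follows; AddMongoStorage and the option names match the code in this commit, while `services`, the connection string, and the retention values are assumptions used for illustration.

// Sketch: opting into TTL-based retention for LNM collections (values are examples).
services.AddMongoStorage(options =>
{
    options.ConnectionString = "mongodb://localhost:27017/concelier";
    options.ObservationRetention = TimeSpan.FromDays(180); // TTL on advisory_observations.ingestedAt
    options.LinksetRetention = TimeSpan.Zero;              // keep linksets indefinitely
    options.EventRetention = TimeSpan.FromDays(30);        // TTL on advisory_linkset_events.createdAt
});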

View File

@@ -1,32 +1,51 @@
using MongoDB.Driver;
namespace StellaOps.Concelier.Storage.Mongo;
public sealed class MongoStorageOptions
{
public string ConnectionString { get; set; } = string.Empty;
public string? DatabaseName { get; set; }
public TimeSpan CommandTimeout { get; set; } = TimeSpan.FromSeconds(30);
/// <summary>
/// Retention period for raw documents (document + DTO + GridFS payloads).
/// Set to <see cref="TimeSpan.Zero"/> to disable automatic expiry.
/// </summary>
public TimeSpan RawDocumentRetention { get; set; } = TimeSpan.FromDays(45);
/// <summary>
/// Additional grace period applied on top of <see cref="RawDocumentRetention"/> before TTL purges old rows.
/// Allows the retention background service to delete GridFS blobs first.
/// </summary>
public TimeSpan RawDocumentRetentionTtlGrace { get; set; } = TimeSpan.FromDays(1);
/// <summary>
/// Interval between retention sweeps. Only used when <see cref="RawDocumentRetention"/> is greater than zero.
/// </summary>
public TimeSpan RawDocumentRetentionSweepInterval { get; set; } = TimeSpan.FromHours(6);
/// <summary>
/// Retention period for observation documents (advisory_observations).
/// Set to <see cref="TimeSpan.Zero"/> to disable automatic expiry.
/// Per LNM-21-101-DEV: observations are append-only but may be TTL-pruned for storage efficiency.
/// </summary>
public TimeSpan ObservationRetention { get; set; } = TimeSpan.Zero;
/// <summary>
/// Retention period for linkset documents (advisory_linksets).
/// Set to <see cref="TimeSpan.Zero"/> to disable automatic expiry.
/// </summary>
public TimeSpan LinksetRetention { get; set; } = TimeSpan.Zero;
/// <summary>
/// Retention period for event documents (advisory_observation_events, advisory_linkset_events).
/// Set to <see cref="TimeSpan.Zero"/> to disable automatic expiry.
/// </summary>
public TimeSpan EventRetention { get; set; } = TimeSpan.FromDays(30);
/// <summary>
/// Enables dual-write of normalized SemVer analytics for affected packages.
/// </summary>
@@ -49,23 +68,23 @@ public sealed class MongoStorageOptions
public string GetDatabaseName()
{
if (!string.IsNullOrWhiteSpace(DatabaseName))
{
return DatabaseName.Trim();
}
if (!string.IsNullOrWhiteSpace(ConnectionString))
{
var url = MongoUrl.Create(ConnectionString);
if (!string.IsNullOrWhiteSpace(url.DatabaseName))
{
return url.DatabaseName;
}
}
return MongoStorageDefaults.DefaultDatabaseName;
}
public void EnsureValid()
{
var isTesting = string.Equals(
@@ -96,22 +115,22 @@ public sealed class MongoStorageOptions
{
throw new InvalidOperationException("Mongo connection string is not configured.");
}
if (CommandTimeout <= TimeSpan.Zero)
{
throw new InvalidOperationException("Command timeout must be greater than zero.");
}
if (RawDocumentRetention < TimeSpan.Zero)
{
throw new InvalidOperationException("Raw document retention cannot be negative.");
}
if (RawDocumentRetentionTtlGrace < TimeSpan.Zero)
{
throw new InvalidOperationException("Raw document retention TTL grace cannot be negative.");
}
if (RawDocumentRetention > TimeSpan.Zero && RawDocumentRetentionSweepInterval <= TimeSpan.Zero)
{
throw new InvalidOperationException("Raw document retention sweep interval must be positive when retention is enabled.");

View File

@@ -1,5 +1,5 @@
using Microsoft.Extensions.DependencyInjection;
using Microsoft.Extensions.DependencyInjection.Extensions;
using Microsoft.Extensions.Hosting;
using Microsoft.Extensions.Options;
using MongoDB.Driver;
@@ -28,7 +28,7 @@ using StellaOps.Concelier.Storage.Mongo.Orchestrator;
namespace StellaOps.Concelier.Storage.Mongo;
public static class ServiceCollectionExtensions
{
public static IServiceCollection AddMongoStorage(this IServiceCollection services, Action<MongoStorageOptions> configureOptions)
{
ArgumentNullException.ThrowIfNull(services);
@@ -55,42 +55,42 @@ public static class ServiceCollectionExtensions
// Normal path: enforce validity.
options.EnsureValid();
});
services.TryAddSingleton(TimeProvider.System);
services.AddSingleton<IMongoClient>(static sp =>
{
var options = sp.GetRequiredService<IOptions<MongoStorageOptions>>().Value;
return new MongoClient(options.ConnectionString);
});
services.AddSingleton(static sp =>
{
var options = sp.GetRequiredService<IOptions<MongoStorageOptions>>().Value;
var client = sp.GetRequiredService<IMongoClient>();
var settings = new MongoDatabaseSettings
{
ReadConcern = ReadConcern.Majority,
WriteConcern = WriteConcern.WMajority,
ReadPreference = ReadPreference.PrimaryPreferred,
};
var database = client.GetDatabase(options.GetDatabaseName(), settings);
var writeConcern = database.Settings.WriteConcern.With(wTimeout: options.CommandTimeout);
return database.WithWriteConcern(writeConcern);
});
services.AddScoped<IMongoSessionProvider, MongoSessionProvider>();
services.AddSingleton<MongoBootstrapper>();
services.AddSingleton<IJobStore, MongoJobStore>();
services.AddSingleton<ILeaseStore, MongoLeaseStore>();
services.AddSingleton<ISourceStateRepository, MongoSourceStateRepository>();
services.AddSingleton<IDocumentStore, DocumentStore>();
services.AddSingleton<IDtoStore, DtoStore>();
services.AddSingleton<IAdvisoryStore, AdvisoryStore>();
services.AddSingleton<IAliasStore, AliasStore>();
services.AddSingleton<IChangeHistoryStore, MongoChangeHistoryStore>();
services.AddSingleton<IJpFlagStore, JpFlagStore>();
services.AddSingleton<IPsirtFlagStore, PsirtFlagStore>();
services.AddSingleton<IMergeEventStore, MergeEventStore>();
@@ -123,13 +123,13 @@ public static class ServiceCollectionExtensions
services.AddSingleton<StellaOps.Concelier.Core.Linksets.IAdvisoryLinksetSink, StellaOps.Concelier.Storage.Mongo.Linksets.ConcelierMongoLinksetSink>();
services.AddSingleton<IExportStateStore, ExportStateStore>();
services.TryAddSingleton<ExportStateManager>();
services.AddSingleton<IMongoCollection<JobRunDocument>>(static sp =>
{
var database = sp.GetRequiredService<IMongoDatabase>();
return database.GetCollection<JobRunDocument>(MongoStorageDefaults.Collections.Jobs);
});
services.AddSingleton<IMongoCollection<JobLeaseDocument>>(static sp =>
{
var database = sp.GetRequiredService<IMongoDatabase>();
@@ -188,6 +188,8 @@ public static class ServiceCollectionExtensions
services.AddSingleton<IMongoMigration, EnsureAdvisoryObservationEventCollectionMigration>();
services.AddSingleton<IMongoMigration, SemVerStyleBackfillMigration>();
services.AddSingleton<IMongoMigration, EnsureOrchestratorCollectionsMigration>();
services.AddSingleton<IMongoMigration, EnsureLinkNotMergeCollectionsMigration>();
services.AddSingleton<IMongoMigration, EnsureLinkNotMergeShardingAndTtlMigration>();
services.AddSingleton<IOrchestratorRegistryStore, MongoOrchestratorRegistryStore>();

View File

@@ -0,0 +1,195 @@
using System.Collections.Immutable;
using System.Text.Json.Nodes;
using FluentAssertions;
using Microsoft.Extensions.Logging.Abstractions;
using StellaOps.Concelier.Core.Aoc;
using StellaOps.Concelier.Models.Observations;
using StellaOps.Concelier.RawModels;
using Xunit;
namespace StellaOps.Concelier.Core.Tests.Aoc;
/// <summary>
/// Tests for <see cref="AdvisoryObservationWriteGuard"/> verifying append-only semantics
/// per LNM-21-004.
/// </summary>
public sealed class AdvisoryObservationWriteGuardTests
{
private readonly AdvisoryObservationWriteGuard _guard;
public AdvisoryObservationWriteGuardTests()
{
_guard = new AdvisoryObservationWriteGuard(NullLogger<AdvisoryObservationWriteGuard>.Instance);
}
[Fact]
public void ValidateWrite_NewObservation_ReturnsProceed()
{
// Arrange
var observation = CreateObservation("obs-1", "sha256:abc123");
// Act
var result = _guard.ValidateWrite(observation, existingContentHash: null);
// Assert
result.Should().Be(ObservationWriteDisposition.Proceed);
}
[Fact]
public void ValidateWrite_NewObservation_WithEmptyExistingHash_ReturnsProceed()
{
// Arrange
var observation = CreateObservation("obs-2", "sha256:def456");
// Act
var result = _guard.ValidateWrite(observation, existingContentHash: "");
// Assert
result.Should().Be(ObservationWriteDisposition.Proceed);
}
[Fact]
public void ValidateWrite_NewObservation_WithWhitespaceExistingHash_ReturnsProceed()
{
// Arrange
var observation = CreateObservation("obs-3", "sha256:ghi789");
// Act
var result = _guard.ValidateWrite(observation, existingContentHash: " ");
// Assert
result.Should().Be(ObservationWriteDisposition.Proceed);
}
[Fact]
public void ValidateWrite_IdenticalContent_ReturnsSkipIdentical()
{
// Arrange
const string contentHash = "sha256:abc123";
var observation = CreateObservation("obs-4", contentHash);
// Act
var result = _guard.ValidateWrite(observation, existingContentHash: contentHash);
// Assert
result.Should().Be(ObservationWriteDisposition.SkipIdentical);
}
[Fact]
public void ValidateWrite_IdenticalContent_CaseInsensitive_ReturnsSkipIdentical()
{
// Arrange
var observation = CreateObservation("obs-5", "SHA256:ABC123");
// Act
var result = _guard.ValidateWrite(observation, existingContentHash: "sha256:abc123");
// Assert
result.Should().Be(ObservationWriteDisposition.SkipIdentical);
}
[Fact]
public void ValidateWrite_DifferentContent_ReturnsRejectMutation()
{
// Arrange
var observation = CreateObservation("obs-6", "sha256:newcontent");
// Act
var result = _guard.ValidateWrite(observation, existingContentHash: "sha256:oldcontent");
// Assert
result.Should().Be(ObservationWriteDisposition.RejectMutation);
}
[Fact]
public void ValidateWrite_NullObservation_ThrowsArgumentNullException()
{
// Act
var act = () => _guard.ValidateWrite(null!, existingContentHash: null);
// Assert
act.Should().Throw<ArgumentNullException>()
.WithParameterName("observation");
}
[Theory]
[InlineData("sha256:a", "sha256:b")]
[InlineData("sha256:hash1", "sha256:hash2")]
[InlineData("md5:abc", "sha256:abc")]
public void ValidateWrite_ContentMismatch_ReturnsRejectMutation(string newHash, string existingHash)
{
// Arrange
var observation = CreateObservation("obs-mutation", newHash);
// Act
var result = _guard.ValidateWrite(observation, existingHash);
// Assert
result.Should().Be(ObservationWriteDisposition.RejectMutation);
}
[Theory]
[InlineData("sha256:identical")]
[InlineData("SHA256:IDENTICAL")]
[InlineData("sha512:longerhash1234567890")]
public void ValidateWrite_ExactMatch_ReturnsSkipIdentical(string hash)
{
// Arrange
var observation = CreateObservation("obs-idempotent", hash);
// Act
var result = _guard.ValidateWrite(observation, hash);
// Assert
result.Should().Be(ObservationWriteDisposition.SkipIdentical);
}
private static AdvisoryObservation CreateObservation(string observationId, string contentHash)
{
var source = new AdvisoryObservationSource(
vendor: "test-vendor",
stream: "test-stream",
api: "test-api",
collectorVersion: "1.0.0");
var signature = new AdvisoryObservationSignature(
present: false,
format: null,
keyId: null,
signature: null);
var upstream = new AdvisoryObservationUpstream(
upstreamId: $"upstream-{observationId}",
documentVersion: "1.0",
fetchedAt: DateTimeOffset.UtcNow,
receivedAt: DateTimeOffset.UtcNow,
contentHash: contentHash,
signature: signature);
var content = new AdvisoryObservationContent(
format: "csaf",
specVersion: "2.0",
raw: JsonNode.Parse("{\"test\": true}")!);
var linkset = new AdvisoryObservationLinkset(
aliases: new[] { "CVE-2024-0001" },
purls: null,
cpes: null,
references: null);
var rawLinkset = new RawLinkset
{
Aliases = ImmutableArray.Create("CVE-2024-0001")
};
return new AdvisoryObservation(
observationId: observationId,
tenant: "test-tenant",
source: source,
upstream: upstream,
content: content,
linkset: linkset,
rawLinkset: rawLinkset,
createdAt: DateTimeOffset.UtcNow);
}
}

View File

@@ -0,0 +1,232 @@
using System;
using System.Collections.Generic;
using System.Collections.Immutable;
using FluentAssertions;
using StellaOps.Concelier.Core.Linksets;
using Xunit;
namespace StellaOps.Concelier.Core.Tests.Linksets;
/// <summary>
/// Tests for <see cref="AdvisoryLinksetUpdatedEvent"/> verifying event contract compliance
/// per LNM-21-005.
/// </summary>
public sealed class AdvisoryLinksetUpdatedEventTests
{
[Fact]
public void FromLinkset_NewLinkset_CreatesEventWithCreatedDelta()
{
// Arrange
var linkset = CreateLinkset("tenant-1", "nvd", "CVE-2024-1234", new[] { "obs-1", "obs-2" });
// Act
var @event = AdvisoryLinksetUpdatedEvent.FromLinkset(
linkset,
previousLinkset: null,
linksetId: "linkset-123",
traceId: "trace-456");
// Assert
@event.TenantId.Should().Be("urn:tenant:tenant-1");
@event.LinksetId.Should().Be("linkset-123");
@event.AdvisoryId.Should().Be("CVE-2024-1234");
@event.Source.Should().Be("nvd");
@event.ObservationIds.Should().ContainInOrder("obs-1", "obs-2");
@event.Delta.Type.Should().Be("created");
@event.Delta.ObservationsAdded.Should().ContainInOrder("obs-1", "obs-2");
@event.Delta.ObservationsRemoved.Should().BeEmpty();
@event.TraceId.Should().Be("trace-456");
}
[Fact]
public void FromLinkset_UpdatedLinkset_CreatesEventWithUpdatedDelta()
{
// Arrange
var previousLinkset = CreateLinkset("tenant-1", "nvd", "CVE-2024-1234", new[] { "obs-1", "obs-2" });
var currentLinkset = CreateLinkset("tenant-1", "nvd", "CVE-2024-1234", new[] { "obs-2", "obs-3" });
// Act
var @event = AdvisoryLinksetUpdatedEvent.FromLinkset(
currentLinkset,
previousLinkset,
linksetId: "linkset-123",
traceId: null);
// Assert
@event.Delta.Type.Should().Be("updated");
@event.Delta.ObservationsAdded.Should().Contain("obs-3");
@event.Delta.ObservationsRemoved.Should().Contain("obs-1");
}
[Fact]
public void FromLinkset_TenantAlreadyUrn_PreservesFormat()
{
// Arrange
var linkset = CreateLinkset("urn:tenant:already-formatted", "ghsa", "GHSA-1234", new[] { "obs-1" });
// Act
var @event = AdvisoryLinksetUpdatedEvent.FromLinkset(linkset, null, "linkset-1", null);
// Assert
@event.TenantId.Should().Be("urn:tenant:already-formatted");
}
[Fact]
public void FromLinkset_WithConflicts_IncludesConflictSummaries()
{
// Arrange
var conflicts = new List<AdvisoryLinksetConflict>
{
new("severity", "severity-mismatch", new[] { "nvd:9.8", "ghsa:8.5" }, new[] { "nvd", "ghsa" }),
new("aliases", "alias-inconsistency", new[] { "CVE-2024-1234", "CVE-2024-5678" }, null)
};
var linkset = CreateLinksetWithConflicts("tenant-1", "nvd", "CVE-2024-1234", new[] { "obs-1" }, conflicts);
// Act
var @event = AdvisoryLinksetUpdatedEvent.FromLinkset(linkset, null, "linkset-1", null);
// Assert
@event.Conflicts.Should().HaveCount(2);
@event.Conflicts[0].Field.Should().Be("aliases"); // Sorted by field
@event.Conflicts[1].Field.Should().Be("severity");
}
[Fact]
public void FromLinkset_WithProvenance_IncludesProvenanceSummary()
{
// Arrange
var provenance = new AdvisoryLinksetProvenance(
ObservationHashes: new[] { "sha256:abc123", "sha256:def456" },
ToolVersion: "1.0.0",
PolicyHash: "policy-hash-123");
var linkset = CreateLinksetWithProvenance("tenant-1", "nvd", "CVE-2024-1234", new[] { "obs-1" }, provenance);
// Act
var @event = AdvisoryLinksetUpdatedEvent.FromLinkset(linkset, null, "linkset-1", null);
// Assert
@event.Provenance.ObservationHashes.Should().ContainInOrder("sha256:abc123", "sha256:def456");
@event.Provenance.ToolVersion.Should().Be("1.0.0");
@event.Provenance.PolicyHash.Should().Be("policy-hash-123");
}
[Fact]
public void FromLinkset_ConfidenceChanged_SetsConfidenceChangedFlag()
{
// Arrange
var previousLinkset = CreateLinksetWithConfidence("tenant-1", "nvd", "CVE-2024-1234", new[] { "obs-1" }, 0.7);
var currentLinkset = CreateLinksetWithConfidence("tenant-1", "nvd", "CVE-2024-1234", new[] { "obs-1" }, 0.85);
// Act
var @event = AdvisoryLinksetUpdatedEvent.FromLinkset(currentLinkset, previousLinkset, "linkset-1", null);
// Assert
@event.Delta.ConfidenceChanged.Should().BeTrue();
@event.Confidence.Should().Be(0.85);
}
[Fact]
public void FromLinkset_SameConfidence_SetsConfidenceChangedFlagFalse()
{
// Arrange
var previousLinkset = CreateLinksetWithConfidence("tenant-1", "nvd", "CVE-2024-1234", new[] { "obs-1" }, 0.85);
var currentLinkset = CreateLinksetWithConfidence("tenant-1", "nvd", "CVE-2024-1234", new[] { "obs-1" }, 0.85);
// Act
var @event = AdvisoryLinksetUpdatedEvent.FromLinkset(currentLinkset, previousLinkset, "linkset-1", null);
// Assert
@event.Delta.ConfidenceChanged.Should().BeFalse();
}
[Fact]
public void FromLinkset_GeneratesUniqueEventId()
{
// Arrange
var linkset = CreateLinkset("tenant-1", "nvd", "CVE-2024-1234", new[] { "obs-1" });
// Act
var event1 = AdvisoryLinksetUpdatedEvent.FromLinkset(linkset, null, "linkset-1", null);
var event2 = AdvisoryLinksetUpdatedEvent.FromLinkset(linkset, null, "linkset-1", null);
// Assert
event1.EventId.Should().NotBe(event2.EventId);
event1.EventId.Should().NotBe(Guid.Empty);
}
[Fact]
public void FromLinkset_NullLinkset_ThrowsArgumentNullException()
{
// Act
var act = () => AdvisoryLinksetUpdatedEvent.FromLinkset(null!, null, "linkset-1", null);
// Assert
act.Should().Throw<ArgumentNullException>()
.WithParameterName("linkset");
}
private static AdvisoryLinkset CreateLinkset(string tenant, string source, string advisoryId, string[] observationIds)
{
return new AdvisoryLinkset(
TenantId: tenant,
Source: source,
AdvisoryId: advisoryId,
ObservationIds: observationIds.ToImmutableArray(),
Normalized: null,
Provenance: null,
Confidence: null,
Conflicts: null,
CreatedAt: DateTimeOffset.UtcNow,
BuiltByJobId: null);
}
private static AdvisoryLinkset CreateLinksetWithConflicts(
string tenant, string source, string advisoryId, string[] observationIds, IReadOnlyList<AdvisoryLinksetConflict> conflicts)
{
return new AdvisoryLinkset(
TenantId: tenant,
Source: source,
AdvisoryId: advisoryId,
ObservationIds: observationIds.ToImmutableArray(),
Normalized: null,
Provenance: null,
Confidence: null,
Conflicts: conflicts,
CreatedAt: DateTimeOffset.UtcNow,
BuiltByJobId: null);
}
private static AdvisoryLinkset CreateLinksetWithProvenance(
string tenant, string source, string advisoryId, string[] observationIds, AdvisoryLinksetProvenance provenance)
{
return new AdvisoryLinkset(
TenantId: tenant,
Source: source,
AdvisoryId: advisoryId,
ObservationIds: observationIds.ToImmutableArray(),
Normalized: null,
Provenance: provenance,
Confidence: null,
Conflicts: null,
CreatedAt: DateTimeOffset.UtcNow,
BuiltByJobId: null);
}
private static AdvisoryLinkset CreateLinksetWithConfidence(
string tenant, string source, string advisoryId, string[] observationIds, double? confidence)
{
return new AdvisoryLinkset(
TenantId: tenant,
Source: source,
AdvisoryId: advisoryId,
ObservationIds: observationIds.ToImmutableArray(),
Normalized: null,
Provenance: null,
Confidence: confidence,
Conflicts: null,
CreatedAt: DateTimeOffset.UtcNow,
BuiltByJobId: null);
}
}

View File

@@ -2,14 +2,19 @@
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<TargetFramework>net10.0</TargetFramework>
<LangVersion>preview</LangVersion>
<ImplicitUsings>enable</ImplicitUsings>
<Nullable>enable</Nullable>
<IsPackable>false</IsPackable>
</PropertyGroup>
<ItemGroup>
<ProjectReference Include="../../__Libraries/StellaOps.Concelier.Core/StellaOps.Concelier.Core.csproj" />
<ProjectReference Include="../../__Libraries/StellaOps.Concelier.RawModels/StellaOps.Concelier.RawModels.csproj" />
<ProjectReference Include="../../__Libraries/StellaOps.Concelier.Models/StellaOps.Concelier.Models.csproj" />
<ProjectReference Include="../../../__Libraries/StellaOps.Ingestion.Telemetry/StellaOps.Ingestion.Telemetry.csproj" />
<ProjectReference Include="../../../Aoc/__Libraries/StellaOps.Aoc/StellaOps.Aoc.csproj" />
<PackageReference Include="FluentAssertions" Version="6.12.0" PrivateAssets="All" />
<!-- Test packages inherited from Directory.Build.props -->
<PackageReference Include="FluentAssertions" Version="6.12.0" />
<PackageReference Include="Microsoft.Extensions.Logging.Abstractions" Version="10.0.0-rc.2.25502.107" />
</ItemGroup>
</Project>