This commit is contained in:
StellaOps Bot
2025-12-09 00:20:52 +02:00
parent 3d01bf9edc
commit bc0762e97d
261 changed files with 14033 additions and 4427 deletions

View File

@@ -0,0 +1,143 @@
using System;
using System.Collections.Generic;
using System.Diagnostics.Metrics;
using System.Linq;
using System.Text.Json;
using StellaOps.Concelier.Core.Linksets;
namespace StellaOps.Concelier.Core.Diagnostics;
/// <summary>
/// Metrics exported for Vuln Explorer consumers (fact-only telemetry).
/// </summary>
public static class VulnExplorerTelemetry
{
    /// <summary>Meter name under which all Vuln Explorer instruments are registered.</summary>
    public const string MeterName = "StellaOps.Concelier.VulnExplorer";

    private static readonly Meter Meter = new(MeterName);

    private static readonly Counter<long> IdentifierCollisionCounter = Meter.CreateCounter<long>(
        "vuln.identifier_collisions_total",
        unit: "collision",
        description: "Identifier/alias collisions detected while aggregating linksets for Vuln Explorer.");

    private static readonly Counter<long> WithdrawnStatementCounter = Meter.CreateCounter<long>(
        "vuln.withdrawn_statements_total",
        unit: "statement",
        description: "Withdrawn advisory observations detected by change emitters.");

    private static readonly Counter<long> ChunkRequestCounter = Meter.CreateCounter<long>(
        "vuln.chunk_requests_total",
        unit: "request",
        description: "Advisory chunk requests served for Vuln Explorer evidence panels.");

    private static readonly Histogram<double> ChunkLatencyHistogram = Meter.CreateHistogram<double>(
        "vuln.chunk_latency_ms",
        unit: "ms",
        description: "Latency to build advisory chunks (fact-only) for Vuln Explorer.");

    /// <summary>
    /// Records identifier/alias collisions for a tenant. No-op when <paramref name="collisions"/>
    /// is non-positive or the tenant is blank, so unattributable samples are never emitted.
    /// </summary>
    public static void RecordIdentifierCollisions(string tenant, string? source, int collisions)
    {
        if (collisions <= 0 || string.IsNullOrWhiteSpace(tenant))
        {
            return;
        }

        IdentifierCollisionCounter.Add(collisions, TenantSourceTags(tenant, source));
    }

    /// <summary>
    /// Counts conflicts that represent alias collisions: either the reason is
    /// "alias-inconsistency" or the conflicting field is "aliases" (case-insensitive).
    /// </summary>
    public static int CountAliasCollisions(IReadOnlyList<AdvisoryLinksetConflict>? conflicts)
    {
        if (conflicts is null || conflicts.Count == 0)
        {
            return 0;
        }

        return conflicts.Count(conflict =>
            string.Equals(conflict.Reason, "alias-inconsistency", StringComparison.OrdinalIgnoreCase) ||
            string.Equals(conflict.Field, "aliases", StringComparison.OrdinalIgnoreCase));
    }

    /// <summary>Records one withdrawn advisory statement for a tenant/source pair.</summary>
    public static void RecordWithdrawnStatement(string tenant, string? source)
    {
        if (string.IsNullOrWhiteSpace(tenant))
        {
            return;
        }

        WithdrawnStatementCounter.Add(1, TenantSourceTags(tenant, source));
    }

    /// <summary>
    /// Records one chunk request and its latency. The result tag is trimmed/lower-cased
    /// ("unknown" when blank); latency and chunk counts are clamped to be non-negative.
    /// NOTE(review): chunk_count is emitted as a tag and can be high-cardinality — confirm
    /// downstream metric backends tolerate this.
    /// </summary>
    public static void RecordChunkRequest(string tenant, string result, bool cacheHit, int chunkCount, double latencyMs)
    {
        if (string.IsNullOrWhiteSpace(tenant))
        {
            return;
        }

        var sanitizedResult = string.IsNullOrWhiteSpace(result) ? "unknown" : result.Trim().ToLowerInvariant();
        var safeLatency = latencyMs < 0 ? 0d : latencyMs;
        var normalizedChunkCount = chunkCount < 0 ? 0 : chunkCount;
        var tags = new[]
        {
            KeyValuePair.Create<string, object?>("tenant", tenant),
            KeyValuePair.Create<string, object?>("result", sanitizedResult),
            KeyValuePair.Create<string, object?>("cache_hit", cacheHit),
            KeyValuePair.Create<string, object?>("chunk_count", normalizedChunkCount)
        };
        ChunkRequestCounter.Add(1, tags);
        ChunkLatencyHistogram.Record(safeLatency, tags);
    }

    /// <summary>Records chunk-build latency (clamped at zero) for a tenant/source pair.</summary>
    public static void RecordChunkLatency(string tenant, string? source, TimeSpan duration)
    {
        if (string.IsNullOrWhiteSpace(tenant))
        {
            return;
        }

        ChunkLatencyHistogram.Record(Math.Max(0, duration.TotalMilliseconds), TenantSourceTags(tenant, source));
    }

    /// <summary>
    /// Returns true when the JSON object carries a literal-true "withdrawn" flag or a
    /// non-blank "withdrawn_at" string; false for any non-object payload.
    /// </summary>
    public static bool IsWithdrawn(JsonElement content)
    {
        if (content.ValueKind != JsonValueKind.Object)
        {
            return false;
        }

        if (content.TryGetProperty("withdrawn", out var withdrawnElement) &&
            withdrawnElement.ValueKind == JsonValueKind.True)
        {
            return true;
        }

        if (content.TryGetProperty("withdrawn_at", out var withdrawnAtElement) &&
            withdrawnAtElement.ValueKind is JsonValueKind.String)
        {
            return !string.IsNullOrWhiteSpace(withdrawnAtElement.GetString());
        }

        return false;
    }

    // Shared tag shape for tenant/source-scoped instruments; a missing source is
    // normalized to "unknown" so the tag set stays consistent across call sites.
    private static KeyValuePair<string, object?>[] TenantSourceTags(string tenant, string? source) => new[]
    {
        KeyValuePair.Create<string, object?>("tenant", tenant),
        KeyValuePair.Create<string, object?>("source", source ?? "unknown")
    };
}

View File

@@ -7,7 +7,7 @@ using StellaOps.Concelier.Normalization.SemVer;
namespace StellaOps.Concelier.Core.Linksets;
internal static class AdvisoryLinksetNormalization
public static class AdvisoryLinksetNormalization
{
public static AdvisoryLinksetNormalized? FromRawLinkset(RawLinkset linkset)
{

View File

@@ -5,192 +5,194 @@ using StellaOps.Concelier.Models;
using StellaOps.Concelier.Models.Observations;
using StellaOps.Concelier.RawModels;
using StellaOps.Concelier.Core.Linksets;
namespace StellaOps.Concelier.Core.Observations;
/// <summary>
/// Default implementation of <see cref="IAdvisoryObservationQueryService"/> that projects raw observations for overlay consumers.
/// </summary>
public sealed class AdvisoryObservationQueryService : IAdvisoryObservationQueryService
{
private const int DefaultPageSize = 200;
private const int MaxPageSize = 500;
private readonly IAdvisoryObservationLookup _lookup;
public AdvisoryObservationQueryService(IAdvisoryObservationLookup lookup)
{
_lookup = lookup ?? throw new ArgumentNullException(nameof(lookup));
}
public async ValueTask<AdvisoryObservationQueryResult> QueryAsync(
AdvisoryObservationQueryOptions options,
CancellationToken cancellationToken)
{
ArgumentNullException.ThrowIfNull(options);
cancellationToken.ThrowIfCancellationRequested();
var normalizedTenant = NormalizeTenant(options.Tenant);
var normalizedObservationIds = NormalizeSet(options.ObservationIds, static value => value, StringComparer.Ordinal);
using StellaOps.Concelier.Core.Diagnostics;
namespace StellaOps.Concelier.Core.Observations;
/// <summary>
/// Default implementation of <see cref="IAdvisoryObservationQueryService"/> that projects raw observations for overlay consumers.
/// </summary>
public sealed class AdvisoryObservationQueryService : IAdvisoryObservationQueryService
{
private const int DefaultPageSize = 200;
private const int MaxPageSize = 500;
private readonly IAdvisoryObservationLookup _lookup;
public AdvisoryObservationQueryService(IAdvisoryObservationLookup lookup)
{
_lookup = lookup ?? throw new ArgumentNullException(nameof(lookup));
}
public async ValueTask<AdvisoryObservationQueryResult> QueryAsync(
AdvisoryObservationQueryOptions options,
CancellationToken cancellationToken)
{
ArgumentNullException.ThrowIfNull(options);
cancellationToken.ThrowIfCancellationRequested();
var normalizedTenant = NormalizeTenant(options.Tenant);
var normalizedObservationIds = NormalizeSet(options.ObservationIds, static value => value, StringComparer.Ordinal);
var normalizedAliases = NormalizeSet(options.Aliases, static value => value, StringComparer.OrdinalIgnoreCase);
var normalizedPurls = NormalizeSet(options.Purls, static value => value, StringComparer.Ordinal);
var normalizedCpes = NormalizeSet(options.Cpes, static value => value, StringComparer.Ordinal);
var limit = NormalizeLimit(options.Limit);
var fetchSize = checked(limit + 1);
var cursor = DecodeCursor(options.Cursor);
var observations = await _lookup
.FindByFiltersAsync(
normalizedTenant,
normalizedObservationIds,
normalizedAliases,
normalizedPurls,
normalizedCpes,
cursor,
fetchSize,
cancellationToken)
.ConfigureAwait(false);
var ordered = observations
.Where(observation => Matches(observation, normalizedObservationIds, normalizedAliases, normalizedPurls, normalizedCpes))
.OrderByDescending(static observation => observation.CreatedAt)
.ThenBy(static observation => observation.ObservationId, StringComparer.Ordinal)
.ToImmutableArray();
var hasMore = ordered.Length > limit;
var page = hasMore ? ordered.Take(limit).ToImmutableArray() : ordered;
var nextCursor = hasMore ? EncodeCursor(page[^1]) : null;
var linkset = BuildAggregateLinkset(page);
return new AdvisoryObservationQueryResult(page, linkset, nextCursor, hasMore);
}
/// <summary>
/// Post-fetch filter: for each non-empty criteria set the observation must carry at
/// least one matching identifier; empty sets impose no constraint (wildcard).
/// </summary>
private static bool Matches(
AdvisoryObservation observation,
ImmutableHashSet<string> observationIds,
ImmutableHashSet<string> aliases,
ImmutableHashSet<string> purls,
ImmutableHashSet<string> cpes)
{
ArgumentNullException.ThrowIfNull(observation);
// Observation id must be in the requested set when one was supplied.
if (observationIds.Count > 0 && !observationIds.Contains(observation.ObservationId))
{
return false;
}
// At least one linkset alias must intersect the requested aliases.
if (aliases.Count > 0 && !observation.Linkset.Aliases.Any(aliases.Contains))
{
return false;
}
// Same intersection rule for package URLs.
if (purls.Count > 0 && !observation.Linkset.Purls.Any(purls.Contains))
{
return false;
}
// Same intersection rule for CPEs.
if (cpes.Count > 0 && !observation.Linkset.Cpes.Any(cpes.Contains))
{
return false;
}
return true;
}
// Tenant is required; normalized to lower case so tenant comparisons are case-insensitive.
private static string NormalizeTenant(string tenant)
=> Validation.EnsureNotNullOrWhiteSpace(tenant, nameof(tenant)).ToLowerInvariant();
/// <summary>
/// Trims each candidate value, drops null/blank entries, applies the projector, and
/// collects the results into an immutable set using the supplied comparer.
/// A null input yields an empty set.
/// </summary>
private static ImmutableHashSet<string> NormalizeSet(
IEnumerable<string>? values,
Func<string, string> projector,
StringComparer comparer)
{
if (values is null)
{
return ImmutableHashSet<string>.Empty;
}
var builder = ImmutableHashSet.CreateBuilder<string>(comparer);
foreach (var value in values)
{
// TrimToNull collapses blank/whitespace entries to null so they are skipped.
var normalized = Validation.TrimToNull(value);
if (normalized is null)
{
continue;
}
builder.Add(projector(normalized));
}
return builder.ToImmutable();
}
/// <summary>
/// Resolves the effective page size: missing or non-positive requests fall back to
/// DefaultPageSize, and anything above MaxPageSize is capped.
/// </summary>
private static int NormalizeLimit(int? requestedLimit)
{
if (!requestedLimit.HasValue || requestedLimit.Value <= 0)
{
return DefaultPageSize;
}
var limit = requestedLimit.Value;
if (limit > MaxPageSize)
{
return MaxPageSize;
}
return limit;
}
/// <summary>
/// Decodes an opaque continuation cursor of the form base64("&lt;utcTicks&gt;:&lt;observationId&gt;").
/// Returns null for a blank cursor; throws <see cref="FormatException"/> for malformed input.
/// </summary>
private static AdvisoryObservationCursor? DecodeCursor(string? cursor)
{
if (string.IsNullOrWhiteSpace(cursor))
{
return null;
}
try
{
var decoded = Convert.FromBase64String(cursor.Trim());
var payload = Encoding.UTF8.GetString(decoded);
var separator = payload.IndexOf(':');
// Separator must exist with non-empty text on both sides.
if (separator <= 0 || separator >= payload.Length - 1)
{
throw new FormatException("Cursor is malformed.");
}
var ticksText = payload.AsSpan(0, separator);
if (!long.TryParse(ticksText, NumberStyles.Integer, CultureInfo.InvariantCulture, out var ticks))
{
throw new FormatException("Cursor timestamp is invalid.");
}
// Ticks were produced from UtcTicks, so the decoded timestamp is re-marked as UTC.
var createdAt = new DateTimeOffset(DateTime.SpecifyKind(new DateTime(ticks), DateTimeKind.Utc));
var observationId = payload[(separator + 1)..];
if (string.IsNullOrWhiteSpace(observationId))
{
throw new FormatException("Cursor observation id is missing.");
}
return new AdvisoryObservationCursor(createdAt, observationId);
}
catch (FormatException)
{
// Already the contract exception — propagate untouched.
throw;
}
catch (Exception ex)
{
// Base64/decoding failures surface as a uniform malformed-cursor error.
throw new FormatException("Cursor is malformed.", ex);
}
}
private static string? EncodeCursor(AdvisoryObservation observation)
{
if (observation is null)
{
return null;
}
var normalizedPurls = NormalizeSet(options.Purls, static value => value, StringComparer.Ordinal);
var normalizedCpes = NormalizeSet(options.Cpes, static value => value, StringComparer.Ordinal);
var limit = NormalizeLimit(options.Limit);
var fetchSize = checked(limit + 1);
var cursor = DecodeCursor(options.Cursor);
var observations = await _lookup
.FindByFiltersAsync(
normalizedTenant,
normalizedObservationIds,
normalizedAliases,
normalizedPurls,
normalizedCpes,
cursor,
fetchSize,
cancellationToken)
.ConfigureAwait(false);
var ordered = observations
.Where(observation => Matches(observation, normalizedObservationIds, normalizedAliases, normalizedPurls, normalizedCpes))
.OrderByDescending(static observation => observation.CreatedAt)
.ThenBy(static observation => observation.ObservationId, StringComparer.Ordinal)
.ToImmutableArray();
var hasMore = ordered.Length > limit;
var page = hasMore ? ordered.Take(limit).ToImmutableArray() : ordered;
var nextCursor = hasMore ? EncodeCursor(page[^1]) : null;
var linkset = BuildAggregateLinkset(page);
RecordIdentifierCollisions(normalizedTenant, linkset);
return new AdvisoryObservationQueryResult(page, linkset, nextCursor, hasMore);
}
/// <summary>
/// Post-fetch filter: each non-empty criteria set must intersect the observation's
/// identifiers; empty sets act as wildcards.
/// </summary>
private static bool Matches(
    AdvisoryObservation observation,
    ImmutableHashSet<string> observationIds,
    ImmutableHashSet<string> aliases,
    ImmutableHashSet<string> purls,
    ImmutableHashSet<string> cpes)
{
    ArgumentNullException.ThrowIfNull(observation);

    // Each clause short-circuits exactly as the original guard-style checks did.
    return (observationIds.Count == 0 || observationIds.Contains(observation.ObservationId))
        && (aliases.Count == 0 || observation.Linkset.Aliases.Any(aliases.Contains))
        && (purls.Count == 0 || observation.Linkset.Purls.Any(purls.Contains))
        && (cpes.Count == 0 || observation.Linkset.Cpes.Any(cpes.Contains));
}
/// <summary>Validates the tenant and lower-cases it for case-insensitive matching.</summary>
private static string NormalizeTenant(string tenant)
{
    var validated = Validation.EnsureNotNullOrWhiteSpace(tenant, nameof(tenant));
    return validated.ToLowerInvariant();
}
/// <summary>
/// Trims each candidate, drops null/blank entries, applies <paramref name="projector"/>,
/// and materializes the survivors into an immutable set under <paramref name="comparer"/>.
/// A null input yields an empty set.
/// </summary>
private static ImmutableHashSet<string> NormalizeSet(
    IEnumerable<string>? values,
    Func<string, string> projector,
    StringComparer comparer)
{
    if (values is null)
    {
        return ImmutableHashSet<string>.Empty;
    }

    return values
        .Select(Validation.TrimToNull)
        .Where(static trimmed => trimmed is not null)
        .Select(trimmed => projector(trimmed!))
        .ToImmutableHashSet(comparer);
}
/// <summary>
/// Resolves the effective page size: null or non-positive requests fall back to
/// <see cref="DefaultPageSize"/>; anything above <see cref="MaxPageSize"/> is capped.
/// </summary>
private static int NormalizeLimit(int? requestedLimit) => requestedLimit switch
{
    null => DefaultPageSize,
    <= 0 => DefaultPageSize,
    > MaxPageSize => MaxPageSize,
    var requested => requested,
};
/// <summary>
/// Decodes an opaque continuation cursor of the form base64("&lt;utcTicks&gt;:&lt;observationId&gt;").
/// Returns null for a blank cursor; any malformed input surfaces as <see cref="FormatException"/>.
/// </summary>
private static AdvisoryObservationCursor? DecodeCursor(string? cursor)
{
    if (string.IsNullOrWhiteSpace(cursor))
    {
        return null;
    }

    try
    {
        var payload = Encoding.UTF8.GetString(Convert.FromBase64String(cursor.Trim()));
        var separator = payload.IndexOf(':');

        // The separator must exist and leave non-empty text on both sides.
        if (separator <= 0 || separator >= payload.Length - 1)
        {
            throw new FormatException("Cursor is malformed.");
        }

        if (!long.TryParse(payload.AsSpan(0, separator), NumberStyles.Integer, CultureInfo.InvariantCulture, out var ticks))
        {
            throw new FormatException("Cursor timestamp is invalid.");
        }

        // The ticks were produced from UtcTicks, so re-mark the decoded value as UTC.
        var createdAt = new DateTimeOffset(DateTime.SpecifyKind(new DateTime(ticks), DateTimeKind.Utc));

        var observationId = payload[(separator + 1)..];
        if (string.IsNullOrWhiteSpace(observationId))
        {
            throw new FormatException("Cursor observation id is missing.");
        }

        return new AdvisoryObservationCursor(createdAt, observationId);
    }
    catch (FormatException)
    {
        // Already the contract exception — rethrow without wrapping.
        throw;
    }
    catch (Exception ex)
    {
        // Base64/decoding/ticks-range failures collapse to a uniform malformed-cursor error.
        throw new FormatException("Cursor is malformed.", ex);
    }
}
/// <summary>
/// Builds the opaque continuation cursor for an observation —
/// base64("&lt;utcTicks&gt;:&lt;observationId&gt;") — the inverse of DecodeCursor.
/// Returns null when no observation is supplied.
/// </summary>
private static string? EncodeCursor(AdvisoryObservation observation)
{
    if (observation is null)
    {
        return null;
    }

    var ticks = observation.CreatedAt.UtcTicks.ToString(CultureInfo.InvariantCulture);
    var payloadBytes = Encoding.UTF8.GetBytes($"{ticks}:{observation.ObservationId}");
    return Convert.ToBase64String(payloadBytes);
}
@@ -283,4 +285,18 @@ public sealed class AdvisoryObservationQueryService : IAdvisoryObservationQueryS
.ThenBy(static c => string.Join('|', c.Values ?? Array.Empty<string>()), StringComparer.Ordinal)
.ToImmutableArray());
}
/// <summary>
/// Counts alias-related conflicts in the aggregated linkset and forwards them to
/// <see cref="VulnExplorerTelemetry.RecordIdentifierCollisions"/>. Source is unattributed
/// (null) because the aggregate spans multiple upstream sources.
/// </summary>
private static void RecordIdentifierCollisions(string tenant, AdvisoryObservationLinksetAggregate linkset)
{
    if (linkset.Conflicts.IsDefaultOrEmpty)
    {
        return;
    }

    // NOTE(review): this predicate (field == "aliases" AND reason mentions "alias") is
    // stricter than VulnExplorerTelemetry.CountAliasCollisions, which ORs the two checks —
    // confirm the divergence is intentional. Reason is pattern-matched so a null reason
    // cannot throw here.
    var collisionCount = linkset.Conflicts.Count(conflict =>
        string.Equals(conflict.Field, "aliases", StringComparison.OrdinalIgnoreCase) &&
        conflict.Reason is { } reason &&
        reason.Contains("alias", StringComparison.OrdinalIgnoreCase));

    VulnExplorerTelemetry.RecordIdentifierCollisions(tenant, source: null, collisionCount);
}
}

View File

@@ -5,6 +5,7 @@ using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.Extensions.Logging;
using StellaOps.Concelier.Core.Diagnostics;
namespace StellaOps.Concelier.Core.Risk;
@@ -177,6 +178,7 @@ public sealed class AdvisoryFieldChangeEmitter : IAdvisoryFieldChangeEmitter
_logger.LogInformation(
"Emitted withdrawn observation notification for {ObservationId}",
previousSignal.ObservationId);
VulnExplorerTelemetry.RecordWithdrawnStatement(tenantId, previousSignal.Provenance.Vendor);
return notification;
}

View File

@@ -12,6 +12,7 @@
<PackageReference Include="Microsoft.Extensions.Options" Version="10.0.0" />
<PackageReference Include="Microsoft.Extensions.Hosting.Abstractions" Version="10.0.0" />
<PackageReference Include="Cronos" Version="0.10.0" />
<PackageReference Include="System.Diagnostics.DiagnosticSource" Version="9.0.0" />
</ItemGroup>
<ItemGroup>
<ProjectReference Include="..\StellaOps.Concelier.Models\StellaOps.Concelier.Models.csproj" />

View File

@@ -1,5 +1,6 @@
using System;
using System.Collections;
using System.Collections.Concurrent;
using System.Collections.Generic;
using System.Linq;
using System.Threading;
@@ -66,6 +67,8 @@ namespace MongoDB.Driver
public class MongoDatabase : IMongoDatabase
{
private readonly ConcurrentDictionary<string, object> _collections = new(StringComparer.Ordinal);
public MongoDatabase(string name)
{
Name = name;
@@ -73,8 +76,17 @@ namespace MongoDB.Driver
}
public string Name { get; }
public DatabaseNamespace DatabaseNamespace { get; }
public IMongoCollection<TDocument> GetCollection<TDocument>(string name, MongoCollectionSettings? settings = null) => new MongoCollection<TDocument>(name);
public Task DropCollectionAsync(string name, CancellationToken cancellationToken = default) => Task.CompletedTask;
/// <summary>
/// Returns the cached in-memory collection for <paramref name="name"/>, creating it on first
/// use so repeated lookups observe the same data. Settings are ignored by this stub.
/// NOTE(review): requesting the same name with a different TDocument will throw
/// InvalidCastException — confirm callers use a consistent document type per collection name.
/// </summary>
public IMongoCollection<TDocument> GetCollection<TDocument>(string name, MongoCollectionSettings? settings = null)
{
var collection = (MongoCollection<TDocument>)_collections.GetOrAdd(name, _ => new MongoCollection<TDocument>(name));
return collection;
}
/// <summary>Removes the named collection from the cache; succeeds even if it never existed.</summary>
public Task DropCollectionAsync(string name, CancellationToken cancellationToken = default)
{
_collections.TryRemove(name, out _);
return Task.CompletedTask;
}
public BsonDocument RunCommand(BsonDocument command, CancellationToken cancellationToken = default) => new();
public T RunCommand<T>(BsonDocument command, CancellationToken cancellationToken = default) => default!;
public Task<T> RunCommandAsync<T>(BsonDocument command, CancellationToken cancellationToken = default) => Task.FromResult(default(T)!);

View File

@@ -5,6 +5,7 @@
<Nullable>enable</Nullable>
<ImplicitUsings>enable</ImplicitUsings>
<TreatWarningsAsErrors>false</TreatWarningsAsErrors>
<EnableDefaultCompileItems>false</EnableDefaultCompileItems>
</PropertyGroup>
<ItemGroup>
<PackageReference Include="Microsoft.Extensions.Logging.Abstractions" Version="10.0.0" />
@@ -12,4 +13,7 @@
<ItemGroup>
<ProjectReference Include="..\StellaOps.Concelier.RawModels\StellaOps.Concelier.RawModels.csproj" />
</ItemGroup>
<ItemGroup>
<Compile Include="**\*.cs" Exclude="bin\**;obj\**;out\**;bin2\**" />
</ItemGroup>
</Project>

View File

@@ -0,0 +1,69 @@
-- Concelier Migration 005: Postgres equivalents for DTO, export, PSIRT/JP flags, and change history.
CREATE SCHEMA IF NOT EXISTS concelier;
-- Parsed DTO payloads, one row per source document (PK is document_id; the id column is
-- carried along but not uniquely constrained on its own).
CREATE TABLE IF NOT EXISTS concelier.dtos (
id UUID NOT NULL,
document_id UUID NOT NULL,
source_name TEXT NOT NULL,
format TEXT NOT NULL,
payload_json JSONB NOT NULL,
schema_version TEXT NOT NULL DEFAULT '',
created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
validated_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
CONSTRAINT pk_concelier_dtos PRIMARY KEY (document_id)
);
-- Supports "latest DTOs per source" queries (created_at descending).
CREATE INDEX IF NOT EXISTS idx_concelier_dtos_source ON concelier.dtos(source_name, created_at DESC);
-- Exporter checkpoint state, one row per exporter id; file manifest stored as JSONB.
CREATE TABLE IF NOT EXISTS concelier.export_states (
id TEXT NOT NULL,
export_cursor TEXT NOT NULL,
last_full_digest TEXT,
last_delta_digest TEXT,
base_export_id TEXT,
base_digest TEXT,
target_repository TEXT,
files JSONB NOT NULL,
exporter_version TEXT NOT NULL,
updated_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
CONSTRAINT pk_concelier_export_states PRIMARY KEY (id)
);
-- PSIRT vendor flags; composite key allows one flag per (advisory, vendor) pair.
CREATE TABLE IF NOT EXISTS concelier.psirt_flags (
advisory_id TEXT NOT NULL,
vendor TEXT NOT NULL,
source_name TEXT NOT NULL,
external_id TEXT,
recorded_at TIMESTAMPTZ NOT NULL,
CONSTRAINT pk_concelier_psirt_flags PRIMARY KEY (advisory_id, vendor)
);
-- Supports "recent flags per source" queries.
CREATE INDEX IF NOT EXISTS idx_concelier_psirt_source ON concelier.psirt_flags(source_name, recorded_at DESC);
-- JP (JPCERT/JVN) advisory flags, single row per advisory key.
CREATE TABLE IF NOT EXISTS concelier.jp_flags (
advisory_key TEXT NOT NULL,
source_name TEXT NOT NULL,
category TEXT NOT NULL,
vendor_status TEXT,
created_at TIMESTAMPTZ NOT NULL,
CONSTRAINT pk_concelier_jp_flags PRIMARY KEY (advisory_key)
);
-- Advisory change history: current/previous snapshots plus a structured change list, all JSONB.
CREATE TABLE IF NOT EXISTS concelier.change_history (
id UUID NOT NULL,
source_name TEXT NOT NULL,
advisory_key TEXT NOT NULL,
document_id UUID NOT NULL,
document_hash TEXT NOT NULL,
snapshot_hash TEXT NOT NULL,
previous_snapshot_hash TEXT,
snapshot JSONB NOT NULL,
previous_snapshot JSONB,
changes JSONB NOT NULL,
created_at TIMESTAMPTZ NOT NULL,
CONSTRAINT pk_concelier_change_history PRIMARY KEY (id)
);
-- Supports "recent changes for an advisory" queries (created_at descending).
CREATE INDEX IF NOT EXISTS idx_concelier_change_history_advisory
ON concelier.change_history(advisory_key, created_at DESC);

View File

@@ -0,0 +1,96 @@
using System.Text.Json;
using Dapper;
using StellaOps.Concelier.Storage.Mongo.ChangeHistory;
namespace StellaOps.Concelier.Storage.Postgres.Repositories;
/// <summary>
/// Postgres-backed change-history store; snapshots and change lists are persisted as jsonb.
/// </summary>
internal sealed class PostgresChangeHistoryStore : IChangeHistoryStore
{
    private readonly ConcelierDataSource _dataSource;

    // Camel-cased JSON keeps the persisted change payloads consistent with the Mongo store.
    private readonly JsonSerializerOptions _jsonOptions = new(JsonSerializerDefaults.General)
    {
        PropertyNamingPolicy = JsonNamingPolicy.CamelCase
    };

    public PostgresChangeHistoryStore(ConcelierDataSource dataSource)
    {
        _dataSource = dataSource ?? throw new ArgumentNullException(nameof(dataSource));
    }

    /// <summary>
    /// Inserts a change-history record; duplicate ids are ignored so replays are idempotent.
    /// </summary>
    public async Task AddAsync(ChangeHistoryRecord record, CancellationToken cancellationToken)
    {
        // snapshot/previous_snapshot/changes are jsonb columns, but the parameters arrive as
        // plain text — Npgsql sends strings as text, so an explicit cast is required or
        // Postgres rejects the insert with error 42804 (datatype mismatch).
        const string sql = """
        INSERT INTO concelier.change_history
            (id, source_name, advisory_key, document_id, document_hash, snapshot_hash, previous_snapshot_hash, snapshot, previous_snapshot, changes, created_at)
        VALUES (@Id, @SourceName, @AdvisoryKey, @DocumentId, @DocumentHash, @SnapshotHash, @PreviousSnapshotHash, CAST(@Snapshot AS jsonb), CAST(@PreviousSnapshot AS jsonb), CAST(@Changes AS jsonb), @CreatedAt)
        ON CONFLICT (id) DO NOTHING;
        """;

        await using var connection = await _dataSource.OpenSystemConnectionAsync(cancellationToken);
        await connection.ExecuteAsync(new CommandDefinition(sql, new
        {
            record.Id,
            record.SourceName,
            record.AdvisoryKey,
            record.DocumentId,
            record.DocumentHash,
            record.SnapshotHash,
            record.PreviousSnapshotHash,
            Snapshot = record.Snapshot,
            PreviousSnapshot = record.PreviousSnapshot,
            Changes = JsonSerializer.Serialize(record.Changes, _jsonOptions),
            record.CreatedAt
        }, cancellationToken: cancellationToken));
    }

    /// <summary>
    /// Returns up to <paramref name="limit"/> history rows for the source/advisory pair,
    /// newest first.
    /// </summary>
    public async Task<IReadOnlyList<ChangeHistoryRecord>> GetRecentAsync(string sourceName, string advisoryKey, int limit, CancellationToken cancellationToken)
    {
        const string sql = """
        SELECT id, source_name, advisory_key, document_id, document_hash, snapshot_hash, previous_snapshot_hash, snapshot, previous_snapshot, changes, created_at
        FROM concelier.change_history
        WHERE source_name = @SourceName AND advisory_key = @AdvisoryKey
        ORDER BY created_at DESC
        LIMIT @Limit;
        """;

        await using var connection = await _dataSource.OpenSystemConnectionAsync(cancellationToken);
        var rows = await connection.QueryAsync<ChangeHistoryRow>(new CommandDefinition(sql, new
        {
            SourceName = sourceName,
            AdvisoryKey = advisoryKey,
            Limit = limit
        }, cancellationToken: cancellationToken));
        return rows.Select(ToRecord).ToArray();
    }

    // Maps a raw row to the domain record; nullable hashes/snapshots collapse to empty
    // strings, matching the Mongo store's non-null contract.
    private ChangeHistoryRecord ToRecord(ChangeHistoryRow row)
    {
        var changes = JsonSerializer.Deserialize<IReadOnlyList<ChangeHistoryFieldChange>>(row.Changes, _jsonOptions) ?? Array.Empty<ChangeHistoryFieldChange>();
        return new ChangeHistoryRecord(
            row.Id,
            row.SourceName,
            row.AdvisoryKey,
            row.DocumentId,
            row.DocumentHash,
            row.SnapshotHash,
            row.PreviousSnapshotHash ?? string.Empty,
            row.Snapshot,
            row.PreviousSnapshot ?? string.Empty,
            changes,
            row.CreatedAt);
    }

    // Dapper row shape mirroring the concelier.change_history columns.
    private sealed record ChangeHistoryRow(
        Guid Id,
        string SourceName,
        string AdvisoryKey,
        Guid DocumentId,
        string DocumentHash,
        string SnapshotHash,
        string? PreviousSnapshotHash,
        string Snapshot,
        string? PreviousSnapshot,
        string Changes,
        DateTimeOffset CreatedAt);
}

View File

@@ -0,0 +1,104 @@
using System.Text.Json;
using Dapper;
using StellaOps.Concelier.Storage.Mongo;
namespace StellaOps.Concelier.Storage.Postgres.Repositories;
/// <summary>
/// Postgres-backed DTO store; payloads are persisted as jsonb keyed by document id.
/// </summary>
internal sealed class PostgresDtoStore : IDtoStore
{
    private readonly ConcelierDataSource _dataSource;

    // Note: the original declared a JsonSerializerOptions field that was never used —
    // payload round-tripping goes through BsonDocument.ToJson/Parse — so it was removed.

    public PostgresDtoStore(ConcelierDataSource dataSource)
    {
        _dataSource = dataSource ?? throw new ArgumentNullException(nameof(dataSource));
    }

    /// <summary>
    /// Inserts or replaces the DTO for the record's document id and returns the stored row.
    /// </summary>
    public async Task<DtoRecord> UpsertAsync(DtoRecord record, CancellationToken cancellationToken)
    {
        // payload_json is a jsonb column; the parameter is sent as text by Npgsql, so it
        // must be cast explicitly or Postgres rejects the statement (error 42804).
        const string sql = """
        INSERT INTO concelier.dtos (id, document_id, source_name, format, payload_json, schema_version, created_at, validated_at)
        VALUES (@Id, @DocumentId, @SourceName, @Format, CAST(@PayloadJson AS jsonb), @SchemaVersion, @CreatedAt, @ValidatedAt)
        ON CONFLICT (document_id) DO UPDATE
        SET payload_json = EXCLUDED.payload_json,
            schema_version = EXCLUDED.schema_version,
            source_name = EXCLUDED.source_name,
            format = EXCLUDED.format,
            validated_at = EXCLUDED.validated_at
        RETURNING id, document_id, source_name, format, payload_json, schema_version, created_at, validated_at;
        """;

        var payloadJson = record.Payload.ToJson();
        await using var connection = await _dataSource.OpenSystemConnectionAsync(cancellationToken);
        var row = await connection.QuerySingleAsync<DtoRow>(new CommandDefinition(sql, new
        {
            record.Id,
            record.DocumentId,
            record.SourceName,
            record.Format,
            PayloadJson = payloadJson,
            record.SchemaVersion,
            record.CreatedAt,
            record.ValidatedAt
        }, cancellationToken: cancellationToken));
        return ToRecord(row);
    }

    /// <summary>Returns the DTO for a document id, or null when none is stored.</summary>
    public async Task<DtoRecord?> FindByDocumentIdAsync(Guid documentId, CancellationToken cancellationToken)
    {
        const string sql = """
        SELECT id, document_id, source_name, format, payload_json, schema_version, created_at, validated_at
        FROM concelier.dtos
        WHERE document_id = @DocumentId
        LIMIT 1;
        """;

        await using var connection = await _dataSource.OpenSystemConnectionAsync(cancellationToken);
        var row = await connection.QuerySingleOrDefaultAsync<DtoRow>(new CommandDefinition(sql, new { DocumentId = documentId }, cancellationToken: cancellationToken));
        return row is null ? null : ToRecord(row);
    }

    /// <summary>Returns up to <paramref name="limit"/> DTOs for a source, newest first.</summary>
    public async Task<IReadOnlyList<DtoRecord>> GetBySourceAsync(string sourceName, int limit, CancellationToken cancellationToken)
    {
        const string sql = """
        SELECT id, document_id, source_name, format, payload_json, schema_version, created_at, validated_at
        FROM concelier.dtos
        WHERE source_name = @SourceName
        ORDER BY created_at DESC
        LIMIT @Limit;
        """;

        await using var connection = await _dataSource.OpenSystemConnectionAsync(cancellationToken);
        var rows = await connection.QueryAsync<DtoRow>(new CommandDefinition(sql, new { SourceName = sourceName, Limit = limit }, cancellationToken: cancellationToken));
        return rows.Select(ToRecord).ToArray();
    }

    // Maps a raw row back to the domain record, re-hydrating the payload as a BsonDocument.
    private static DtoRecord ToRecord(DtoRow row)
    {
        var payload = MongoDB.Bson.BsonDocument.Parse(row.PayloadJson);
        return new DtoRecord(
            row.Id,
            row.DocumentId,
            row.SourceName,
            row.Format,
            payload,
            row.CreatedAt,
            row.SchemaVersion,
            row.ValidatedAt);
    }

    // Dapper row shape mirroring the concelier.dtos columns.
    private sealed record DtoRow(
        Guid Id,
        Guid DocumentId,
        string SourceName,
        string Format,
        string PayloadJson,
        string SchemaVersion,
        DateTimeOffset CreatedAt,
        DateTimeOffset ValidatedAt);
}

View File

@@ -0,0 +1,119 @@
using System.Text.Json;
using Dapper;
using StellaOps.Concelier.Storage.Mongo.Exporting;
namespace StellaOps.Concelier.Storage.Postgres.Repositories;
/// <summary>
/// Postgres-backed export checkpoint store; the file manifest is persisted as jsonb.
/// </summary>
internal sealed class PostgresExportStateStore : IExportStateStore
{
    private readonly ConcelierDataSource _dataSource;

    // Camel-cased JSON keeps the serialized file manifest consistent with the Mongo store.
    private readonly JsonSerializerOptions _jsonOptions = new(JsonSerializerDefaults.General)
    {
        PropertyNamingPolicy = JsonNamingPolicy.CamelCase
    };

    public PostgresExportStateStore(ConcelierDataSource dataSource)
    {
        _dataSource = dataSource ?? throw new ArgumentNullException(nameof(dataSource));
    }

    /// <summary>Returns the export state for <paramref name="id"/>, or null when absent.</summary>
    public async Task<ExportStateRecord?> FindAsync(string id, CancellationToken cancellationToken)
    {
        const string sql = """
        SELECT id,
               export_cursor,
               last_full_digest,
               last_delta_digest,
               base_export_id,
               base_digest,
               target_repository,
               files,
               exporter_version,
               updated_at
        FROM concelier.export_states
        WHERE id = @Id
        LIMIT 1;
        """;

        await using var connection = await _dataSource.OpenSystemConnectionAsync(cancellationToken);
        var row = await connection.QuerySingleOrDefaultAsync<ExportStateRow>(new CommandDefinition(sql, new { Id = id }, cancellationToken: cancellationToken));
        return row is null ? null : ToRecord(row);
    }

    /// <summary>Inserts or replaces the export state and returns the stored row.</summary>
    public async Task<ExportStateRecord> UpsertAsync(ExportStateRecord record, CancellationToken cancellationToken)
    {
        // files is a jsonb column; the serialized manifest is sent as text by Npgsql, so it
        // must be cast explicitly or Postgres rejects the statement (error 42804).
        const string sql = """
        INSERT INTO concelier.export_states
            (id, export_cursor, last_full_digest, last_delta_digest, base_export_id, base_digest, target_repository, files, exporter_version, updated_at)
        VALUES (@Id, @ExportCursor, @LastFullDigest, @LastDeltaDigest, @BaseExportId, @BaseDigest, @TargetRepository, CAST(@Files AS jsonb), @ExporterVersion, @UpdatedAt)
        ON CONFLICT (id) DO UPDATE
        SET export_cursor = EXCLUDED.export_cursor,
            last_full_digest = EXCLUDED.last_full_digest,
            last_delta_digest = EXCLUDED.last_delta_digest,
            base_export_id = EXCLUDED.base_export_id,
            base_digest = EXCLUDED.base_digest,
            target_repository = EXCLUDED.target_repository,
            files = EXCLUDED.files,
            exporter_version = EXCLUDED.exporter_version,
            updated_at = EXCLUDED.updated_at
        RETURNING id,
                  export_cursor,
                  last_full_digest,
                  last_delta_digest,
                  base_export_id,
                  base_digest,
                  target_repository,
                  files,
                  exporter_version,
                  updated_at;
        """;

        var filesJson = JsonSerializer.Serialize(record.Files, _jsonOptions);
        await using var connection = await _dataSource.OpenSystemConnectionAsync(cancellationToken);
        var row = await connection.QuerySingleAsync<ExportStateRow>(new CommandDefinition(sql, new
        {
            record.Id,
            record.ExportCursor,
            record.LastFullDigest,
            record.LastDeltaDigest,
            record.BaseExportId,
            record.BaseDigest,
            record.TargetRepository,
            Files = filesJson,
            record.ExporterVersion,
            record.UpdatedAt
        }, cancellationToken: cancellationToken));
        return ToRecord(row);
    }

    // Maps a raw row to the domain record, deserializing the jsonb file manifest.
    private ExportStateRecord ToRecord(ExportStateRow row)
    {
        var files = JsonSerializer.Deserialize<IReadOnlyList<ExportFileRecord>>(row.Files, _jsonOptions) ?? Array.Empty<ExportFileRecord>();
        return new ExportStateRecord(
            row.Id,
            row.ExportCursor,
            row.LastFullDigest,
            row.LastDeltaDigest,
            row.BaseExportId,
            row.BaseDigest,
            row.TargetRepository,
            files,
            row.ExporterVersion,
            row.UpdatedAt);
    }

    // Dapper row shape mirroring the concelier.export_states columns.
    private sealed record ExportStateRow(
        string Id,
        string ExportCursor,
        string? LastFullDigest,
        string? LastDeltaDigest,
        string? BaseExportId,
        string? BaseDigest,
        string? TargetRepository,
        string Files,
        string ExporterVersion,
        DateTimeOffset UpdatedAt);
}

View File

@@ -0,0 +1,58 @@
using Dapper;
using StellaOps.Concelier.Storage.Mongo.JpFlags;
namespace StellaOps.Concelier.Storage.Postgres.Repositories;
/// <summary>
/// Postgres-backed store for JP advisory flags, one row per advisory key.
/// </summary>
internal sealed class PostgresJpFlagStore : IJpFlagStore
{
    private readonly ConcelierDataSource _dataSource;

    public PostgresJpFlagStore(ConcelierDataSource dataSource)
    {
        _dataSource = dataSource ?? throw new ArgumentNullException(nameof(dataSource));
    }

    /// <summary>Inserts or refreshes the flag row for the record's advisory key.</summary>
    public async Task UpsertAsync(JpFlagRecord record, CancellationToken cancellationToken)
    {
        const string sql = """
        INSERT INTO concelier.jp_flags (advisory_key, source_name, category, vendor_status, created_at)
        VALUES (@AdvisoryKey, @SourceName, @Category, @VendorStatus, @CreatedAt)
        ON CONFLICT (advisory_key) DO UPDATE
        SET source_name = EXCLUDED.source_name,
            category = EXCLUDED.category,
            vendor_status = EXCLUDED.vendor_status,
            created_at = EXCLUDED.created_at;
        """;

        var parameters = new
        {
            record.AdvisoryKey,
            record.SourceName,
            record.Category,
            record.VendorStatus,
            record.CreatedAt
        };

        await using var connection = await _dataSource.OpenSystemConnectionAsync(cancellationToken);
        var command = new CommandDefinition(sql, parameters, cancellationToken: cancellationToken);
        await connection.ExecuteAsync(command);
    }

    /// <summary>Returns the stored flag for the advisory key, or null when absent.</summary>
    public async Task<JpFlagRecord?> FindAsync(string advisoryKey, CancellationToken cancellationToken)
    {
        const string sql = """
        SELECT advisory_key, source_name, category, vendor_status, created_at
        FROM concelier.jp_flags
        WHERE advisory_key = @AdvisoryKey
        LIMIT 1;
        """;

        await using var connection = await _dataSource.OpenSystemConnectionAsync(cancellationToken);
        var command = new CommandDefinition(sql, new { AdvisoryKey = advisoryKey }, cancellationToken: cancellationToken);
        var row = await connection.QuerySingleOrDefaultAsync<JpFlagRow>(command);
        if (row is null)
        {
            return null;
        }

        return new JpFlagRecord(row.AdvisoryKey, row.SourceName, row.Category, row.VendorStatus, row.CreatedAt);
    }

    // Dapper row shape mirroring the concelier.jp_flags columns.
    private sealed record JpFlagRow(
        string AdvisoryKey,
        string SourceName,
        string Category,
        string? VendorStatus,
        DateTimeOffset CreatedAt);
}

View File

@@ -0,0 +1,76 @@
using Dapper;
using StellaOps.Concelier.Storage.Mongo.PsirtFlags;
namespace StellaOps.Concelier.Storage.Postgres.Repositories;
/// <summary>
/// <see cref="IPsirtFlagStore"/> implementation persisting PSIRT flags in Postgres
/// (<c>concelier.psirt_flags</c>), keyed by advisory id plus vendor.
/// </summary>
internal sealed class PostgresPsirtFlagStore : IPsirtFlagStore
{
    private readonly ConcelierDataSource _dataSource;

    public PostgresPsirtFlagStore(ConcelierDataSource dataSource)
        => _dataSource = dataSource ?? throw new ArgumentNullException(nameof(dataSource));

    /// <summary>
    /// Inserts the flag, or updates the existing row when the (advisory id, vendor) pair
    /// already exists.
    /// </summary>
    public async Task UpsertAsync(PsirtFlagRecord flag, CancellationToken cancellationToken)
    {
        const string sql = """
            INSERT INTO concelier.psirt_flags (advisory_id, vendor, source_name, external_id, recorded_at)
            VALUES (@AdvisoryId, @Vendor, @SourceName, @ExternalId, @RecordedAt)
            ON CONFLICT (advisory_id, vendor) DO UPDATE
            SET source_name = EXCLUDED.source_name,
                external_id = EXCLUDED.external_id,
                recorded_at = EXCLUDED.recorded_at;
            """;

        var parameters = new
        {
            flag.AdvisoryId,
            flag.Vendor,
            flag.SourceName,
            flag.ExternalId,
            flag.RecordedAt
        };

        await using var connection = await _dataSource.OpenSystemConnectionAsync(cancellationToken);
        await connection.ExecuteAsync(new CommandDefinition(sql, parameters, cancellationToken: cancellationToken));
    }

    /// <summary>
    /// Returns up to <paramref name="limit"/> flags for the advisory, newest first.
    /// </summary>
    public async Task<IReadOnlyList<PsirtFlagRecord>> GetRecentAsync(string advisoryKey, int limit, CancellationToken cancellationToken)
    {
        const string sql = """
            SELECT advisory_id, vendor, source_name, external_id, recorded_at
            FROM concelier.psirt_flags
            WHERE advisory_id = @AdvisoryId
            ORDER BY recorded_at DESC
            LIMIT @Limit;
            """;

        var parameters = new { AdvisoryId = advisoryKey, Limit = limit };

        await using var connection = await _dataSource.OpenSystemConnectionAsync(cancellationToken);
        var rows = await connection.QueryAsync<PsirtFlagRow>(new CommandDefinition(sql, parameters, cancellationToken: cancellationToken));
        return rows.Select(Map).ToArray();
    }

    /// <summary>
    /// Returns the most recently recorded flag for the advisory, or null when none exists.
    /// </summary>
    public async Task<PsirtFlagRecord?> FindAsync(string advisoryKey, CancellationToken cancellationToken)
    {
        const string sql = """
            SELECT advisory_id, vendor, source_name, external_id, recorded_at
            FROM concelier.psirt_flags
            WHERE advisory_id = @AdvisoryId
            ORDER BY recorded_at DESC
            LIMIT 1;
            """;

        await using var connection = await _dataSource.OpenSystemConnectionAsync(cancellationToken);
        var latest = await connection.QuerySingleOrDefaultAsync<PsirtFlagRow>(new CommandDefinition(sql, new { AdvisoryId = advisoryKey }, cancellationToken: cancellationToken));
        return latest is null ? null : Map(latest);
    }

    /// <summary>Converts a database row into the domain record.</summary>
    private static PsirtFlagRecord Map(PsirtFlagRow row)
        => new(row.AdvisoryId, row.Vendor, row.SourceName, row.ExternalId, row.RecordedAt);

    /// <summary>Dapper row shape for <c>concelier.psirt_flags</c>.</summary>
    private sealed record PsirtFlagRow(
        string AdvisoryId,
        string Vendor,
        string SourceName,
        string? ExternalId,
        DateTimeOffset RecordedAt);
}

View File

@@ -7,6 +7,10 @@ using StellaOps.Infrastructure.Postgres.Options;
using StellaOps.Concelier.Core.Linksets;
using MongoContracts = StellaOps.Concelier.Storage.Mongo;
using MongoAdvisories = StellaOps.Concelier.Storage.Mongo.Advisories;
using MongoExporting = StellaOps.Concelier.Storage.Mongo.Exporting;
using MongoJpFlags = StellaOps.Concelier.Storage.Mongo.JpFlags;
using MongoPsirt = StellaOps.Concelier.Storage.Mongo.PsirtFlags;
using MongoHistory = StellaOps.Concelier.Storage.Mongo.ChangeHistory;
namespace StellaOps.Concelier.Storage.Postgres;
@@ -51,6 +55,11 @@ public static class ServiceCollectionExtensions
services.AddScoped<IAdvisoryLinksetStore, AdvisoryLinksetCacheRepository>();
services.AddScoped<IAdvisoryLinksetLookup>(sp => sp.GetRequiredService<IAdvisoryLinksetStore>());
services.AddScoped<MongoContracts.IDocumentStore, PostgresDocumentStore>();
services.AddScoped<MongoContracts.IDtoStore, PostgresDtoStore>();
services.AddScoped<MongoExporting.IExportStateStore, PostgresExportStateStore>();
services.AddScoped<MongoPsirt.IPsirtFlagStore, PostgresPsirtFlagStore>();
services.AddScoped<MongoJpFlags.IJpFlagStore, PostgresJpFlagStore>();
services.AddScoped<MongoHistory.IChangeHistoryStore, PostgresChangeHistoryStore>();
return services;
}
@@ -89,6 +98,11 @@ public static class ServiceCollectionExtensions
services.AddScoped<IAdvisoryLinksetStore, AdvisoryLinksetCacheRepository>();
services.AddScoped<IAdvisoryLinksetLookup>(sp => sp.GetRequiredService<IAdvisoryLinksetStore>());
services.AddScoped<MongoContracts.IDocumentStore, PostgresDocumentStore>();
services.AddScoped<MongoContracts.IDtoStore, PostgresDtoStore>();
services.AddScoped<MongoExporting.IExportStateStore, PostgresExportStateStore>();
services.AddScoped<MongoPsirt.IPsirtFlagStore, PostgresPsirtFlagStore>();
services.AddScoped<MongoJpFlags.IJpFlagStore, PostgresJpFlagStore>();
services.AddScoped<MongoHistory.IChangeHistoryStore, PostgresChangeHistoryStore>();
return services;
}

View File

@@ -30,6 +30,7 @@
<ItemGroup>
<ProjectReference Include="..\..\..\__Libraries\StellaOps.Infrastructure.Postgres\StellaOps.Infrastructure.Postgres.csproj" />
<ProjectReference Include="..\StellaOps.Concelier.Core\StellaOps.Concelier.Core.csproj" />
<ProjectReference Include="..\StellaOps.Concelier.Models\StellaOps.Concelier.Models.csproj" />
<ProjectReference Include="..\..\..\__Libraries\StellaOps.DependencyInjection\StellaOps.DependencyInjection.csproj" />
</ItemGroup>

View File

@@ -1,81 +1,60 @@
using System;
using System.IO;
using System.Linq;
using MongoDB.Bson;
using System.Threading;
using System.Threading.Tasks;
using Mongo2Go;
using Xunit;
using MongoDB.Driver;
using Xunit;
namespace StellaOps.Concelier.Testing;
public sealed class MongoIntegrationFixture : IAsyncLifetime
{
public MongoDbRunner Runner { get; private set; } = null!;
public IMongoDatabase Database { get; private set; } = null!;
public IMongoClient Client { get; private set; } = null!;
public Task InitializeAsync()
/// <summary>
/// In-memory stand-in for the legacy Mongo2Go fixture. No external processes are launched;
/// DropDatabaseAsync simply resets the backing in-memory collections.
/// </summary>
public sealed class MongoIntegrationFixture : IAsyncLifetime
{
private readonly FixtureMongoClient _client;
private MongoDatabase _database;
public MongoIntegrationFixture()
{
EnsureMongo2GoEnvironment();
Runner = MongoDbRunner.Start(singleNodeReplSet: true);
Client = new MongoClient(Runner.ConnectionString);
Database = Client.GetDatabase($"concelier-tests-{Guid.NewGuid():N}");
return Task.CompletedTask;
}
public Task DisposeAsync()
{
Runner.Dispose();
return Task.CompletedTask;
_client = new FixtureMongoClient(this);
Runner = MongoDbRunner.Start(singleNodeReplSet: false);
_database = CreateDatabase();
}
private static void EnsureMongo2GoEnvironment()
public MongoDbRunner Runner { get; }
public IMongoDatabase Database => _database;
public IMongoClient Client => _client;
public Task InitializeAsync() => Task.CompletedTask;
public Task DisposeAsync() => Task.CompletedTask;
internal void Reset()
{
if (!OperatingSystem.IsLinux())
{
return;
}
var libraryPath = ResolveOpenSslLibraryPath();
if (libraryPath is null)
{
return;
}
var existing = Environment.GetEnvironmentVariable("LD_LIBRARY_PATH");
if (string.IsNullOrEmpty(existing))
{
Environment.SetEnvironmentVariable("LD_LIBRARY_PATH", libraryPath);
return;
}
var segments = existing.Split(':', StringSplitOptions.RemoveEmptyEntries | StringSplitOptions.TrimEntries);
if (!segments.Contains(libraryPath, StringComparer.Ordinal))
{
Environment.SetEnvironmentVariable("LD_LIBRARY_PATH", string.Join(':', new[] { libraryPath }.Concat(segments)));
}
_database = CreateDatabase();
}
private static string? ResolveOpenSslLibraryPath()
private MongoDatabase CreateDatabase() => new($"concelier-tests-{Guid.NewGuid():N}");
private sealed class FixtureMongoClient : IMongoClient
{
var current = AppContext.BaseDirectory;
while (!string.IsNullOrEmpty(current))
private readonly MongoIntegrationFixture _fixture;
public FixtureMongoClient(MongoIntegrationFixture fixture)
{
var candidate = Path.Combine(current, "tools", "openssl", "linux-x64");
if (Directory.Exists(candidate))
{
return candidate;
}
var parent = Directory.GetParent(current);
if (parent is null)
{
break;
}
current = parent.FullName;
_fixture = fixture;
}
return null;
public IMongoDatabase GetDatabase(string name, MongoDatabaseSettings? settings = null) => _fixture.Database;
public Task DropDatabaseAsync(string name, CancellationToken cancellationToken = default)
{
_fixture.Reset();
return Task.CompletedTask;
}
}
}