Implement ledger metrics for observability and add tests for Ruby packages endpoints

- Added `LedgerMetrics` class to record write latency and total events for ledger operations.
- Created tests for the Ruby packages endpoints, covering missing inventory, successful retrieval, and scan-identifier handling (the scan id is now URI-escaped in the request path).
- Introduced `TestSurfaceSecretsScope` for managing environment variables during tests.
- Developed `ProvenanceMongoExtensions` for attaching DSSE provenance and trust information to event documents.
- Implemented `EventProvenanceWriter` and `EventWriter` classes for managing event provenance in MongoDB.
- Established MongoDB indexes for efficient querying of events based on provenance and trust.
- Added models and JSON parsing logic for DSSE provenance and trust information (see the usage sketch after this list).
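
Below is a minimal, hypothetical sketch of how the new provenance plumbing fits together: parse a DSSE provenance payload with `ProvenanceJsonParser` and attach it to an existing statement via `IAdvisoryEventLog.AttachStatementProvenanceAsync`, mirroring what the new `/events/statements/{statementId}/provenance` endpoint does. The payload shape (top-level `dsse` and `trust` objects, the `payloadType` field) is an illustrative assumption; only the `trust.verified` requirement is confirmed by the endpoint's validation.

using System;
using System.Text.Json;
using System.Threading;
using System.Threading.Tasks;
using StellaOps.Concelier.Core.Events;
using StellaOps.Provenance.Mongo;

internal static class ProvenanceAttachExample
{
    // eventLog is resolved from DI in the web service; statementId must reference an existing statement.
    public static async Task AttachAsync(IAdvisoryEventLog eventLog, Guid statementId)
    {
        // Hypothetical payload: the top-level "dsse"/"trust" objects and the "payloadType"
        // field are illustrative assumptions; "trust.verified" must be true (endpoint rule).
        const string payload = """
        {
          "dsse": { "payloadType": "application/vnd.in-toto+json" },
          "trust": { "verified": true }
        }
        """;

        using var document = JsonDocument.Parse(payload);
        var (dsse, trust) = ProvenanceJsonParser.Parse(document.RootElement);
        if (!trust.Verified)
        {
            return; // the endpoint rejects unverified provenance with HTTP 400
        }

        // Persists the provenance and trust documents onto the statement record in MongoDB.
        await eventLog.AttachStatementProvenanceAsync(statementId, dsse, trust, CancellationToken.None);
    }
}

When no payload is posted, `AdvisoryDsseMetadataResolver.TryResolve` can still recover DSSE metadata from the advisory's own provenance entries during append, as shown in the diffs below.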
Branch: master
Committed: 2025-11-13 09:29:09 +02:00
Commit: 61f963fd52 (parent: 151f6b35cc)
101 changed files with 5881 additions and 1776 deletions

View File

@@ -901,7 +901,8 @@ internal sealed class BackendOperationsClient : IBackendOperationsClient
throw new ArgumentException("Scan identifier is required.", nameof(scanId));
}
using var request = CreateRequest(HttpMethod.Get, $"api/scans/{scanId}/ruby-packages");
var encodedScanId = Uri.EscapeDataString(scanId);
using var request = CreateRequest(HttpMethod.Get, $"api/scans/{encodedScanId}/ruby-packages");
await AuthorizeRequestAsync(request, cancellationToken).ConfigureAwait(false);
using var response = await _httpClient.SendAsync(request, cancellationToken).ConfigureAwait(false);

View File

@@ -1,26 +1,72 @@
using System.Collections.Generic;
using System.Text.Json.Serialization;
namespace StellaOps.Concelier.WebService.Contracts;
public sealed record AdvisoryChunkCollectionResponse(
public sealed record AdvisoryStructuredFieldResponse(
string AdvisoryKey,
int Total,
bool Truncated,
IReadOnlyList<AdvisoryChunkItemResponse> Chunks,
IReadOnlyList<AdvisoryChunkSourceResponse> Sources);
IReadOnlyList<AdvisoryStructuredFieldEntry> Entries);
public sealed record AdvisoryChunkItemResponse(
public sealed record AdvisoryStructuredFieldEntry(
string Type,
string DocumentId,
string FieldPath,
string ChunkId,
string Section,
string ParagraphId,
string Text,
IReadOnlyDictionary<string, string> Metadata);
AdvisoryStructuredFieldContent Content,
AdvisoryStructuredFieldProvenance Provenance);
public sealed record AdvisoryChunkSourceResponse(
string ObservationId,
string DocumentId,
string Format,
string Vendor,
string ContentHash,
DateTimeOffset CreatedAt);
public sealed record AdvisoryStructuredFieldContent
{
[JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
public string? Title { get; init; }
[JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
public string? Description { get; init; }
[JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
public string? Url { get; init; }
[JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
public string? Note { get; init; }
[JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
public AdvisoryStructuredFixContent? Fix { get; init; }
[JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
public AdvisoryStructuredCvssContent? Cvss { get; init; }
[JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
public AdvisoryStructuredAffectedContent? Affected { get; init; }
}
public sealed record AdvisoryStructuredFixContent(
string? PackageType,
string? PackageIdentifier,
string? FixedVersion,
string? ReferenceUrl);
public sealed record AdvisoryStructuredCvssContent(
string Version,
string Vector,
double BaseScore,
string Severity);
public sealed record AdvisoryStructuredAffectedContent(
string PackageType,
string PackageIdentifier,
string? Platform,
string RangeKind,
string? IntroducedVersion,
string? FixedVersion,
string? LastAffectedVersion,
string? RangeExpression,
string? Status);
public sealed record AdvisoryStructuredFieldProvenance(
string Source,
string Kind,
string? Value,
DateTimeOffset RecordedAt,
IReadOnlyList<string> FieldMask);

View File

@@ -24,9 +24,9 @@ using MongoDB.Bson;
using MongoDB.Driver;
using StellaOps.Concelier.Core.Events;
using StellaOps.Concelier.Core.Jobs;
using StellaOps.Concelier.Storage.Mongo;
using StellaOps.Concelier.Core.Observations;
using StellaOps.Concelier.Core.Linksets;
using StellaOps.Concelier.Models;
using StellaOps.Concelier.WebService.Diagnostics;
using Serilog;
using StellaOps.Concelier.Merge;
@@ -50,6 +50,10 @@ using StellaOps.Concelier.WebService.Contracts;
using StellaOps.Concelier.Core.Aoc;
using StellaOps.Concelier.Core.Raw;
using StellaOps.Concelier.RawModels;
using StellaOps.Concelier.Storage.Mongo;
using StellaOps.Concelier.Storage.Mongo.Advisories;
using StellaOps.Concelier.Storage.Mongo.Aliases;
using StellaOps.Provenance.Mongo;
var builder = WebApplication.CreateBuilder(args);
@@ -812,6 +816,8 @@ var advisoryChunksEndpoint = app.MapGet("/advisories/{advisoryKey}/chunks", asyn
[FromServices] IAdvisoryObservationQueryService observationService,
[FromServices] AdvisoryChunkBuilder chunkBuilder,
[FromServices] IAdvisoryChunkCache chunkCache,
[FromServices] IAdvisoryStore advisoryStore,
[FromServices] IAliasStore aliasStore,
[FromServices] IAdvisoryAiTelemetry telemetry,
[FromServices] TimeProvider timeProvider,
CancellationToken cancellationToken) =>
@@ -854,21 +860,37 @@ var advisoryChunksEndpoint = app.MapGet("/advisories/{advisoryKey}/chunks", asyn
var sectionFilter = BuildFilterSet(context.Request.Query["section"]);
var formatFilter = BuildFilterSet(context.Request.Query["format"]);
var resolution = await ResolveAdvisoryAsync(
normalizedKey,
advisoryStore,
aliasStore,
cancellationToken).ConfigureAwait(false);
if (resolution is null)
{
telemetry.TrackChunkFailure(tenant, normalizedKey, "advisory_not_found", "not_found");
return Problem(context, "Advisory not found", StatusCodes.Status404NotFound, ProblemTypes.NotFound, $"No advisory found for {normalizedKey}.");
}
var (advisory, aliasList, fingerprint) = resolution.Value;
var aliasCandidates = aliasList.IsDefaultOrEmpty
? ImmutableArray.Create(advisory.AdvisoryKey)
: aliasList;
var queryOptions = new AdvisoryObservationQueryOptions(
tenant,
aliases: new[] { normalizedKey },
aliases: aliasCandidates,
limit: observationLimit);
var observationResult = await observationService.QueryAsync(queryOptions, cancellationToken).ConfigureAwait(false);
if (observationResult.Observations.IsDefaultOrEmpty || observationResult.Observations.Length == 0)
{
telemetry.TrackChunkFailure(tenant, normalizedKey, "advisory_not_found", "not_found");
return Problem(context, "Advisory not found", StatusCodes.Status404NotFound, ProblemTypes.NotFound, $"No observations available for {normalizedKey}.");
telemetry.TrackChunkFailure(tenant, advisory.AdvisoryKey, "advisory_not_found", "not_found");
return Problem(context, "Advisory not found", StatusCodes.Status404NotFound, ProblemTypes.NotFound, $"No observations available for {advisory.AdvisoryKey}.");
}
var observations = observationResult.Observations.ToArray();
var buildOptions = new AdvisoryChunkBuildOptions(
normalizedKey,
advisory.AdvisoryKey,
chunkLimit,
observationLimit,
sectionFilter,
@@ -884,7 +906,7 @@ var advisoryChunksEndpoint = app.MapGet("/advisories/{advisoryKey}/chunks", asyn
if (cacheDuration > TimeSpan.Zero)
{
var cacheKey = AdvisoryChunkCacheKey.Create(tenant, normalizedKey, buildOptions, observations);
var cacheKey = AdvisoryChunkCacheKey.Create(tenant, advisory.AdvisoryKey, buildOptions, observations, fingerprint);
if (chunkCache.TryGet(cacheKey, out var cachedResult))
{
buildResult = cachedResult;
@@ -892,13 +914,13 @@ var advisoryChunksEndpoint = app.MapGet("/advisories/{advisoryKey}/chunks", asyn
}
else
{
buildResult = chunkBuilder.Build(buildOptions, observations);
buildResult = chunkBuilder.Build(buildOptions, advisory, observations);
chunkCache.Set(cacheKey, buildResult, cacheDuration);
}
}
else
{
buildResult = chunkBuilder.Build(buildOptions, observations);
buildResult = chunkBuilder.Build(buildOptions, advisory, observations);
}
var duration = timeProvider.GetElapsedTime(requestStart);
@@ -907,13 +929,13 @@ var advisoryChunksEndpoint = app.MapGet("/advisories/{advisoryKey}/chunks", asyn
telemetry.TrackChunkResult(new AdvisoryAiChunkRequestTelemetry(
tenant,
normalizedKey,
advisory.AdvisoryKey,
"ok",
buildResult.Response.Truncated,
cacheHit,
observations.Length,
buildResult.Telemetry.SourceCount,
buildResult.Response.Chunks.Count,
buildResult.Response.Entries.Count,
duration,
guardrailCounts));
@@ -1055,6 +1077,52 @@ app.MapGet("/concelier/advisories/{vulnerabilityKey}/replay", async (
return JsonResult(response);
});
var statementProvenanceEndpoint = app.MapPost("/events/statements/{statementId:guid}/provenance", async (
Guid statementId,
HttpContext context,
[FromServices] IAdvisoryEventLog eventLog,
CancellationToken cancellationToken) =>
{
if (!TryResolveTenant(context, requireHeader: true, out var tenant, out var tenantError))
{
return tenantError;
}
var authorizationError = EnsureTenantAuthorized(context, tenant);
if (authorizationError is not null)
{
return authorizationError;
}
try
{
using var document = await JsonDocument.ParseAsync(context.Request.Body, cancellationToken: cancellationToken).ConfigureAwait(false);
var (dsse, trust) = ProvenanceJsonParser.Parse(document.RootElement);
if (!trust.Verified)
{
return Problem(context, "Unverified provenance", StatusCodes.Status400BadRequest, ProblemTypes.Validation, "trust.verified must be true.");
}
await eventLog.AttachStatementProvenanceAsync(statementId, dsse, trust, cancellationToken).ConfigureAwait(false);
}
catch (JsonException ex)
{
return Problem(context, "Invalid provenance payload", StatusCodes.Status400BadRequest, ProblemTypes.Validation, ex.Message);
}
catch (InvalidOperationException ex)
{
return Problem(context, "Statement not found", StatusCodes.Status404NotFound, ProblemTypes.NotFound, ex.Message);
}
return Results.Accepted($"/events/statements/{statementId}");
});
if (authorityConfigured)
{
statementProvenanceEndpoint.RequireAuthorization(AdvisoryIngestPolicyName);
}
var loggingEnabled = concelierOptions.Telemetry?.EnableLogging ?? true;
if (loggingEnabled)
@@ -1250,6 +1318,149 @@ IResult? EnsureTenantAuthorized(HttpContext context, string tenant)
return null;
}
async Task<(Advisory Advisory, ImmutableArray<string> Aliases, string Fingerprint)?> ResolveAdvisoryAsync(
string advisoryKey,
IAdvisoryStore advisoryStore,
IAliasStore aliasStore,
CancellationToken cancellationToken)
{
ArgumentNullException.ThrowIfNull(advisoryStore);
ArgumentNullException.ThrowIfNull(aliasStore);
var directCandidates = new List<string>();
if (!string.IsNullOrWhiteSpace(advisoryKey))
{
var trimmed = advisoryKey.Trim();
if (!string.IsNullOrWhiteSpace(trimmed))
{
directCandidates.Add(trimmed);
var upper = trimmed.ToUpperInvariant();
if (!string.Equals(upper, trimmed, StringComparison.Ordinal))
{
directCandidates.Add(upper);
}
}
}
foreach (var candidate in directCandidates.Distinct(StringComparer.OrdinalIgnoreCase))
{
var advisory = await advisoryStore.FindAsync(candidate, cancellationToken).ConfigureAwait(false);
if (advisory is not null)
{
return CreateResolution(advisory);
}
}
var aliasMatches = new List<AliasRecord>();
foreach (var (scheme, value) in BuildAliasLookups(advisoryKey))
{
var records = await aliasStore.GetByAliasAsync(scheme, value, cancellationToken).ConfigureAwait(false);
if (records.Count > 0)
{
aliasMatches.AddRange(records);
}
}
if (aliasMatches.Count == 0)
{
return null;
}
foreach (var candidate in aliasMatches
.OrderByDescending(record => record.UpdatedAt)
.ThenBy(record => record.AdvisoryKey, StringComparer.Ordinal)
.Select(record => record.AdvisoryKey)
.Distinct(StringComparer.OrdinalIgnoreCase))
{
var advisory = await advisoryStore.FindAsync(candidate, cancellationToken).ConfigureAwait(false);
if (advisory is not null)
{
return CreateResolution(advisory);
}
}
return null;
}
static (Advisory Advisory, ImmutableArray<string> Aliases, string Fingerprint) CreateResolution(Advisory advisory)
{
var fingerprint = AdvisoryFingerprint.Compute(advisory);
var aliases = BuildAliasQuery(advisory);
return (advisory, aliases, fingerprint);
}
static ImmutableArray<string> BuildAliasQuery(Advisory advisory)
{
var set = new HashSet<string>(StringComparer.OrdinalIgnoreCase);
if (!string.IsNullOrWhiteSpace(advisory.AdvisoryKey))
{
set.Add(advisory.AdvisoryKey.Trim());
}
foreach (var alias in advisory.Aliases)
{
if (!string.IsNullOrWhiteSpace(alias))
{
set.Add(alias.Trim());
}
}
if (set.Count == 0)
{
return ImmutableArray<string>.Empty;
}
var ordered = set
.OrderBy(static value => value, StringComparer.OrdinalIgnoreCase)
.ToList();
var canonical = advisory.AdvisoryKey?.Trim();
if (!string.IsNullOrWhiteSpace(canonical))
{
ordered.RemoveAll(value => string.Equals(value, canonical, StringComparison.OrdinalIgnoreCase));
ordered.Insert(0, canonical);
}
return ordered.ToImmutableArray();
}
static IReadOnlyList<(string Scheme, string Value)> BuildAliasLookups(string? candidate)
{
var pairs = new List<(string Scheme, string Value)>();
var seen = new HashSet<string>(StringComparer.Ordinal);
void Add(string scheme, string? value)
{
if (string.IsNullOrWhiteSpace(scheme) || string.IsNullOrWhiteSpace(value))
{
return;
}
var trimmed = value.Trim();
if (trimmed.Length == 0)
{
return;
}
var key = $"{scheme}\u0001{trimmed}";
if (seen.Add(key))
{
pairs.Add((scheme, trimmed));
}
}
if (AliasSchemeRegistry.TryNormalize(candidate, out var normalized, out var scheme))
{
Add(scheme, normalized);
}
Add(AliasStoreConstants.UnscopedScheme, candidate);
Add(AliasStoreConstants.PrimaryScheme, candidate);
return pairs;
}
ImmutableHashSet<string> BuildFilterSet(StringValues values)
{
if (values.Count == 0)

View File

@@ -3,8 +3,7 @@ using System.Collections.Immutable;
using System.Globalization;
using System.Linq;
using System.Text;
using System.Text.Json;
using System.Text.Json.Nodes;
using StellaOps.Concelier.Models;
using StellaOps.Concelier.Models.Observations;
using StellaOps.Concelier.WebService.Contracts;
using StellaOps.Cryptography;
@@ -21,7 +20,24 @@ internal sealed record AdvisoryChunkBuildOptions(
internal sealed class AdvisoryChunkBuilder
{
private const int DefaultMinLength = 40;
private const string SectionWorkaround = "workaround";
private const string SectionFix = "fix";
private const string SectionCvss = "cvss";
private const string SectionAffected = "affected";
private static readonly ImmutableArray<string> SectionOrder = ImmutableArray.Create(
SectionWorkaround,
SectionFix,
SectionCvss,
SectionAffected);
private static readonly ImmutableHashSet<string> WorkaroundKinds = ImmutableHashSet.Create(
StringComparer.OrdinalIgnoreCase,
"workaround",
"mitigation",
"temporary_fix",
"work-around");
private readonly ICryptoHash _hash;
public AdvisoryChunkBuilder(ICryptoHash hash)
@@ -31,275 +47,330 @@ internal sealed class AdvisoryChunkBuilder
public AdvisoryChunkBuildResult Build(
AdvisoryChunkBuildOptions options,
Advisory advisory,
IReadOnlyList<AdvisoryObservation> observations)
{
var chunks = new List<AdvisoryChunkItemResponse>(Math.Min(options.ChunkLimit, 256));
var sources = new List<AdvisoryChunkSourceResponse>();
ArgumentNullException.ThrowIfNull(options);
ArgumentNullException.ThrowIfNull(advisory);
ArgumentNullException.ThrowIfNull(observations);
var vendorIndex = new ObservationIndex(observations);
var chunkLimit = Math.Max(1, options.ChunkLimit);
var entries = new List<AdvisoryStructuredFieldEntry>(chunkLimit);
var total = 0;
var truncated = false;
var guardrailCounts = new Dictionary<AdvisoryChunkGuardrailReason, int>();
var sectionFilter = options.SectionFilter ?? ImmutableHashSet<string>.Empty;
foreach (var observation in observations
.OrderByDescending(o => o.CreatedAt))
foreach (var section in SectionOrder)
{
if (sources.Count >= options.ObservationLimit)
{
truncated = truncated || chunks.Count == options.ChunkLimit;
break;
}
if (options.FormatFilter.Count > 0 &&
!options.FormatFilter.Contains(observation.Content.Format))
if (!ShouldInclude(sectionFilter, section))
{
continue;
}
var documentId = DetermineDocumentId(observation);
sources.Add(new AdvisoryChunkSourceResponse(
observation.ObservationId,
documentId,
observation.Content.Format,
observation.Source.Vendor,
observation.Upstream.ContentHash,
observation.CreatedAt));
foreach (var chunk in ExtractChunks(observation, documentId, options, guardrailCounts))
IReadOnlyList<AdvisoryStructuredFieldEntry> bucket = section switch
{
total++;
if (chunks.Count < options.ChunkLimit)
{
chunks.Add(chunk);
}
else
{
truncated = true;
break;
}
SectionWorkaround => BuildWorkaroundEntries(advisory, vendorIndex),
SectionFix => BuildFixEntries(advisory, vendorIndex),
SectionCvss => BuildCvssEntries(advisory, vendorIndex),
SectionAffected => BuildAffectedEntries(advisory, vendorIndex),
_ => Array.Empty<AdvisoryStructuredFieldEntry>()
};
if (bucket.Count == 0)
{
continue;
}
if (truncated)
total += bucket.Count;
if (entries.Count >= chunkLimit)
{
break;
truncated = true;
continue;
}
var remaining = chunkLimit - entries.Count;
if (bucket.Count <= remaining)
{
entries.AddRange(bucket);
}
else
{
entries.AddRange(bucket.Take(remaining));
truncated = true;
}
}
if (!truncated)
{
total = chunks.Count;
}
var response = new AdvisoryChunkCollectionResponse(
var response = new AdvisoryStructuredFieldResponse(
options.AdvisoryKey,
total,
truncated,
chunks,
sources);
var guardrailSnapshot = guardrailCounts.Count == 0
? ImmutableDictionary<AdvisoryChunkGuardrailReason, int>.Empty
: guardrailCounts.ToImmutableDictionary();
entries);
var telemetry = new AdvisoryChunkTelemetrySummary(
sources.Count,
vendorIndex.SourceCount,
truncated,
guardrailSnapshot);
ImmutableDictionary<AdvisoryChunkGuardrailReason, int>.Empty);
return new AdvisoryChunkBuildResult(response, telemetry);
}
private static string DetermineDocumentId(AdvisoryObservation observation)
private IReadOnlyList<AdvisoryStructuredFieldEntry> BuildWorkaroundEntries(Advisory advisory, ObservationIndex index)
{
if (!string.IsNullOrWhiteSpace(observation.Upstream.UpstreamId))
if (advisory.References.Length == 0)
{
return observation.Upstream.UpstreamId;
return Array.Empty<AdvisoryStructuredFieldEntry>();
}
return observation.ObservationId;
}
private IEnumerable<AdvisoryChunkItemResponse> ExtractChunks(
AdvisoryObservation observation,
string documentId,
AdvisoryChunkBuildOptions options,
IDictionary<AdvisoryChunkGuardrailReason, int> guardrailCounts)
{
var root = observation.Content.Raw;
if (root is null)
var list = new List<AdvisoryStructuredFieldEntry>();
for (var i = 0; i < advisory.References.Length; i++)
{
yield break;
}
var stack = new Stack<(JsonNode Node, string Path, string Section)>();
stack.Push((root, string.Empty, string.Empty));
while (stack.Count > 0)
{
var (node, path, section) = stack.Pop();
if (node is null)
var reference = advisory.References[i];
if (string.IsNullOrWhiteSpace(reference.Kind) || !WorkaroundKinds.Contains(reference.Kind))
{
continue;
}
switch (node)
var content = new AdvisoryStructuredFieldContent
{
case JsonValue value:
if (!TryNormalize(value, out var text))
{
IncrementGuardrailCount(guardrailCounts, AdvisoryChunkGuardrailReason.NormalizationFailed);
break;
}
Title = reference.SourceTag ?? reference.Kind,
Description = reference.Summary,
Url = reference.Url
};
if (text.Length < Math.Max(options.MinimumLength, DefaultMinLength))
{
IncrementGuardrailCount(guardrailCounts, AdvisoryChunkGuardrailReason.BelowMinimumLength);
break;
}
if (!ContainsLetter(text))
{
IncrementGuardrailCount(guardrailCounts, AdvisoryChunkGuardrailReason.MissingAlphabeticCharacters);
break;
}
var resolvedSection = string.IsNullOrEmpty(section) ? documentId : section;
if (options.SectionFilter.Count > 0 && !options.SectionFilter.Contains(resolvedSection))
{
break;
}
var paragraphId = string.IsNullOrEmpty(path) ? resolvedSection : path;
var chunkId = CreateChunkId(documentId, paragraphId);
var metadata = new Dictionary<string, string>(StringComparer.Ordinal)
{
["path"] = paragraphId,
["section"] = resolvedSection,
["format"] = observation.Content.Format
};
if (!string.IsNullOrEmpty(observation.Content.SpecVersion))
{
metadata["specVersion"] = observation.Content.SpecVersion!;
}
yield return new AdvisoryChunkItemResponse(
documentId,
chunkId,
resolvedSection,
paragraphId,
text,
metadata);
break;
case JsonObject obj:
foreach (var property in obj.Reverse())
{
var childSection = string.IsNullOrEmpty(section) ? property.Key : section;
var childPath = AppendPath(path, property.Key);
if (property.Value is { } childNode)
{
stack.Push((childNode, childPath, childSection));
}
}
break;
case JsonArray array:
for (var index = array.Count - 1; index >= 0; index--)
{
var childPath = AppendIndex(path, index);
if (array[index] is { } childNode)
{
stack.Push((childNode, childPath, section));
}
}
break;
}
list.Add(CreateEntry(
SectionWorkaround,
index.Resolve(reference.Provenance),
$"/references/{i}",
content,
reference.Provenance));
}
return list.Count == 0 ? Array.Empty<AdvisoryStructuredFieldEntry>() : list;
}
private static bool TryNormalize(JsonValue value, out string normalized)
private IReadOnlyList<AdvisoryStructuredFieldEntry> BuildFixEntries(Advisory advisory, ObservationIndex index)
{
normalized = string.Empty;
if (!value.TryGetValue(out string? text) || text is null)
if (advisory.AffectedPackages.Length == 0)
{
return false;
return Array.Empty<AdvisoryStructuredFieldEntry>();
}
var span = text.AsSpan();
var builder = new StringBuilder(span.Length);
var previousWhitespace = false;
var list = new List<AdvisoryStructuredFieldEntry>();
foreach (var ch in span)
for (var packageIndex = 0; packageIndex < advisory.AffectedPackages.Length; packageIndex++)
{
if (char.IsControl(ch) && !char.IsWhiteSpace(ch))
var package = advisory.AffectedPackages[packageIndex];
for (var rangeIndex = 0; rangeIndex < package.VersionRanges.Length; rangeIndex++)
{
continue;
}
if (char.IsWhiteSpace(ch))
{
if (previousWhitespace)
var range = package.VersionRanges[rangeIndex];
if (string.IsNullOrWhiteSpace(range.FixedVersion))
{
continue;
}
builder.Append(' ');
previousWhitespace = true;
var fix = new AdvisoryStructuredFixContent(
package.Type,
package.Identifier,
range.FixedVersion,
null);
var content = new AdvisoryStructuredFieldContent
{
Fix = fix,
Note = package.Provenance.FirstOrDefault()?.Value
};
list.Add(CreateEntry(
SectionFix,
index.Resolve(range.Provenance),
$"/affectedPackages/{packageIndex}/versionRanges/{rangeIndex}/fix",
content,
range.Provenance));
}
else
}
return list.Count == 0 ? Array.Empty<AdvisoryStructuredFieldEntry>() : list;
}
private IReadOnlyList<AdvisoryStructuredFieldEntry> BuildCvssEntries(Advisory advisory, ObservationIndex index)
{
if (advisory.CvssMetrics.Length == 0)
{
return Array.Empty<AdvisoryStructuredFieldEntry>();
}
var list = new List<AdvisoryStructuredFieldEntry>(advisory.CvssMetrics.Length);
for (var i = 0; i < advisory.CvssMetrics.Length; i++)
{
var metric = advisory.CvssMetrics[i];
var cvss = new AdvisoryStructuredCvssContent(
metric.Version,
metric.Vector,
metric.BaseScore,
metric.BaseSeverity);
var content = new AdvisoryStructuredFieldContent
{
builder.Append(ch);
previousWhitespace = false;
Cvss = cvss
};
list.Add(CreateEntry(
SectionCvss,
index.Resolve(metric.Provenance),
$"/cvssMetrics/{i}",
content,
metric.Provenance));
}
return list;
}
private IReadOnlyList<AdvisoryStructuredFieldEntry> BuildAffectedEntries(Advisory advisory, ObservationIndex index)
{
if (advisory.AffectedPackages.Length == 0)
{
return Array.Empty<AdvisoryStructuredFieldEntry>();
}
var list = new List<AdvisoryStructuredFieldEntry>();
for (var packageIndex = 0; packageIndex < advisory.AffectedPackages.Length; packageIndex++)
{
var package = advisory.AffectedPackages[packageIndex];
var status = package.Statuses.Length > 0 ? package.Statuses[0].Status : null;
for (var rangeIndex = 0; rangeIndex < package.VersionRanges.Length; rangeIndex++)
{
var range = package.VersionRanges[rangeIndex];
var affected = new AdvisoryStructuredAffectedContent(
package.Type,
package.Identifier,
package.Platform,
range.RangeKind,
range.IntroducedVersion,
range.FixedVersion,
range.LastAffectedVersion,
range.RangeExpression,
status);
var content = new AdvisoryStructuredFieldContent
{
Affected = affected
};
list.Add(CreateEntry(
SectionAffected,
index.Resolve(range.Provenance),
$"/affectedPackages/{packageIndex}/versionRanges/{rangeIndex}",
content,
range.Provenance));
}
}
normalized = builder.ToString().Trim();
return normalized.Length > 0;
return list.Count == 0 ? Array.Empty<AdvisoryStructuredFieldEntry>() : list;
}
private static bool ContainsLetter(string text)
=> text.Any(static ch => char.IsLetter(ch));
private static string AppendPath(string path, string? segment)
private AdvisoryStructuredFieldEntry CreateEntry(
string type,
string documentId,
string fieldPath,
AdvisoryStructuredFieldContent content,
AdvisoryProvenance provenance)
{
var safeSegment = segment ?? string.Empty;
return string.IsNullOrEmpty(path) ? safeSegment : string.Concat(path, '.', safeSegment);
var fingerprint = string.Concat(documentId, '|', fieldPath);
var chunkId = CreateChunkId(fingerprint);
return new AdvisoryStructuredFieldEntry(
type,
documentId,
fieldPath,
chunkId,
content,
new AdvisoryStructuredFieldProvenance(
provenance.Source,
provenance.Kind,
provenance.Value,
provenance.RecordedAt,
NormalizeFieldMask(provenance.FieldMask)));
}
private static string AppendIndex(string path, int index)
private static IReadOnlyList<string> NormalizeFieldMask(ImmutableArray<string> mask)
=> mask.IsDefaultOrEmpty ? Array.Empty<string>() : mask;
private string CreateChunkId(string input)
{
if (string.IsNullOrEmpty(path))
var bytes = Encoding.UTF8.GetBytes(input);
var digest = _hash.ComputeHash(bytes, HashAlgorithms.Sha256);
return Convert.ToHexString(digest.AsSpan(0, 8));
}
private static bool ShouldInclude(ImmutableHashSet<string> filter, string type)
=> filter.Count == 0 || filter.Contains(type);
private sealed class ObservationIndex
{
private const string UnknownObservationId = "unknown";
private readonly Dictionary<string, AdvisoryObservation> _byVendor;
private readonly Dictionary<string, AdvisoryObservation> _byObservationId;
private readonly Dictionary<string, AdvisoryObservation> _byUpstreamId;
private readonly string _fallbackId;
public ObservationIndex(IReadOnlyList<AdvisoryObservation> observations)
{
return $"[{index}]";
_byVendor = new Dictionary<string, AdvisoryObservation>(StringComparer.OrdinalIgnoreCase);
_byObservationId = new Dictionary<string, AdvisoryObservation>(StringComparer.OrdinalIgnoreCase);
_byUpstreamId = new Dictionary<string, AdvisoryObservation>(StringComparer.OrdinalIgnoreCase);
foreach (var observation in observations)
{
_byObservationId[observation.ObservationId] = observation;
if (!string.IsNullOrWhiteSpace(observation.Source.Vendor))
{
_byVendor[observation.Source.Vendor] = observation;
}
if (!string.IsNullOrWhiteSpace(observation.Upstream.UpstreamId))
{
_byUpstreamId[observation.Upstream.UpstreamId] = observation;
}
}
_fallbackId = observations.Count > 0 ? observations[0].ObservationId : UnknownObservationId;
SourceCount = observations.Count;
}
return string.Concat(path, '[', index.ToString(CultureInfo.InvariantCulture), ']');
}
public int SourceCount { get; }
private string CreateChunkId(string documentId, string paragraphId)
{
var input = string.Concat(documentId, '|', paragraphId);
var digest = _hash.ComputeHash(Encoding.UTF8.GetBytes(input), HashAlgorithms.Sha256);
return string.Concat(documentId, ':', Convert.ToHexString(digest.AsSpan(0, 8)));
}
private static void IncrementGuardrailCount(
IDictionary<AdvisoryChunkGuardrailReason, int> counts,
AdvisoryChunkGuardrailReason reason)
{
if (!counts.TryGetValue(reason, out var current))
public string Resolve(AdvisoryProvenance provenance)
{
current = 0;
}
if (!string.IsNullOrWhiteSpace(provenance.Value))
{
if (_byObservationId.TryGetValue(provenance.Value, out var obs))
{
return obs.ObservationId;
}
counts[reason] = current + 1;
if (_byUpstreamId.TryGetValue(provenance.Value, out obs))
{
return obs.ObservationId;
}
}
if (!string.IsNullOrWhiteSpace(provenance.Source) &&
_byVendor.TryGetValue(provenance.Source, out var vendorMatch))
{
return vendorMatch.ObservationId;
}
return _fallbackId;
}
}
}
internal sealed record AdvisoryChunkBuildResult(
AdvisoryChunkCollectionResponse Response,
AdvisoryStructuredFieldResponse Response,
AdvisoryChunkTelemetrySummary Telemetry);
internal sealed record AdvisoryChunkTelemetrySummary(

View File

@@ -53,7 +53,8 @@ internal readonly record struct AdvisoryChunkCacheKey(string Value)
string tenant,
string advisoryKey,
AdvisoryChunkBuildOptions options,
IReadOnlyList<AdvisoryObservation> observations)
IReadOnlyList<AdvisoryObservation> observations,
string advisoryFingerprint)
{
var builder = new StringBuilder();
builder.Append(tenant);
@@ -70,6 +71,8 @@ internal readonly record struct AdvisoryChunkCacheKey(string Value)
builder.Append('|');
AppendSet(builder, options.FormatFilter);
builder.Append('|');
builder.Append(advisoryFingerprint);
builder.Append('|');
foreach (var observation in observations
.OrderBy(static o => o.ObservationId, StringComparer.Ordinal))

View File

@@ -0,0 +1,20 @@
using System.Security.Cryptography;
using System.Text;
using StellaOps.Concelier.Core;
using StellaOps.Concelier.Models;
namespace StellaOps.Concelier.WebService.Services;
internal static class AdvisoryFingerprint
{
public static string Compute(Advisory advisory)
{
ArgumentNullException.ThrowIfNull(advisory);
var canonical = CanonicalJsonSerializer.Serialize(advisory);
var bytes = Encoding.UTF8.GetBytes(canonical);
using var sha256 = SHA256.Create();
var hash = sha256.ComputeHash(bytes);
return Convert.ToHexString(hash);
}
}

View File

@@ -0,0 +1,76 @@
using System;
using System.Text.Json;
using StellaOps.Concelier.Models;
using StellaOps.Provenance.Mongo;
namespace StellaOps.Concelier.Core.Events;
public static class AdvisoryDsseMetadataResolver
{
private static readonly string[] CandidateKinds =
{
"dsse",
"dsse-metadata",
"attestation",
"attestation-dsse"
};
public static bool TryResolve(Advisory advisory, out DsseProvenance? dsse, out TrustInfo? trust)
{
dsse = null;
trust = null;
if (advisory is null || advisory.Provenance.IsDefaultOrEmpty || advisory.Provenance.Length == 0)
{
return false;
}
foreach (var entry in advisory.Provenance)
{
if (!IsCandidateKind(entry.Kind) || string.IsNullOrWhiteSpace(entry.Value))
{
continue;
}
try
{
using var document = JsonDocument.Parse(entry.Value);
(dsse, trust) = ProvenanceJsonParser.Parse(document.RootElement);
if (dsse is not null && trust is not null)
{
return true;
}
}
catch (JsonException)
{
// Ignore malformed payloads; other provenance entries may contain valid DSSE metadata.
}
catch (InvalidOperationException)
{
// Same as above; fall through to the remaining provenance entries.
}
}
dsse = null;
trust = null;
return false;
}
private static bool IsCandidateKind(string? kind)
{
if (string.IsNullOrWhiteSpace(kind))
{
return false;
}
foreach (var candidate in CandidateKinds)
{
if (string.Equals(candidate, kind, StringComparison.OrdinalIgnoreCase))
{
return true;
}
}
return false;
}
}

View File

@@ -1,30 +1,35 @@
using System;
using System.Collections.Immutable;
using System.Text.Json;
using StellaOps.Concelier.Models;
using System.Text.Json;
using StellaOps.Concelier.Models;
using StellaOps.Provenance.Mongo;
namespace StellaOps.Concelier.Core.Events;
/// <summary>
/// Input payload for appending a canonical advisory statement to the event log.
/// </summary>
public sealed record AdvisoryStatementInput(
string VulnerabilityKey,
Advisory Advisory,
DateTimeOffset AsOf,
IReadOnlyCollection<Guid> InputDocumentIds,
Guid? StatementId = null,
string? AdvisoryKey = null);
public sealed record AdvisoryStatementInput(
string VulnerabilityKey,
Advisory Advisory,
DateTimeOffset AsOf,
IReadOnlyCollection<Guid> InputDocumentIds,
Guid? StatementId = null,
string? AdvisoryKey = null,
DsseProvenance? Provenance = null,
TrustInfo? Trust = null);
/// <summary>
/// Input payload for appending an advisory conflict entry aligned with an advisory statement snapshot.
/// </summary>
public sealed record AdvisoryConflictInput(
string VulnerabilityKey,
JsonDocument Details,
DateTimeOffset AsOf,
IReadOnlyCollection<Guid> StatementIds,
Guid? ConflictId = null);
public sealed record AdvisoryConflictInput(
string VulnerabilityKey,
JsonDocument Details,
DateTimeOffset AsOf,
IReadOnlyCollection<Guid> StatementIds,
Guid? ConflictId = null,
DsseProvenance? Provenance = null,
TrustInfo? Trust = null);
/// <summary>
/// Append request encapsulating statement and conflict batches sharing a single persistence window.
@@ -70,24 +75,28 @@ public sealed record AdvisoryConflictSnapshot(
/// <summary>
/// Persistence-facing representation of an advisory statement used by repositories.
/// </summary>
public sealed record AdvisoryStatementEntry(
Guid StatementId,
string VulnerabilityKey,
string AdvisoryKey,
string CanonicalJson,
ImmutableArray<byte> StatementHash,
DateTimeOffset AsOf,
DateTimeOffset RecordedAt,
ImmutableArray<Guid> InputDocumentIds);
public sealed record AdvisoryStatementEntry(
Guid StatementId,
string VulnerabilityKey,
string AdvisoryKey,
string CanonicalJson,
ImmutableArray<byte> StatementHash,
DateTimeOffset AsOf,
DateTimeOffset RecordedAt,
ImmutableArray<Guid> InputDocumentIds,
DsseProvenance? Provenance = null,
TrustInfo? Trust = null);
/// <summary>
/// Persistence-facing representation of an advisory conflict used by repositories.
/// </summary>
public sealed record AdvisoryConflictEntry(
Guid ConflictId,
string VulnerabilityKey,
string CanonicalJson,
ImmutableArray<byte> ConflictHash,
DateTimeOffset AsOf,
DateTimeOffset RecordedAt,
ImmutableArray<Guid> StatementIds);
public sealed record AdvisoryConflictEntry(
Guid ConflictId,
string VulnerabilityKey,
string CanonicalJson,
ImmutableArray<byte> ConflictHash,
DateTimeOffset AsOf,
DateTimeOffset RecordedAt,
ImmutableArray<Guid> StatementIds,
DsseProvenance? Provenance = null,
TrustInfo? Trust = null);

View File

@@ -6,10 +6,11 @@ using System.Linq;
using System.Security.Cryptography;
using System.Text;
using System.Text.Encodings.Web;
using System.Text.Json;
using System.Threading;
using System.Threading.Tasks;
using StellaOps.Concelier.Models;
using System.Text.Json;
using System.Threading;
using System.Threading.Tasks;
using StellaOps.Concelier.Models;
using StellaOps.Provenance.Mongo;
namespace StellaOps.Concelier.Core.Events;
@@ -78,14 +79,26 @@ public sealed class AdvisoryEventLog : IAdvisoryEventLog
.Select(ToStatementSnapshot)
.ToImmutableArray();
var conflictSnapshots = conflicts
.OrderByDescending(static entry => entry.AsOf)
.ThenByDescending(static entry => entry.RecordedAt)
.Select(ToConflictSnapshot)
.ToImmutableArray();
return new AdvisoryReplay(normalizedKey, asOf, statementSnapshots, conflictSnapshots);
}
var conflictSnapshots = conflicts
.OrderByDescending(static entry => entry.AsOf)
.ThenByDescending(static entry => entry.RecordedAt)
.Select(ToConflictSnapshot)
.ToImmutableArray();
return new AdvisoryReplay(normalizedKey, asOf, statementSnapshots, conflictSnapshots);
}
public ValueTask AttachStatementProvenanceAsync(
Guid statementId,
DsseProvenance provenance,
TrustInfo trust,
CancellationToken cancellationToken)
{
ArgumentNullException.ThrowIfNull(provenance);
ArgumentNullException.ThrowIfNull(trust);
return _repository.AttachStatementProvenanceAsync(statementId, provenance, trust, cancellationToken);
}
private static AdvisoryStatementSnapshot ToStatementSnapshot(AdvisoryStatementEntry entry)
{
@@ -134,10 +147,10 @@ public sealed class AdvisoryEventLog : IAdvisoryEventLog
ArgumentNullException.ThrowIfNull(statement.Advisory);
var vulnerabilityKey = NormalizeKey(statement.VulnerabilityKey, nameof(statement.VulnerabilityKey));
var advisory = CanonicalJsonSerializer.Normalize(statement.Advisory);
var advisoryKey = string.IsNullOrWhiteSpace(statement.AdvisoryKey)
? advisory.AdvisoryKey
: statement.AdvisoryKey.Trim();
var advisory = CanonicalJsonSerializer.Normalize(statement.Advisory);
var advisoryKey = string.IsNullOrWhiteSpace(statement.AdvisoryKey)
? advisory.AdvisoryKey
: statement.AdvisoryKey.Trim();
if (string.IsNullOrWhiteSpace(advisoryKey))
{
@@ -149,30 +162,33 @@ public sealed class AdvisoryEventLog : IAdvisoryEventLog
throw new ArgumentException("Advisory key in payload must match provided advisory key.", nameof(statement));
}
var canonicalJson = CanonicalJsonSerializer.Serialize(advisory);
var hashBytes = ComputeHash(canonicalJson);
var asOf = statement.AsOf.ToUniversalTime();
var inputDocuments = statement.InputDocumentIds?.Count > 0
? statement.InputDocumentIds
.Where(static id => id != Guid.Empty)
.Distinct()
.OrderBy(static id => id)
.ToImmutableArray()
: ImmutableArray<Guid>.Empty;
entries.Add(new AdvisoryStatementEntry(
statement.StatementId ?? Guid.NewGuid(),
vulnerabilityKey,
advisoryKey,
canonicalJson,
hashBytes,
asOf,
recordedAt,
inputDocuments));
}
return entries;
}
var canonicalJson = CanonicalJsonSerializer.Serialize(advisory);
var hashBytes = ComputeHash(canonicalJson);
var asOf = statement.AsOf.ToUniversalTime();
var inputDocuments = statement.InputDocumentIds?.Count > 0
? statement.InputDocumentIds
.Where(static id => id != Guid.Empty)
.Distinct()
.OrderBy(static id => id)
.ToImmutableArray()
: ImmutableArray<Guid>.Empty;
var (provenance, trust) = ResolveStatementMetadata(advisory, statement.Provenance, statement.Trust);
entries.Add(new AdvisoryStatementEntry(
statement.StatementId ?? Guid.NewGuid(),
vulnerabilityKey,
advisoryKey,
canonicalJson,
hashBytes,
asOf,
recordedAt,
inputDocuments,
provenance,
trust));
}
return entries;
}
private static IReadOnlyCollection<AdvisoryConflictEntry> BuildConflictEntries(
IReadOnlyCollection<AdvisoryConflictInput> conflicts,
@@ -202,23 +218,44 @@ public sealed class AdvisoryEventLog : IAdvisoryEventLog
.ToImmutableArray()
: ImmutableArray<Guid>.Empty;
entries.Add(new AdvisoryConflictEntry(
conflict.ConflictId ?? Guid.NewGuid(),
vulnerabilityKey,
canonicalJson,
hashBytes,
asOf,
recordedAt,
statementIds));
entries.Add(new AdvisoryConflictEntry(
conflict.ConflictId ?? Guid.NewGuid(),
vulnerabilityKey,
canonicalJson,
hashBytes,
asOf,
recordedAt,
statementIds,
conflict.Provenance,
conflict.Trust));
}
return entries;
}
private static string NormalizeKey(string value, string parameterName)
{
if (string.IsNullOrWhiteSpace(value))
{
return entries;
}
private static (DsseProvenance?, TrustInfo?) ResolveStatementMetadata(
Advisory advisory,
DsseProvenance? suppliedProvenance,
TrustInfo? suppliedTrust)
{
if (suppliedProvenance is not null && suppliedTrust is not null)
{
return (suppliedProvenance, suppliedTrust);
}
if (AdvisoryDsseMetadataResolver.TryResolve(advisory, out var resolvedProvenance, out var resolvedTrust))
{
suppliedProvenance ??= resolvedProvenance;
suppliedTrust ??= resolvedTrust;
}
return (suppliedProvenance, suppliedTrust);
}
private static string NormalizeKey(string value, string parameterName)
{
if (string.IsNullOrWhiteSpace(value))
{
throw new ArgumentException("Value must be provided.", parameterName);
}

View File

@@ -1,15 +1,22 @@
using System;
using System.Threading;
using System.Threading.Tasks;
using System.Threading.Tasks;
using StellaOps.Provenance.Mongo;
namespace StellaOps.Concelier.Core.Events;
/// <summary>
/// High-level API for recording and replaying advisory statements with deterministic as-of queries.
/// </summary>
public interface IAdvisoryEventLog
{
ValueTask AppendAsync(AdvisoryEventAppendRequest request, CancellationToken cancellationToken);
ValueTask<AdvisoryReplay> ReplayAsync(string vulnerabilityKey, DateTimeOffset? asOf, CancellationToken cancellationToken);
}
public interface IAdvisoryEventLog
{
ValueTask AppendAsync(AdvisoryEventAppendRequest request, CancellationToken cancellationToken);
ValueTask<AdvisoryReplay> ReplayAsync(string vulnerabilityKey, DateTimeOffset? asOf, CancellationToken cancellationToken);
ValueTask AttachStatementProvenanceAsync(
Guid statementId,
DsseProvenance provenance,
TrustInfo trust,
CancellationToken cancellationToken);
}

View File

@@ -2,7 +2,8 @@ using System;
using System.Collections.Generic;
using System.Collections.Immutable;
using System.Threading;
using System.Threading.Tasks;
using System.Threading.Tasks;
using StellaOps.Provenance.Mongo;
namespace StellaOps.Concelier.Core.Events;
@@ -19,13 +20,19 @@ public interface IAdvisoryEventRepository
IReadOnlyCollection<AdvisoryConflictEntry> conflicts,
CancellationToken cancellationToken);
ValueTask<IReadOnlyList<AdvisoryStatementEntry>> GetStatementsAsync(
string vulnerabilityKey,
DateTimeOffset? asOf,
CancellationToken cancellationToken);
ValueTask<IReadOnlyList<AdvisoryConflictEntry>> GetConflictsAsync(
string vulnerabilityKey,
DateTimeOffset? asOf,
CancellationToken cancellationToken);
}
ValueTask<IReadOnlyList<AdvisoryStatementEntry>> GetStatementsAsync(
string vulnerabilityKey,
DateTimeOffset? asOf,
CancellationToken cancellationToken);
ValueTask<IReadOnlyList<AdvisoryConflictEntry>> GetConflictsAsync(
string vulnerabilityKey,
DateTimeOffset? asOf,
CancellationToken cancellationToken);
ValueTask AttachStatementProvenanceAsync(
Guid statementId,
DsseProvenance provenance,
TrustInfo trust,
CancellationToken cancellationToken);
}

View File

@@ -19,6 +19,7 @@
<ProjectReference Include="..\StellaOps.Concelier.RawModels\StellaOps.Concelier.RawModels.csproj" />
<ProjectReference Include="..\StellaOps.Concelier.Normalization\StellaOps.Concelier.Normalization.csproj" />
<ProjectReference Include="..\..\..\__Libraries\StellaOps.Ingestion.Telemetry\StellaOps.Ingestion.Telemetry.csproj" />
<ProjectReference Include="..\..\..\__Libraries\StellaOps.Provenance.Mongo\StellaOps.Provenance.Mongo.csproj" />
<ProjectReference Include="../../../__Libraries/StellaOps.Plugin/StellaOps.Plugin.csproj" />
<ProjectReference Include="../../../Aoc/__Libraries/StellaOps.Aoc/StellaOps.Aoc.csproj" />
</ItemGroup>

View File

@@ -6,13 +6,14 @@ using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.Extensions.Logging;
using StellaOps.Concelier.Core;
using StellaOps.Concelier.Core.Events;
using StellaOps.Concelier.Models;
using StellaOps.Concelier.Storage.Mongo.Advisories;
using StellaOps.Concelier.Storage.Mongo.Aliases;
using StellaOps.Concelier.Storage.Mongo.MergeEvents;
using System.Text.Json;
using StellaOps.Concelier.Core;
using StellaOps.Concelier.Core.Events;
using StellaOps.Concelier.Models;
using StellaOps.Concelier.Storage.Mongo.Advisories;
using StellaOps.Concelier.Storage.Mongo.Aliases;
using StellaOps.Concelier.Storage.Mongo.MergeEvents;
using System.Text.Json;
using StellaOps.Provenance.Mongo;
namespace StellaOps.Concelier.Merge.Services;
@@ -139,39 +140,45 @@ public sealed class AdvisoryMergeService
return new AdvisoryMergeResult(seedAdvisoryKey, canonicalKey, component, inputs, before, merged, conflictSummaries);
}
private async Task<IReadOnlyList<MergeConflictSummary>> AppendEventLogAsync(
string vulnerabilityKey,
IReadOnlyList<Advisory> inputs,
Advisory merged,
IReadOnlyList<MergeConflictDetail> conflicts,
CancellationToken cancellationToken)
private async Task<IReadOnlyList<MergeConflictSummary>> AppendEventLogAsync(
string vulnerabilityKey,
IReadOnlyList<Advisory> inputs,
Advisory merged,
IReadOnlyList<MergeConflictDetail> conflicts,
CancellationToken cancellationToken)
{
var recordedAt = _timeProvider.GetUtcNow();
var statements = new List<AdvisoryStatementInput>(inputs.Count + 1);
var statementIds = new Dictionary<Advisory, Guid>(ReferenceEqualityComparer.Instance);
foreach (var advisory in inputs)
{
var statementId = Guid.NewGuid();
statementIds[advisory] = statementId;
statements.Add(new AdvisoryStatementInput(
vulnerabilityKey,
advisory,
DetermineAsOf(advisory, recordedAt),
InputDocumentIds: Array.Empty<Guid>(),
StatementId: statementId,
AdvisoryKey: advisory.AdvisoryKey));
}
var canonicalStatementId = Guid.NewGuid();
statementIds[merged] = canonicalStatementId;
statements.Add(new AdvisoryStatementInput(
vulnerabilityKey,
merged,
recordedAt,
InputDocumentIds: Array.Empty<Guid>(),
StatementId: canonicalStatementId,
AdvisoryKey: merged.AdvisoryKey));
foreach (var advisory in inputs)
{
var statementId = Guid.NewGuid();
statementIds[advisory] = statementId;
var (provenance, trust) = ResolveDsseMetadata(advisory);
statements.Add(new AdvisoryStatementInput(
vulnerabilityKey,
advisory,
DetermineAsOf(advisory, recordedAt),
InputDocumentIds: Array.Empty<Guid>(),
StatementId: statementId,
AdvisoryKey: advisory.AdvisoryKey,
Provenance: provenance,
Trust: trust));
}
var canonicalStatementId = Guid.NewGuid();
statementIds[merged] = canonicalStatementId;
var (canonicalProvenance, canonicalTrust) = ResolveDsseMetadata(merged);
statements.Add(new AdvisoryStatementInput(
vulnerabilityKey,
merged,
recordedAt,
InputDocumentIds: Array.Empty<Guid>(),
StatementId: canonicalStatementId,
AdvisoryKey: merged.AdvisoryKey,
Provenance: canonicalProvenance,
Trust: canonicalTrust));
var conflictMaterialization = BuildConflictInputs(conflicts, vulnerabilityKey, statementIds, canonicalStatementId, recordedAt);
var conflictInputs = conflictMaterialization.Inputs;
@@ -198,15 +205,22 @@ public sealed class AdvisoryMergeService
}
}
return conflictSummaries.Count == 0
? Array.Empty<MergeConflictSummary>()
: conflictSummaries.ToArray();
}
private static DateTimeOffset DetermineAsOf(Advisory advisory, DateTimeOffset fallback)
{
return (advisory.Modified ?? advisory.Published ?? fallback).ToUniversalTime();
}
return conflictSummaries.Count == 0
? Array.Empty<MergeConflictSummary>()
: conflictSummaries.ToArray();
}
private static (DsseProvenance?, TrustInfo?) ResolveDsseMetadata(Advisory advisory)
{
return AdvisoryDsseMetadataResolver.TryResolve(advisory, out var dsse, out var trust)
? (dsse, trust)
: (null, null);
}
private static DateTimeOffset DetermineAsOf(Advisory advisory, DateTimeOffset fallback)
{
return (advisory.Modified ?? advisory.Published ?? fallback).ToUniversalTime();
}
private static ConflictMaterialization BuildConflictInputs(
IReadOnlyList<MergeConflictDetail> conflicts,

View File

@@ -27,31 +27,43 @@ public sealed class AdvisoryConflictDocument
[BsonElement("statementIds")]
public List<string> StatementIds { get; set; } = new();
[BsonElement("details")]
public BsonDocument Details { get; set; } = new();
[BsonElement("details")]
public BsonDocument Details { get; set; } = new();
[BsonElement("provenance")]
[BsonIgnoreIfNull]
public BsonDocument? Provenance { get; set; }
[BsonElement("trust")]
[BsonIgnoreIfNull]
public BsonDocument? Trust { get; set; }
}
internal static class AdvisoryConflictDocumentExtensions
{
public static AdvisoryConflictDocument FromRecord(AdvisoryConflictRecord record)
=> new()
{
Id = record.Id.ToString(),
VulnerabilityKey = record.VulnerabilityKey,
ConflictHash = record.ConflictHash,
AsOf = record.AsOf.UtcDateTime,
RecordedAt = record.RecordedAt.UtcDateTime,
StatementIds = record.StatementIds.Select(static id => id.ToString()).ToList(),
Details = (BsonDocument)record.Details.DeepClone(),
};
public static AdvisoryConflictRecord ToRecord(this AdvisoryConflictDocument document)
=> new(
Guid.Parse(document.Id),
document.VulnerabilityKey,
document.ConflictHash,
DateTime.SpecifyKind(document.AsOf, DateTimeKind.Utc),
DateTime.SpecifyKind(document.RecordedAt, DateTimeKind.Utc),
document.StatementIds.Select(static value => Guid.Parse(value)).ToList(),
(BsonDocument)document.Details.DeepClone());
=> new()
{
Id = record.Id.ToString(),
VulnerabilityKey = record.VulnerabilityKey,
ConflictHash = record.ConflictHash,
AsOf = record.AsOf.UtcDateTime,
RecordedAt = record.RecordedAt.UtcDateTime,
StatementIds = record.StatementIds.Select(static id => id.ToString()).ToList(),
Details = (BsonDocument)record.Details.DeepClone(),
Provenance = record.Provenance is null ? null : (BsonDocument)record.Provenance.DeepClone(),
Trust = record.Trust is null ? null : (BsonDocument)record.Trust.DeepClone(),
};
public static AdvisoryConflictRecord ToRecord(this AdvisoryConflictDocument document)
=> new(
Guid.Parse(document.Id),
document.VulnerabilityKey,
document.ConflictHash,
DateTime.SpecifyKind(document.AsOf, DateTimeKind.Utc),
DateTime.SpecifyKind(document.RecordedAt, DateTimeKind.Utc),
document.StatementIds.Select(static value => Guid.Parse(value)).ToList(),
(BsonDocument)document.Details.DeepClone(),
document.Provenance is null ? null : (BsonDocument)document.Provenance.DeepClone(),
document.Trust is null ? null : (BsonDocument)document.Trust.DeepClone());
}

View File

@@ -4,11 +4,13 @@ using MongoDB.Bson;
namespace StellaOps.Concelier.Storage.Mongo.Conflicts;
public sealed record AdvisoryConflictRecord(
Guid Id,
string VulnerabilityKey,
byte[] ConflictHash,
DateTimeOffset AsOf,
DateTimeOffset RecordedAt,
IReadOnlyList<Guid> StatementIds,
BsonDocument Details);
public sealed record AdvisoryConflictRecord(
Guid Id,
string VulnerabilityKey,
byte[] ConflictHash,
DateTimeOffset AsOf,
DateTimeOffset RecordedAt,
IReadOnlyList<Guid> StatementIds,
BsonDocument Details,
BsonDocument? Provenance = null,
BsonDocument? Trust = null);

View File

@@ -1,224 +1,425 @@
using System;
using System.Collections.Generic;
using System.Collections.Immutable;
using System.IO;
using System.Linq;
using System.Text;
using System.Text.Encodings.Web;
using System.Text.Json;
using System.Threading;
using System.Threading.Tasks;
using MongoDB.Bson;
using StellaOps.Concelier.Core.Events;
using StellaOps.Concelier.Models;
using StellaOps.Concelier.Storage.Mongo.Conflicts;
using StellaOps.Concelier.Storage.Mongo.Statements;
namespace StellaOps.Concelier.Storage.Mongo.Events;
public sealed class MongoAdvisoryEventRepository : IAdvisoryEventRepository
{
private readonly IAdvisoryStatementStore _statementStore;
private readonly IAdvisoryConflictStore _conflictStore;
public MongoAdvisoryEventRepository(
IAdvisoryStatementStore statementStore,
IAdvisoryConflictStore conflictStore)
{
_statementStore = statementStore ?? throw new ArgumentNullException(nameof(statementStore));
_conflictStore = conflictStore ?? throw new ArgumentNullException(nameof(conflictStore));
}
public async ValueTask InsertStatementsAsync(
IReadOnlyCollection<AdvisoryStatementEntry> statements,
CancellationToken cancellationToken)
{
if (statements is null)
{
throw new ArgumentNullException(nameof(statements));
}
if (statements.Count == 0)
{
return;
}
var records = statements
.Select(static entry =>
{
var payload = BsonDocument.Parse(entry.CanonicalJson);
return new AdvisoryStatementRecord(
entry.StatementId,
entry.VulnerabilityKey,
entry.AdvisoryKey,
entry.StatementHash.ToArray(),
entry.AsOf,
entry.RecordedAt,
payload,
entry.InputDocumentIds.ToArray());
})
.ToList();
await _statementStore.InsertAsync(records, cancellationToken).ConfigureAwait(false);
}
public async ValueTask InsertConflictsAsync(
IReadOnlyCollection<AdvisoryConflictEntry> conflicts,
CancellationToken cancellationToken)
{
if (conflicts is null)
{
throw new ArgumentNullException(nameof(conflicts));
}
if (conflicts.Count == 0)
{
return;
}
var records = conflicts
.Select(static entry =>
{
var payload = BsonDocument.Parse(entry.CanonicalJson);
return new AdvisoryConflictRecord(
entry.ConflictId,
entry.VulnerabilityKey,
entry.ConflictHash.ToArray(),
entry.AsOf,
entry.RecordedAt,
entry.StatementIds.ToArray(),
payload);
})
.ToList();
await _conflictStore.InsertAsync(records, cancellationToken).ConfigureAwait(false);
}
public async ValueTask<IReadOnlyList<AdvisoryStatementEntry>> GetStatementsAsync(
string vulnerabilityKey,
DateTimeOffset? asOf,
CancellationToken cancellationToken)
{
var records = await _statementStore
.GetStatementsAsync(vulnerabilityKey, asOf, cancellationToken)
.ConfigureAwait(false);
if (records.Count == 0)
{
return Array.Empty<AdvisoryStatementEntry>();
}
var entries = records
.Select(static record =>
{
var advisory = CanonicalJsonSerializer.Deserialize<Advisory>(record.Payload.ToJson());
var canonicalJson = CanonicalJsonSerializer.Serialize(advisory);
return new AdvisoryStatementEntry(
record.Id,
record.VulnerabilityKey,
record.AdvisoryKey,
canonicalJson,
record.StatementHash.ToImmutableArray(),
record.AsOf,
record.RecordedAt,
record.InputDocumentIds.ToImmutableArray());
})
.ToList();
return entries;
}
public async ValueTask<IReadOnlyList<AdvisoryConflictEntry>> GetConflictsAsync(
string vulnerabilityKey,
DateTimeOffset? asOf,
CancellationToken cancellationToken)
{
var records = await _conflictStore
.GetConflictsAsync(vulnerabilityKey, asOf, cancellationToken)
.ConfigureAwait(false);
if (records.Count == 0)
{
return Array.Empty<AdvisoryConflictEntry>();
}
var entries = records
.Select(static record =>
{
var canonicalJson = Canonicalize(record.Details);
return new AdvisoryConflictEntry(
record.Id,
record.VulnerabilityKey,
canonicalJson,
record.ConflictHash.ToImmutableArray(),
record.AsOf,
record.RecordedAt,
record.StatementIds.ToImmutableArray());
})
.ToList();
return entries;
}
private static readonly JsonWriterOptions CanonicalWriterOptions = new()
{
Encoder = JavaScriptEncoder.UnsafeRelaxedJsonEscaping,
Indented = false,
SkipValidation = false,
};
private static string Canonicalize(BsonDocument document)
{
using var json = JsonDocument.Parse(document.ToJson());
using var stream = new MemoryStream();
using (var writer = new Utf8JsonWriter(stream, CanonicalWriterOptions))
{
WriteCanonical(json.RootElement, writer);
}
return Encoding.UTF8.GetString(stream.ToArray());
}
private static void WriteCanonical(JsonElement element, Utf8JsonWriter writer)
{
switch (element.ValueKind)
{
case JsonValueKind.Object:
writer.WriteStartObject();
foreach (var property in element.EnumerateObject().OrderBy(static p => p.Name, StringComparer.Ordinal))
{
writer.WritePropertyName(property.Name);
WriteCanonical(property.Value, writer);
}
writer.WriteEndObject();
break;
case JsonValueKind.Array:
writer.WriteStartArray();
foreach (var item in element.EnumerateArray())
{
WriteCanonical(item, writer);
}
writer.WriteEndArray();
break;
case JsonValueKind.String:
writer.WriteStringValue(element.GetString());
break;
case JsonValueKind.Number:
writer.WriteRawValue(element.GetRawText());
break;
case JsonValueKind.True:
writer.WriteBooleanValue(true);
break;
case JsonValueKind.False:
writer.WriteBooleanValue(false);
break;
case JsonValueKind.Null:
writer.WriteNullValue();
break;
default:
writer.WriteRawValue(element.GetRawText());
break;
}
}
}
using System;
using System.Collections.Generic;
using System.Collections.Immutable;
using System.IO;
using System.Linq;
using System.Text;
using System.Text.Encodings.Web;
using System.Text.Json;
using System.Threading;
using System.Threading.Tasks;
using MongoDB.Bson;
using StellaOps.Concelier.Core.Events;
using StellaOps.Concelier.Models;
using StellaOps.Concelier.Storage.Mongo.Conflicts;
using StellaOps.Concelier.Storage.Mongo.Statements;
using StellaOps.Provenance.Mongo;
namespace StellaOps.Concelier.Storage.Mongo.Events;
public sealed class MongoAdvisoryEventRepository : IAdvisoryEventRepository
{
private readonly IAdvisoryStatementStore _statementStore;
private readonly IAdvisoryConflictStore _conflictStore;
public MongoAdvisoryEventRepository(
IAdvisoryStatementStore statementStore,
IAdvisoryConflictStore conflictStore)
{
_statementStore = statementStore ?? throw new ArgumentNullException(nameof(statementStore));
_conflictStore = conflictStore ?? throw new ArgumentNullException(nameof(conflictStore));
}
public async ValueTask InsertStatementsAsync(
IReadOnlyCollection<AdvisoryStatementEntry> statements,
CancellationToken cancellationToken)
{
if (statements is null)
{
throw new ArgumentNullException(nameof(statements));
}
if (statements.Count == 0)
{
return;
}
var records = statements
.Select(static entry =>
{
var payload = BsonDocument.Parse(entry.CanonicalJson);
var (provenanceDoc, trustDoc) = BuildMetadata(entry.Provenance, entry.Trust);
return new AdvisoryStatementRecord(
entry.StatementId,
entry.VulnerabilityKey,
entry.AdvisoryKey,
entry.StatementHash.ToArray(),
entry.AsOf,
entry.RecordedAt,
payload,
entry.InputDocumentIds.ToArray(),
provenanceDoc,
trustDoc);
})
.ToList();
await _statementStore.InsertAsync(records, cancellationToken).ConfigureAwait(false);
}
public async ValueTask InsertConflictsAsync(
IReadOnlyCollection<AdvisoryConflictEntry> conflicts,
CancellationToken cancellationToken)
{
if (conflicts is null)
{
throw new ArgumentNullException(nameof(conflicts));
}
if (conflicts.Count == 0)
{
return;
}
var records = conflicts
.Select(static entry =>
{
var payload = BsonDocument.Parse(entry.CanonicalJson);
var (provenanceDoc, trustDoc) = BuildMetadata(entry.Provenance, entry.Trust);
return new AdvisoryConflictRecord(
entry.ConflictId,
entry.VulnerabilityKey,
entry.ConflictHash.ToArray(),
entry.AsOf,
entry.RecordedAt,
entry.StatementIds.ToArray(),
payload,
provenanceDoc,
trustDoc);
})
.ToList();
await _conflictStore.InsertAsync(records, cancellationToken).ConfigureAwait(false);
}
public async ValueTask<IReadOnlyList<AdvisoryStatementEntry>> GetStatementsAsync(
string vulnerabilityKey,
DateTimeOffset? asOf,
CancellationToken cancellationToken)
{
var records = await _statementStore
.GetStatementsAsync(vulnerabilityKey, asOf, cancellationToken)
.ConfigureAwait(false);
if (records.Count == 0)
{
return Array.Empty<AdvisoryStatementEntry>();
}
var entries = records
.Select(static record =>
{
var advisory = CanonicalJsonSerializer.Deserialize<Advisory>(record.Payload.ToJson());
var canonicalJson = CanonicalJsonSerializer.Serialize(advisory);
var (provenance, trust) = ParseMetadata(record.Provenance, record.Trust);
return new AdvisoryStatementEntry(
record.Id,
record.VulnerabilityKey,
record.AdvisoryKey,
canonicalJson,
record.StatementHash.ToImmutableArray(),
record.AsOf,
record.RecordedAt,
record.InputDocumentIds.ToImmutableArray(),
provenance,
trust);
})
.ToList();
return entries;
}
public async ValueTask<IReadOnlyList<AdvisoryConflictEntry>> GetConflictsAsync(
string vulnerabilityKey,
DateTimeOffset? asOf,
CancellationToken cancellationToken)
{
var records = await _conflictStore
.GetConflictsAsync(vulnerabilityKey, asOf, cancellationToken)
.ConfigureAwait(false);
if (records.Count == 0)
{
return Array.Empty<AdvisoryConflictEntry>();
}
var entries = records
.Select(static record =>
{
var canonicalJson = Canonicalize(record.Details);
var (provenance, trust) = ParseMetadata(record.Provenance, record.Trust);
return new AdvisoryConflictEntry(
record.Id,
record.VulnerabilityKey,
canonicalJson,
record.ConflictHash.ToImmutableArray(),
record.AsOf,
record.RecordedAt,
record.StatementIds.ToImmutableArray(),
provenance,
trust);
})
.ToList();
return entries;
}
public async ValueTask AttachStatementProvenanceAsync(
Guid statementId,
DsseProvenance dsse,
TrustInfo trust,
CancellationToken cancellationToken)
{
ArgumentNullException.ThrowIfNull(dsse);
ArgumentNullException.ThrowIfNull(trust);
var (provenanceDoc, trustDoc) = BuildMetadata(dsse, trust);
if (provenanceDoc is null || trustDoc is null)
{
throw new InvalidOperationException("Failed to build provenance documents.");
}
await _statementStore
.UpdateProvenanceAsync(statementId, provenanceDoc, trustDoc, cancellationToken)
.ConfigureAwait(false);
}
private static readonly JsonWriterOptions CanonicalWriterOptions = new()
{
Encoder = JavaScriptEncoder.UnsafeRelaxedJsonEscaping,
Indented = false,
SkipValidation = false,
};
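// Renders the BSON payload as canonical JSON: object members are emitted in ordinal key order with no indentation, so equivalent documents serialize to identical bytes.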
private static string Canonicalize(BsonDocument document)
{
using var json = JsonDocument.Parse(document.ToJson());
using var stream = new MemoryStream();
using (var writer = new Utf8JsonWriter(stream, CanonicalWriterOptions))
{
WriteCanonical(json.RootElement, writer);
}
return Encoding.UTF8.GetString(stream.ToArray());
}
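// Projects DsseProvenance/TrustInfo into detached "provenance" and "trust" documents by attaching them to a scratch BsonDocument; returns (null, null) when either piece is missing.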
private static (BsonDocument? Provenance, BsonDocument? Trust) BuildMetadata(DsseProvenance? provenance, TrustInfo? trust)
{
if (provenance is null || trust is null)
{
return (null, null);
}
var metadata = new BsonDocument();
metadata.AttachDsseProvenance(provenance, trust);
var provenanceDoc = metadata.TryGetValue("provenance", out var provenanceValue)
? (BsonDocument)provenanceValue.DeepClone()
: null;
var trustDoc = metadata.TryGetValue("trust", out var trustValue)
? (BsonDocument)trustValue.DeepClone()
: null;
return (provenanceDoc, trustDoc);
}
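// Reverses BuildMetadata: rebuilds DsseProvenance and TrustInfo from the stored documents, skipping fields that are absent or malformed.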
private static (DsseProvenance?, TrustInfo?) ParseMetadata(BsonDocument? provenanceDoc, BsonDocument? trustDoc)
{
DsseProvenance? dsse = null;
if (provenanceDoc is not null &&
provenanceDoc.TryGetValue("dsse", out var dsseValue) &&
dsseValue is BsonDocument dsseBody)
{
if (TryGetString(dsseBody, "envelopeDigest", out var envelopeDigest) &&
TryGetString(dsseBody, "payloadType", out var payloadType) &&
dsseBody.TryGetValue("key", out var keyValue) &&
keyValue is BsonDocument keyDoc &&
TryGetString(keyDoc, "keyId", out var keyId))
{
var keyInfo = new DsseKeyInfo
{
KeyId = keyId,
Issuer = GetOptionalString(keyDoc, "issuer"),
Algo = GetOptionalString(keyDoc, "algo"),
};
dsse = new DsseProvenance
{
EnvelopeDigest = envelopeDigest,
PayloadType = payloadType,
Key = keyInfo,
Rekor = ParseRekor(dsseBody),
Chain = ParseChain(dsseBody)
};
}
}
TrustInfo? trust = null;
if (trustDoc is not null)
{
trust = new TrustInfo
{
Verified = trustDoc.TryGetValue("verified", out var verifiedValue) && verifiedValue.ToBoolean(),
Verifier = GetOptionalString(trustDoc, "verifier"),
Witnesses = trustDoc.TryGetValue("witnesses", out var witnessValue) && witnessValue.IsInt32 ? witnessValue.AsInt32 : (int?)null,
PolicyScore = trustDoc.TryGetValue("policyScore", out var scoreValue) && scoreValue.IsNumeric ? scoreValue.AsDouble : (double?)null
};
}
return (dsse, trust);
}
private static DsseRekorInfo? ParseRekor(BsonDocument dsseBody)
{
if (!dsseBody.TryGetValue("rekor", out var rekorValue) || !rekorValue.IsBsonDocument)
{
return null;
}
var rekorDoc = rekorValue.AsBsonDocument;
if (!TryGetInt64(rekorDoc, "logIndex", out var logIndex))
{
return null;
}
return new DsseRekorInfo
{
LogIndex = logIndex,
Uuid = GetOptionalString(rekorDoc, "uuid") ?? string.Empty,
IntegratedTime = TryGetInt64(rekorDoc, "integratedTime", out var integratedTime) ? integratedTime : null,
MirrorSeq = TryGetInt64(rekorDoc, "mirrorSeq", out var mirrorSeq) ? mirrorSeq : null
};
}
private static IReadOnlyCollection<DsseChainLink>? ParseChain(BsonDocument dsseBody)
{
if (!dsseBody.TryGetValue("chain", out var chainValue) || !chainValue.IsBsonArray)
{
return null;
}
var links = new List<DsseChainLink>();
foreach (var element in chainValue.AsBsonArray)
{
if (!element.IsBsonDocument)
{
continue;
}
var linkDoc = element.AsBsonDocument;
if (!TryGetString(linkDoc, "type", out var type) ||
!TryGetString(linkDoc, "id", out var id) ||
!TryGetString(linkDoc, "digest", out var digest))
{
continue;
}
links.Add(new DsseChainLink
{
Type = type,
Id = id,
Digest = digest
});
}
return links.Count == 0 ? null : links;
}
private static bool TryGetString(BsonDocument document, string name, out string value)
{
if (document.TryGetValue(name, out var bsonValue) && bsonValue.IsString)
{
value = bsonValue.AsString;
return true;
}
value = string.Empty;
return false;
}
private static string? GetOptionalString(BsonDocument document, string name)
=> document.TryGetValue(name, out var bsonValue) && bsonValue.IsString ? bsonValue.AsString : null;
private static bool TryGetInt64(BsonDocument document, string name, out long value)
{
if (document.TryGetValue(name, out var bsonValue))
{
if (bsonValue.IsInt64)
{
value = bsonValue.AsInt64;
return true;
}
if (bsonValue.IsInt32)
{
value = bsonValue.AsInt32;
return true;
}
if (bsonValue.IsString && long.TryParse(bsonValue.AsString, out var parsed))
{
value = parsed;
return true;
}
}
value = 0;
return false;
}
private static void WriteCanonical(JsonElement element, Utf8JsonWriter writer)
{
switch (element.ValueKind)
{
case JsonValueKind.Object:
writer.WriteStartObject();
foreach (var property in element.EnumerateObject().OrderBy(static p => p.Name, StringComparer.Ordinal))
{
writer.WritePropertyName(property.Name);
WriteCanonical(property.Value, writer);
}
writer.WriteEndObject();
break;
case JsonValueKind.Array:
writer.WriteStartArray();
foreach (var item in element.EnumerateArray())
{
WriteCanonical(item, writer);
}
writer.WriteEndArray();
break;
case JsonValueKind.String:
writer.WriteStringValue(element.GetString());
break;
case JsonValueKind.Number:
writer.WriteRawValue(element.GetRawText());
break;
case JsonValueKind.True:
writer.WriteBooleanValue(true);
break;
case JsonValueKind.False:
writer.WriteBooleanValue(false);
break;
case JsonValueKind.Null:
writer.WriteNullValue();
break;
default:
writer.WriteRawValue(element.GetRawText());
break;
}
}
}
View File
@@ -28,7 +28,15 @@ public sealed class AdvisoryStatementDocument
public DateTime RecordedAt { get; set; }
[BsonElement("payload")]
public BsonDocument Payload { get; set; } = new();
[BsonElement("provenance")]
[BsonIgnoreIfNull]
public BsonDocument? Provenance { get; set; }
[BsonElement("trust")]
[BsonIgnoreIfNull]
public BsonDocument? Trust { get; set; }
[BsonElement("inputDocuments")]
public List<string> InputDocuments { get; set; } = new();
@@ -37,26 +45,30 @@ public sealed class AdvisoryStatementDocument
internal static class AdvisoryStatementDocumentExtensions
{
public static AdvisoryStatementDocument FromRecord(AdvisoryStatementRecord record)
=> new()
{
Id = record.Id.ToString(),
VulnerabilityKey = record.VulnerabilityKey,
AdvisoryKey = record.AdvisoryKey,
StatementHash = record.StatementHash,
AsOf = record.AsOf.UtcDateTime,
RecordedAt = record.RecordedAt.UtcDateTime,
Payload = (BsonDocument)record.Payload.DeepClone(),
InputDocuments = record.InputDocumentIds.Select(static id => id.ToString()).ToList(),
};
=> new()
{
Id = record.Id.ToString(),
VulnerabilityKey = record.VulnerabilityKey,
AdvisoryKey = record.AdvisoryKey,
StatementHash = record.StatementHash,
AsOf = record.AsOf.UtcDateTime,
RecordedAt = record.RecordedAt.UtcDateTime,
Payload = (BsonDocument)record.Payload.DeepClone(),
Provenance = record.Provenance is null ? null : (BsonDocument)record.Provenance.DeepClone(),
Trust = record.Trust is null ? null : (BsonDocument)record.Trust.DeepClone(),
InputDocuments = record.InputDocumentIds.Select(static id => id.ToString()).ToList(),
};
public static AdvisoryStatementRecord ToRecord(this AdvisoryStatementDocument document)
=> new(
Guid.Parse(document.Id),
document.VulnerabilityKey,
document.AdvisoryKey,
document.StatementHash,
DateTime.SpecifyKind(document.AsOf, DateTimeKind.Utc),
DateTime.SpecifyKind(document.RecordedAt, DateTimeKind.Utc),
(BsonDocument)document.Payload.DeepClone(),
document.InputDocuments.Select(static value => Guid.Parse(value)).ToList());
=> new(
Guid.Parse(document.Id),
document.VulnerabilityKey,
document.AdvisoryKey,
document.StatementHash,
DateTime.SpecifyKind(document.AsOf, DateTimeKind.Utc),
DateTime.SpecifyKind(document.RecordedAt, DateTimeKind.Utc),
(BsonDocument)document.Payload.DeepClone(),
document.InputDocuments.Select(static value => Guid.Parse(value)).ToList(),
document.Provenance is null ? null : (BsonDocument)document.Provenance.DeepClone(),
document.Trust is null ? null : (BsonDocument)document.Trust.DeepClone());
}
View File
@@ -4,12 +4,14 @@ using MongoDB.Bson;
namespace StellaOps.Concelier.Storage.Mongo.Statements;
public sealed record AdvisoryStatementRecord(
Guid Id,
string VulnerabilityKey,
string AdvisoryKey,
byte[] StatementHash,
DateTimeOffset AsOf,
DateTimeOffset RecordedAt,
BsonDocument Payload,
IReadOnlyList<Guid> InputDocumentIds);
public sealed record AdvisoryStatementRecord(
Guid Id,
string VulnerabilityKey,
string AdvisoryKey,
byte[] StatementHash,
DateTimeOffset AsOf,
DateTimeOffset RecordedAt,
BsonDocument Payload,
IReadOnlyList<Guid> InputDocumentIds,
BsonDocument? Provenance = null,
BsonDocument? Trust = null);
View File
@@ -3,23 +3,31 @@ using System.Collections.Generic;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using MongoDB.Driver;
using MongoDB.Bson;
using MongoDB.Driver;
namespace StellaOps.Concelier.Storage.Mongo.Statements;
public interface IAdvisoryStatementStore
{
ValueTask InsertAsync(
IReadOnlyCollection<AdvisoryStatementRecord> statements,
CancellationToken cancellationToken,
IClientSessionHandle? session = null);
ValueTask<IReadOnlyList<AdvisoryStatementRecord>> GetStatementsAsync(
string vulnerabilityKey,
DateTimeOffset? asOf,
CancellationToken cancellationToken,
IClientSessionHandle? session = null);
}
public interface IAdvisoryStatementStore
{
ValueTask InsertAsync(
IReadOnlyCollection<AdvisoryStatementRecord> statements,
CancellationToken cancellationToken,
IClientSessionHandle? session = null);
ValueTask<IReadOnlyList<AdvisoryStatementRecord>> GetStatementsAsync(
string vulnerabilityKey,
DateTimeOffset? asOf,
CancellationToken cancellationToken,
IClientSessionHandle? session = null);
ValueTask UpdateProvenanceAsync(
Guid statementId,
BsonDocument provenance,
BsonDocument trust,
CancellationToken cancellationToken,
IClientSessionHandle? session = null);
}
public sealed class AdvisoryStatementStore : IAdvisoryStatementStore
{
@@ -63,13 +71,13 @@ public sealed class AdvisoryStatementStore : IAdvisoryStatementStore
}
}
public async ValueTask<IReadOnlyList<AdvisoryStatementRecord>> GetStatementsAsync(
string vulnerabilityKey,
DateTimeOffset? asOf,
CancellationToken cancellationToken,
IClientSessionHandle? session = null)
{
ArgumentException.ThrowIfNullOrWhiteSpace(vulnerabilityKey);
var filter = Builders<AdvisoryStatementDocument>.Filter.Eq(document => document.VulnerabilityKey, vulnerabilityKey);
@@ -88,6 +96,31 @@ public sealed class AdvisoryStatementStore : IAdvisoryStatementStore
.ToListAsync(cancellationToken)
.ConfigureAwait(false);
return documents.Select(static document => document.ToRecord()).ToList();
}
public async ValueTask UpdateProvenanceAsync(
Guid statementId,
BsonDocument provenance,
BsonDocument trust,
CancellationToken cancellationToken,
IClientSessionHandle? session = null)
{
ArgumentNullException.ThrowIfNull(provenance);
ArgumentNullException.ThrowIfNull(trust);
var filter = Builders<AdvisoryStatementDocument>.Filter.Eq(document => document.Id, statementId.ToString());
var update = Builders<AdvisoryStatementDocument>.Update
.Set(document => document.Provenance, provenance)
.Set(document => document.Trust, trust);
var result = session is null
? await _collection.UpdateOneAsync(filter, update, cancellationToken: cancellationToken).ConfigureAwait(false)
: await _collection.UpdateOneAsync(session, filter, update, cancellationToken: cancellationToken).ConfigureAwait(false);
if (result.MatchedCount == 0)
{
throw new InvalidOperationException($"Statement {statementId} not found.");
}
}
}
View File
@@ -15,5 +15,6 @@
<ProjectReference Include="..\StellaOps.Concelier.Core\StellaOps.Concelier.Core.csproj" />
<ProjectReference Include="..\StellaOps.Concelier.Models\StellaOps.Concelier.Models.csproj" />
<ProjectReference Include="..\..\..\__Libraries\StellaOps.Ingestion.Telemetry\StellaOps.Ingestion.Telemetry.csproj" />
<ProjectReference Include="..\..\..\__Libraries\StellaOps.Provenance.Mongo\StellaOps.Provenance.Mongo.csproj" />
</ItemGroup>
</Project>
View File
@@ -4,21 +4,22 @@ using System.Collections.Immutable;
using System.Linq;
using System.Text.Json;
using System.Threading;
using System.Threading.Tasks;
using StellaOps.Concelier.Core.Events;
using StellaOps.Concelier.Models;
using StellaOps.Provenance.Mongo;
using Xunit;
namespace StellaOps.Concelier.Core.Tests.Events;
public sealed class AdvisoryEventLogTests
{
[Fact]
public async Task AppendAsync_PersistsCanonicalStatementEntries()
{
var repository = new FakeRepository();
var timeProvider = new FixedTimeProvider(DateTimeOffset.UtcNow);
var log = new AdvisoryEventLog(repository, timeProvider);
var advisory = new Advisory(
"adv-1",
@@ -48,9 +49,54 @@ public sealed class AdvisoryEventLogTests
Assert.Equal("cve-2025-0001", entry.VulnerabilityKey);
Assert.Equal("adv-1", entry.AdvisoryKey);
Assert.Equal(DateTimeOffset.Parse("2025-10-03T00:00:00Z"), entry.AsOf);
Assert.Contains("\"advisoryKey\":\"adv-1\"", entry.CanonicalJson);
Assert.NotEqual(ImmutableArray<byte>.Empty, entry.StatementHash);
}
[Fact]
public async Task AppendAsync_AttachesDsseMetadataFromAdvisoryProvenance()
{
var repository = new FakeRepository();
var timeProvider = new FixedTimeProvider(DateTimeOffset.Parse("2025-11-11T00:00:00Z"));
var log = new AdvisoryEventLog(repository, timeProvider);
var dsseMetadata = new AdvisoryProvenance(
source: "attestor",
kind: "dsse",
value: BuildDsseMetadataJson(),
recordedAt: DateTimeOffset.Parse("2025-11-10T00:00:00Z"));
var advisory = new Advisory(
"adv-2",
"DSSE-backed",
summary: null,
language: "en",
published: DateTimeOffset.Parse("2025-11-09T00:00:00Z"),
modified: DateTimeOffset.Parse("2025-11-10T00:00:00Z"),
severity: "medium",
exploitKnown: false,
aliases: new[] { "CVE-2025-7777" },
references: Array.Empty<AdvisoryReference>(),
affectedPackages: Array.Empty<AffectedPackage>(),
cvssMetrics: Array.Empty<CvssMetric>(),
provenance: new[] { dsseMetadata });
var statementInput = new AdvisoryStatementInput(
VulnerabilityKey: "CVE-2025-7777",
Advisory: advisory,
AsOf: DateTimeOffset.Parse("2025-11-10T12:00:00Z"),
InputDocumentIds: Array.Empty<Guid>());
await log.AppendAsync(new AdvisoryEventAppendRequest(new[] { statementInput }), CancellationToken.None);
var entry = Assert.Single(repository.InsertedStatements);
Assert.NotNull(entry.Provenance);
Assert.NotNull(entry.Trust);
Assert.Equal("sha256:feedface", entry.Provenance!.EnvelopeDigest);
Assert.Equal(1337, entry.Provenance.Rekor!.LogIndex);
Assert.True(entry.Trust!.Verified);
Assert.Equal("Authority@stella", entry.Trust.Verifier);
}
[Fact]
public async Task AppendAsync_PersistsConflictsWithCanonicalizedJson()
@@ -190,8 +236,8 @@ public sealed class AdvisoryEventLogTests
Assert.Equal("{\"reason\":\"conflict\"}", replay.Conflicts[0].CanonicalJson);
}
private sealed class FakeRepository : IAdvisoryEventRepository
{
public List<AdvisoryStatementEntry> InsertedStatements { get; } = new();
public List<AdvisoryConflictEntry> InsertedConflicts { get; } = new();
@@ -217,21 +263,61 @@ public sealed class AdvisoryEventLogTests
string.Equals(entry.VulnerabilityKey, vulnerabilityKey, StringComparison.Ordinal) &&
(!asOf.HasValue || entry.AsOf <= asOf.Value)).ToList());
public ValueTask<IReadOnlyList<AdvisoryConflictEntry>> GetConflictsAsync(string vulnerabilityKey, DateTimeOffset? asOf, CancellationToken cancellationToken)
=> ValueTask.FromResult<IReadOnlyList<AdvisoryConflictEntry>>(StoredConflicts.Where(entry =>
string.Equals(entry.VulnerabilityKey, vulnerabilityKey, StringComparison.Ordinal) &&
(!asOf.HasValue || entry.AsOf <= asOf.Value)).ToList());
public ValueTask AttachStatementProvenanceAsync(
Guid statementId,
DsseProvenance provenance,
TrustInfo trust,
CancellationToken cancellationToken)
=> ValueTask.CompletedTask;
}
private sealed class FixedTimeProvider : TimeProvider
{
private readonly DateTimeOffset _now;
public FixedTimeProvider(DateTimeOffset now)
{
_now = now.ToUniversalTime();
}
public override DateTimeOffset GetUtcNow() => _now;
}
private static string BuildDsseMetadataJson()
{
var payload = new
{
dsse = new
{
envelopeDigest = "sha256:feedface",
payloadType = "application/vnd.in-toto+json",
key = new
{
keyId = "cosign:SHA256-PKIX:fixture",
issuer = "Authority@stella",
algo = "Ed25519"
},
rekor = new
{
logIndex = 1337,
uuid = "11111111-2222-3333-4444-555555555555",
integratedTime = 1731081600
}
},
trust = new
{
verified = true,
verifier = "Authority@stella",
witnesses = 1,
policyScore = 1.0
}
};
return JsonSerializer.Serialize(payload, new JsonSerializerOptions(JsonSerializerDefaults.Web));
}
}
View File
@@ -3,11 +3,12 @@ using System.Collections.Generic;
using System.Collections.Immutable;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using StellaOps.Concelier.Core.Events;
using StellaOps.Concelier.Core.Noise;
using StellaOps.Concelier.Models;
using StellaOps.Provenance.Mongo;
using Xunit;
namespace StellaOps.Concelier.Core.Tests.Noise;
@@ -249,12 +250,19 @@ public sealed class NoisePriorServiceTests
_replay = replay;
}
public ValueTask AppendAsync(AdvisoryEventAppendRequest request, CancellationToken cancellationToken)
=> throw new NotSupportedException("Append operations are not required for tests.");
public ValueTask<AdvisoryReplay> ReplayAsync(string vulnerabilityKey, DateTimeOffset? asOf, CancellationToken cancellationToken)
=> ValueTask.FromResult(_replay);
public ValueTask AttachStatementProvenanceAsync(
Guid statementId,
DsseProvenance provenance,
TrustInfo trust,
CancellationToken cancellationToken)
=> ValueTask.CompletedTask;
}
private sealed class FakeNoisePriorRepository : INoisePriorRepository
{
View File
@@ -1,110 +1,223 @@
using System;
using System.Collections.Immutable;
using System.Linq;
using System.Text;
using System.Collections.Generic;
using System.Threading;
using System.Threading.Tasks;
using MongoDB.Bson;
using MongoDB.Driver;
using StellaOps.Concelier.Core.Events;
using StellaOps.Concelier.Models;
using StellaOps.Concelier.Storage.Mongo.Conflicts;
using StellaOps.Concelier.Storage.Mongo.Events;
using StellaOps.Concelier.Storage.Mongo.Statements;
using StellaOps.Concelier.Storage.Mongo;
using StellaOps.Concelier.Testing;
using StellaOps.Cryptography;
using StellaOps.Provenance.Mongo;
using Xunit;
namespace StellaOps.Concelier.Storage.Mongo.Tests;
[Collection("mongo-fixture")]
public sealed class MongoAdvisoryEventRepositoryTests
{
private readonly IMongoDatabase _database;
private readonly MongoAdvisoryEventRepository _repository;
private static readonly ICryptoHash Hash = CryptoHashFactory.CreateDefault();
public MongoAdvisoryEventRepositoryTests(MongoIntegrationFixture fixture)
{
_database = fixture.Database ?? throw new ArgumentNullException(nameof(fixture.Database));
var statementStore = new AdvisoryStatementStore(_database);
var conflictStore = new AdvisoryConflictStore(_database);
_repository = new MongoAdvisoryEventRepository(statementStore, conflictStore);
}
[Fact]
public async Task InsertAndFetchStatements_RoundTripsCanonicalPayload()
{
var advisory = CreateSampleAdvisory("CVE-2025-7777", "Sample advisory");
var canonicalJson = CanonicalJsonSerializer.Serialize(advisory);
var digest = Hash.ComputeHash(Encoding.UTF8.GetBytes(canonicalJson), HashAlgorithms.Sha256);
var hash = ImmutableArray.Create(digest);
var entry = new AdvisoryStatementEntry(
Guid.NewGuid(),
"CVE-2025-7777",
"CVE-2025-7777",
canonicalJson,
hash,
DateTimeOffset.Parse("2025-10-19T14:00:00Z"),
DateTimeOffset.Parse("2025-10-19T14:05:00Z"),
ImmutableArray<Guid>.Empty);
await _repository.InsertStatementsAsync(new[] { entry }, CancellationToken.None);
var results = await _repository.GetStatementsAsync("CVE-2025-7777", null, CancellationToken.None);
var snapshot = Assert.Single(results);
Assert.Equal(entry.StatementId, snapshot.StatementId);
Assert.Equal(entry.CanonicalJson, snapshot.CanonicalJson);
Assert.True(entry.StatementHash.SequenceEqual(snapshot.StatementHash));
}
[Fact]
public async Task InsertAndFetchConflicts_PreservesDetails()
{
var detailJson = CanonicalJsonSerializer.Serialize(new ConflictPayload("severity", "mismatch"));
var digest = Hash.ComputeHash(Encoding.UTF8.GetBytes(detailJson), HashAlgorithms.Sha256);
var hash = ImmutableArray.Create(digest);
var statementIds = ImmutableArray.Create(Guid.NewGuid(), Guid.NewGuid());
var entry = new AdvisoryConflictEntry(
Guid.NewGuid(),
"CVE-2025-4242",
detailJson,
hash,
DateTimeOffset.Parse("2025-10-19T15:00:00Z"),
DateTimeOffset.Parse("2025-10-19T15:05:00Z"),
statementIds);
await _repository.InsertConflictsAsync(new[] { entry }, CancellationToken.None);
var results = await _repository.GetConflictsAsync("CVE-2025-4242", null, CancellationToken.None);
var conflict = Assert.Single(results);
Assert.Equal(entry.CanonicalJson, conflict.CanonicalJson);
Assert.True(entry.StatementIds.SequenceEqual(conflict.StatementIds));
Assert.True(entry.ConflictHash.SequenceEqual(conflict.ConflictHash));
}
[Fact]
public async Task InsertStatementsAsync_PersistsProvenanceMetadata()
{
var advisory = CreateSampleAdvisory("CVE-2025-8888", "Metadata coverage");
var canonicalJson = CanonicalJsonSerializer.Serialize(advisory);
var digest = Hash.ComputeHash(Encoding.UTF8.GetBytes(canonicalJson), HashAlgorithms.Sha256);
var hash = ImmutableArray.Create(digest);
var (dsse, trust) = CreateSampleDsseMetadata();
var entry = new AdvisoryStatementEntry(
Guid.NewGuid(),
"CVE-2025-8888",
"CVE-2025-8888",
canonicalJson,
hash,
DateTimeOffset.Parse("2025-10-20T10:00:00Z"),
DateTimeOffset.Parse("2025-10-20T10:05:00Z"),
ImmutableArray<Guid>.Empty,
dsse,
trust);
await _repository.InsertStatementsAsync(new[] { entry }, CancellationToken.None);
var statements = _database.GetCollection<BsonDocument>(MongoStorageDefaults.Collections.AdvisoryStatements);
var stored = await statements
.Find(Builders<BsonDocument>.Filter.Eq("_id", entry.StatementId.ToString()))
.FirstOrDefaultAsync();
Assert.NotNull(stored);
var provenance = stored!["provenance"].AsBsonDocument["dsse"].AsBsonDocument;
Assert.Equal(dsse.EnvelopeDigest, provenance["envelopeDigest"].AsString);
Assert.Equal(dsse.Key.KeyId, provenance["key"].AsBsonDocument["keyId"].AsString);
var trustDoc = stored["trust"].AsBsonDocument;
Assert.Equal(trust.Verifier, trustDoc["verifier"].AsString);
Assert.Equal(trust.Witnesses, trustDoc["witnesses"].AsInt32);
var roundTrip = await _repository.GetStatementsAsync("CVE-2025-8888", null, CancellationToken.None);
var hydrated = Assert.Single(roundTrip);
Assert.NotNull(hydrated.Provenance);
Assert.NotNull(hydrated.Trust);
Assert.Equal(dsse.EnvelopeDigest, hydrated.Provenance!.EnvelopeDigest);
Assert.Equal(trust.Verifier, hydrated.Trust!.Verifier);
}
private static Advisory CreateSampleAdvisory(string key, string summary)
{
var provenance = new AdvisoryProvenance("nvd", "document", key, DateTimeOffset.Parse("2025-10-18T00:00:00Z"), new[] { ProvenanceFieldMasks.Advisory });
return new Advisory(
key,
key,
summary,
"en",
DateTimeOffset.Parse("2025-10-17T00:00:00Z"),
DateTimeOffset.Parse("2025-10-18T00:00:00Z"),
"medium",
exploitKnown: false,
aliases: new[] { key },
references: Array.Empty<AdvisoryReference>(),
affectedPackages: Array.Empty<AffectedPackage>(),
cvssMetrics: Array.Empty<CvssMetric>(),
provenance: new[] { provenance });
}
[Fact]
public async Task AttachStatementProvenanceAsync_BackfillsExistingRecord()
{
var advisory = CreateSampleAdvisory("CVE-2025-9999", "Backfill metadata");
var canonicalJson = CanonicalJsonSerializer.Serialize(advisory);
var digest = Hash.ComputeHash(Encoding.UTF8.GetBytes(canonicalJson), HashAlgorithms.Sha256);
var hash = ImmutableArray.Create(digest);
var entry = new AdvisoryStatementEntry(
Guid.NewGuid(),
"CVE-2025-9999",
"CVE-2025-9999",
canonicalJson,
hash,
DateTimeOffset.Parse("2025-10-21T10:00:00Z"),
DateTimeOffset.Parse("2025-10-21T10:05:00Z"),
ImmutableArray<Guid>.Empty);
await _repository.InsertStatementsAsync(new[] { entry }, CancellationToken.None);
var (dsse, trust) = CreateSampleDsseMetadata();
await _repository.AttachStatementProvenanceAsync(entry.StatementId, dsse, trust, CancellationToken.None);
var statements = await _repository.GetStatementsAsync("CVE-2025-9999", null, CancellationToken.None);
var updated = Assert.Single(statements);
Assert.NotNull(updated.Provenance);
Assert.NotNull(updated.Trust);
Assert.Equal(dsse.EnvelopeDigest, updated.Provenance!.EnvelopeDigest);
Assert.Equal(trust.Verifier, updated.Trust!.Verifier);
}
private static (DsseProvenance Provenance, TrustInfo Trust) CreateSampleDsseMetadata()
{
var provenance = new DsseProvenance
{
EnvelopeDigest = "sha256:deadbeef",
PayloadType = "application/vnd.in-toto+json",
Key = new DsseKeyInfo
{
KeyId = "cosign:SHA256-PKIX:TEST",
Issuer = "fulcio",
Algo = "ECDSA"
},
Rekor = new DsseRekorInfo
{
LogIndex = 42,
Uuid = Guid.Parse("2d4d5f7c-1111-4a01-b9cb-aa42022a0a8c").ToString(),
IntegratedTime = 1_700_000_000
}
};
var trust = new TrustInfo
{
Verified = true,
Verifier = "Authority@stella",
Witnesses = 2,
PolicyScore = 0.9
};
return (provenance, trust);
}
private sealed record ConflictPayload(string Type, string Reason);
}
View File
@@ -1,7 +1,6 @@
using System;
using System.Collections.Generic;
using System.Diagnostics.Metrics;
using FluentAssertions;
using Microsoft.Extensions.Logging.Abstractions;
using StellaOps.Concelier.WebService.Services;
using StellaOps.Concelier.WebService.Diagnostics;
@@ -12,7 +11,7 @@ namespace StellaOps.Concelier.WebService.Tests;
public sealed class AdvisoryAiTelemetryTests : IDisposable
{
private readonly MeterListener _listener;
private readonly List<Measurement<long>> _guardrailMeasurements = new();
private readonly List<(long Value, KeyValuePair<string, object?>[] Tags)> _guardrailMeasurements = new();
public AdvisoryAiTelemetryTests()
{
@@ -31,7 +30,7 @@ public sealed class AdvisoryAiTelemetryTests : IDisposable
if (instrument.Meter.Name == AdvisoryAiMetrics.MeterName &&
instrument.Name == "advisory_ai_guardrail_blocks_total")
{
_guardrailMeasurements.Add(new Measurement<long>(measurement, tags, state));
_guardrailMeasurements.Add((measurement, tags.ToArray()));
}
});
_listener.Start();
@@ -58,10 +57,20 @@ public sealed class AdvisoryAiTelemetryTests : IDisposable
Duration: TimeSpan.FromMilliseconds(5),
GuardrailCounts: guardrailCounts));
_guardrailMeasurements.Should().ContainSingle();
var measurement = _guardrailMeasurements[0];
measurement.Value.Should().Be(2);
measurement.Tags.Should().Contain(tag => tag.Key == "cache" && (string?)tag.Value == "hit");
var measurement = Assert.Single(_guardrailMeasurements);
Assert.Equal(2, measurement.Value);
var cacheHitTagFound = false;
foreach (var tag in measurement.Tags)
{
if (tag.Key == "cache" && (string?)tag.Value == "hit")
{
cacheHitTagFound = true;
break;
}
}
Assert.True(cacheHitTagFound, "guardrail measurement should be tagged with cache hit outcome.");
}
public void Dispose()
View File
@@ -31,6 +31,7 @@ using StellaOps.Concelier.Core.Jobs;
using StellaOps.Concelier.Models;
using StellaOps.Concelier.Merge.Services;
using StellaOps.Concelier.Storage.Mongo;
using StellaOps.Concelier.Storage.Mongo.Advisories;
using StellaOps.Concelier.Storage.Mongo.Observations;
using StellaOps.Concelier.Core.Raw;
using StellaOps.Concelier.WebService.Jobs;
@@ -265,42 +266,46 @@ public sealed class WebServiceEndpointsTests : IAsyncLifetime
CreateAdvisoryRawDocument("tenant-a", "nvd", "tenant-a:chunk:newest", newerHash, newestRaw.DeepClone().AsBsonDocument),
CreateAdvisoryRawDocument("tenant-a", "nvd", "tenant-a:chunk:older", olderHash, olderRaw.DeepClone().AsBsonDocument));
await SeedCanonicalAdvisoriesAsync(
CreateStructuredAdvisory("CVE-2025-0001", "GHSA-2025-0001", "tenant-a:chunk:newest", newerCreatedAt));
using var client = _factory.CreateClient();
var response = await client.GetAsync("/advisories/cve-2025-0001/chunks?tenant=tenant-a&section=summary&format=csaf");
var response = await client.GetAsync("/advisories/cve-2025-0001/chunks?tenant=tenant-a&section=workaround");
response.EnsureSuccessStatusCode();
var payload = await response.Content.ReadAsStringAsync();
using var document = JsonDocument.Parse(payload);
var root = document.RootElement;
Assert.Equal("cve-2025-0001", root.GetProperty("advisoryKey").GetString());
Assert.Equal("CVE-2025-0001", root.GetProperty("advisoryKey").GetString());
Assert.Equal(1, root.GetProperty("total").GetInt32());
Assert.False(root.GetProperty("truncated").GetBoolean());
var chunk = Assert.Single(root.GetProperty("chunks").EnumerateArray());
Assert.Equal("summary", chunk.GetProperty("section").GetString());
Assert.Equal("summary.intro", chunk.GetProperty("paragraphId").GetString());
var text = chunk.GetProperty("text").GetString();
Assert.False(string.IsNullOrWhiteSpace(text));
Assert.Contains("deterministic summary paragraph", text, StringComparison.OrdinalIgnoreCase);
var entry = Assert.Single(root.GetProperty("entries").EnumerateArray());
Assert.Equal("workaround", entry.GetProperty("type").GetString());
Assert.Equal("tenant-a:chunk:newest", entry.GetProperty("documentId").GetString());
Assert.Equal("/references/0", entry.GetProperty("fieldPath").GetString());
Assert.False(string.IsNullOrWhiteSpace(entry.GetProperty("chunkId").GetString()));
var metadata = chunk.GetProperty("metadata");
Assert.Equal("summary.intro", metadata.GetProperty("path").GetString());
Assert.Equal("csaf", metadata.GetProperty("format").GetString());
var content = entry.GetProperty("content");
Assert.Equal("Vendor guidance", content.GetProperty("title").GetString());
Assert.Equal("Apply configuration change immediately.", content.GetProperty("description").GetString());
Assert.Equal("https://vendor.example/workaround", content.GetProperty("url").GetString());
var sources = root.GetProperty("sources").EnumerateArray().ToArray();
Assert.Equal(2, sources.Length);
Assert.Equal("tenant-a:chunk:newest", sources[0].GetProperty("observationId").GetString());
Assert.Equal("tenant-a:chunk:older", sources[1].GetProperty("observationId").GetString());
Assert.All(
sources,
source => Assert.True(string.Equals("csaf", source.GetProperty("format").GetString(), StringComparison.OrdinalIgnoreCase)));
var provenance = entry.GetProperty("provenance");
Assert.Equal("nvd", provenance.GetProperty("source").GetString());
Assert.Equal("workaround", provenance.GetProperty("kind").GetString());
Assert.Equal("tenant-a:chunk:newest", provenance.GetProperty("value").GetString());
Assert.Contains(
"/references/0",
provenance.GetProperty("fieldMask").EnumerateArray().Select(element => element.GetString()));
}
[Fact]
public async Task AdvisoryChunksEndpoint_ReturnsNotFoundWhenAdvisoryMissing()
{
await SeedObservationDocumentsAsync(BuildSampleObservationDocuments());
await SeedCanonicalAdvisoriesAsync();
using var client = _factory.CreateClient();
var response = await client.GetAsync("/advisories/cve-2099-9999/chunks?tenant=tenant-a");
@@ -526,6 +531,12 @@ public sealed class WebServiceEndpointsTests : IAsyncLifetime
public async Task AdvisoryChunksEndpoint_EmitsRequestAndCacheMetrics()
{
await SeedObservationDocumentsAsync(BuildSampleObservationDocuments());
await SeedCanonicalAdvisoriesAsync(
CreateStructuredAdvisory(
"CVE-2025-0001",
"GHSA-2025-0001",
"tenant-a:nvd:alpha:1",
new DateTimeOffset(2025, 1, 5, 0, 0, 0, TimeSpan.Zero)));
using var client = _factory.CreateClient();
@@ -588,6 +599,12 @@ public sealed class WebServiceEndpointsTests : IAsyncLifetime
raw);
await SeedObservationDocumentsAsync(new[] { document });
await SeedCanonicalAdvisoriesAsync(
CreateStructuredAdvisory(
"CVE-2025-GUARD",
"GHSA-2025-GUARD",
"tenant-a:chunk:1",
new DateTimeOffset(2025, 2, 1, 0, 0, 0, TimeSpan.Zero)));
using var client = _factory.CreateClient();
@@ -1936,6 +1953,111 @@ public sealed class WebServiceEndpointsTests : IAsyncLifetime
}
}
[Fact]
public async Task StatementProvenanceEndpointAttachesMetadata()
{
var tenant = "tenant-provenance";
var vulnerabilityKey = "CVE-2025-9200";
var statementId = Guid.NewGuid();
var recordedAt = DateTimeOffset.Parse("2025-03-01T00:00:00Z", CultureInfo.InvariantCulture);
using (var scope = _factory.Services.CreateScope())
{
var eventLog = scope.ServiceProvider.GetRequiredService<IAdvisoryEventLog>();
var advisory = new Advisory(
advisoryKey: vulnerabilityKey,
title: "Provenance seed",
summary: "Ready for DSSE metadata",
language: "en",
published: recordedAt.AddDays(-1),
modified: recordedAt,
severity: "high",
exploitKnown: false,
aliases: new[] { vulnerabilityKey },
references: Array.Empty<AdvisoryReference>(),
affectedPackages: Array.Empty<AffectedPackage>(),
cvssMetrics: Array.Empty<CvssMetric>(),
provenance: Array.Empty<AdvisoryProvenance>());
var statementInput = new AdvisoryStatementInput(
vulnerabilityKey,
advisory,
recordedAt,
InputDocumentIds: Array.Empty<Guid>(),
StatementId: statementId,
AdvisoryKey: advisory.AdvisoryKey);
await eventLog.AppendAsync(new AdvisoryEventAppendRequest(new[] { statementInput }), CancellationToken.None);
}
try
{
using var client = _factory.CreateClient();
client.DefaultRequestHeaders.Add("X-Stella-Tenant", tenant);
var response = await client.PostAsync(
$"/events/statements/{statementId}/provenance?tenant={tenant}",
new StringContent(BuildProvenancePayload(), Encoding.UTF8, "application/json"));
Assert.Equal(HttpStatusCode.Accepted, response.StatusCode);
using var validationScope = _factory.Services.CreateScope();
var database = validationScope.ServiceProvider.GetRequiredService<IMongoDatabase>();
var statements = database.GetCollection<BsonDocument>(MongoStorageDefaults.Collections.AdvisoryStatements);
var stored = await statements
.Find(Builders<BsonDocument>.Filter.Eq("_id", statementId.ToString()))
.FirstOrDefaultAsync();
Assert.NotNull(stored);
var dsse = stored!["provenance"].AsBsonDocument["dsse"].AsBsonDocument;
Assert.Equal("sha256:feedface", dsse["envelopeDigest"].AsString);
var trustDoc = stored["trust"].AsBsonDocument;
Assert.True(trustDoc["verified"].AsBoolean);
Assert.Equal("Authority@stella", trustDoc["verifier"].AsString);
}
finally
{
using var cleanupScope = _factory.Services.CreateScope();
var database = cleanupScope.ServiceProvider.GetRequiredService<IMongoDatabase>();
var statements = database.GetCollection<BsonDocument>(MongoStorageDefaults.Collections.AdvisoryStatements);
await statements.DeleteOneAsync(Builders<BsonDocument>.Filter.Eq("_id", statementId.ToString()));
}
}
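// DSSE + trust payload in the shape the provenance endpoint expects; values mirror the assertions above.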
private static string BuildProvenancePayload()
{
var payload = new
{
dsse = new
{
envelopeDigest = "sha256:feedface",
payloadType = "application/vnd.in-toto+json",
key = new
{
keyId = "cosign:SHA256-PKIX:fixture",
issuer = "Authority@stella",
algo = "Ed25519"
},
rekor = new
{
logIndex = 1337,
uuid = "11111111-2222-3333-4444-555555555555",
integratedTime = 1731081600
}
},
trust = new
{
verified = true,
verifier = "Authority@stella",
witnesses = 1,
policyScore = 1.0
}
};
return JsonSerializer.Serialize(payload, new JsonSerializerOptions(JsonSerializerDefaults.Web));
}
private sealed class TempDirectory : IDisposable
{
public string Path { get; }
@@ -1978,6 +2100,121 @@ public sealed class WebServiceEndpointsTests : IAsyncLifetime
private sealed record ProblemDocument(string? Type, string? Title, int? Status, string? Detail, string? Instance);
private async Task SeedCanonicalAdvisoriesAsync(params Advisory[] advisories)
{
using var scope = _factory.Services.CreateScope();
var database = scope.ServiceProvider.GetRequiredService<IMongoDatabase>();
await DropCollectionIfExistsAsync(database, MongoStorageDefaults.Collections.Advisory);
await DropCollectionIfExistsAsync(database, MongoStorageDefaults.Collections.Alias);
if (advisories.Length == 0)
{
return;
}
var store = scope.ServiceProvider.GetRequiredService<IAdvisoryStore>();
foreach (var advisory in advisories)
{
await store.UpsertAsync(advisory, CancellationToken.None);
}
}
private static async Task DropCollectionIfExistsAsync(IMongoDatabase database, string collectionName)
{
try
{
await database.DropCollectionAsync(collectionName);
}
catch (MongoCommandException ex) when (ex.CodeName == "NamespaceNotFound" || ex.Message.Contains("ns not found", StringComparison.OrdinalIgnoreCase))
{
}
}
private static Advisory CreateStructuredAdvisory(
string advisoryKey,
string alias,
string observationId,
DateTimeOffset recordedAt)
{
const string WorkaroundTitle = "Vendor guidance";
const string WorkaroundSummary = "Apply configuration change immediately.";
const string WorkaroundUrl = "https://vendor.example/workaround";
var reference = new AdvisoryReference(
WorkaroundUrl,
kind: "workaround",
sourceTag: WorkaroundTitle,
summary: WorkaroundSummary,
new AdvisoryProvenance(
"nvd",
"workaround",
observationId,
recordedAt,
new[] { "/references/0" }));
var affectedRange = new AffectedVersionRange(
rangeKind: "semver",
introducedVersion: "1.0.0",
fixedVersion: "1.1.0",
lastAffectedVersion: null,
rangeExpression: ">=1.0.0,<1.1.0",
new AdvisoryProvenance(
"nvd",
"affected",
observationId,
recordedAt,
new[] { "/affectedPackages/0/versionRanges/0" }));
var affectedPackage = new AffectedPackage(
type: AffectedPackageTypes.SemVer,
identifier: "pkg:npm/demo",
versionRanges: new[] { affectedRange },
statuses: Array.Empty<AffectedPackageStatus>(),
provenance: new[]
{
new AdvisoryProvenance(
"nvd",
"affected",
observationId,
recordedAt,
new[] { "/affectedPackages/0" })
},
normalizedVersions: Array.Empty<NormalizedVersionRule>());
var cvss = new CvssMetric(
"3.1",
"CVSS:3.1/AV:N/AC:L/PR:N/UI:N/S:U/C:H/I:H/A:H",
9.8,
"critical",
new AdvisoryProvenance(
"nvd",
"cvss",
observationId,
recordedAt,
new[] { "/cvssMetrics/0" }));
var advisory = new Advisory(
advisoryKey,
title: "Fixture advisory",
summary: "Structured payload fixture",
language: "en",
published: recordedAt,
modified: recordedAt,
severity: "critical",
exploitKnown: false,
aliases: string.IsNullOrWhiteSpace(alias) ? new[] { advisoryKey } : new[] { advisoryKey, alias },
references: new[] { reference },
affectedPackages: new[] { affectedPackage },
cvssMetrics: new[] { cvss },
provenance: new[]
{
new AdvisoryProvenance("nvd", "advisory", observationId, recordedAt)
});
return advisory;
}
private async Task SeedAdvisoryRawDocumentsAsync(params BsonDocument[] documents)
{
var client = new MongoClient(_runner.ConnectionString);
View File
@@ -7,6 +7,7 @@ using OpenTelemetry.Metrics;
using OpenTelemetry.Resources;
using OpenTelemetry.Trace;
using StellaOps.Excititor.WebService.Options;
using StellaOps.Excititor.WebService.Telemetry;
using StellaOps.Ingestion.Telemetry;
namespace StellaOps.Excititor.WebService.Extensions;
@@ -64,6 +65,7 @@ internal static class TelemetryExtensions
{
metrics
.AddMeter(IngestionTelemetry.MeterName)
.AddMeter(EvidenceTelemetry.MeterName)
.AddAspNetCoreInstrumentation()
.AddHttpClientInstrumentation()
.AddRuntimeInstrumentation();
View File
@@ -29,6 +29,7 @@ using StellaOps.Excititor.WebService.Options;
using StellaOps.Excititor.WebService.Services;
using StellaOps.Excititor.Core.Aoc;
using StellaOps.Excititor.WebService.Contracts;
using StellaOps.Excititor.WebService.Telemetry;
using MongoDB.Driver;
using MongoDB.Bson;
@@ -216,6 +217,7 @@ app.MapPost("/ingest/vex", async (
}
catch (ExcititorAocGuardException guardException)
{
EvidenceTelemetry.RecordGuardViolations(tenant, "ingest", guardException);
logger.LogWarning(
guardException,
"AOC guard rejected VEX ingest tenant={Tenant} digest={Digest}",
@@ -478,8 +480,27 @@ app.MapGet("/v1/vex/observations/{vulnerabilityId}/{productKey}", async (
since,
limit);
var result = await projectionService.QueryAsync(request, cancellationToken).ConfigureAwait(false);
var statements = result.Statements
VexObservationProjectionResult result;
try
{
result = await projectionService.QueryAsync(request, cancellationToken).ConfigureAwait(false);
}
catch (OperationCanceledException)
{
EvidenceTelemetry.RecordObservationOutcome(tenant, "cancelled");
throw;
}
catch
{
EvidenceTelemetry.RecordObservationOutcome(tenant, "error");
throw;
}
var projectionStatements = result.Statements;
EvidenceTelemetry.RecordObservationOutcome(tenant, "success", projectionStatements.Count, result.Truncated);
EvidenceTelemetry.RecordSignatureStatus(tenant, projectionStatements);
var statements = projectionStatements
.Select(ToResponse)
.ToList();
@@ -575,6 +596,7 @@ app.MapPost("/aoc/verify", async (
}
catch (ExcititorAocGuardException guardException)
{
EvidenceTelemetry.RecordGuardViolations(tenant, "aoc_verify", guardException);
checkedCount++;
foreach (var violation in guardException.Violations)
{
View File
@@ -6,6 +6,7 @@ using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using StellaOps.Excititor.Core;
using StellaOps.Excititor.Storage.Mongo;
namespace StellaOps.Excititor.WebService.Services;
View File
@@ -0,0 +1,154 @@
using System;
using System.Collections.Generic;
using System.Diagnostics.Metrics;
using StellaOps.Excititor.Core.Aoc;
using StellaOps.Excititor.WebService.Services;
namespace StellaOps.Excititor.WebService.Telemetry;
internal static class EvidenceTelemetry
{
public const string MeterName = "StellaOps.Excititor.WebService.Evidence";
private static readonly Meter Meter = new(MeterName);
private static readonly Counter<long> ObservationRequestCounter =
Meter.CreateCounter<long>(
"excititor.vex.observation.requests",
unit: "requests",
description: "Number of observation projection requests handled by the evidence APIs.");
private static readonly Histogram<int> ObservationStatementHistogram =
Meter.CreateHistogram<int>(
"excititor.vex.observation.statement_count",
unit: "statements",
description: "Distribution of statements returned per observation projection request.");
private static readonly Counter<long> SignatureStatusCounter =
Meter.CreateCounter<long>(
"excititor.vex.signature.status",
unit: "statements",
description: "Signature verification status counts for observation statements.");
private static readonly Counter<long> GuardViolationCounter =
Meter.CreateCounter<long>(
"excititor.vex.aoc.guard_violations",
unit: "violations",
description: "Aggregated count of AOC guard violations detected by Excititor evidence APIs.");
public static void RecordObservationOutcome(string? tenant, string outcome, int returnedCount = 0, bool truncated = false)
{
var normalizedTenant = NormalizeTenant(tenant);
var tags = new[]
{
new KeyValuePair<string, object?>("tenant", normalizedTenant),
new KeyValuePair<string, object?>("outcome", outcome),
new KeyValuePair<string, object?>("truncated", truncated),
};
ObservationRequestCounter.Add(1, tags);
if (!string.Equals(outcome, "success", StringComparison.OrdinalIgnoreCase))
{
return;
}
ObservationStatementHistogram.Record(
returnedCount,
new[]
{
new KeyValuePair<string, object?>("tenant", normalizedTenant),
new KeyValuePair<string, object?>("outcome", outcome),
});
}
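// Only missing and not-yet-verified signatures are counted; statements whose signature has a VerifiedAt timestamp emit nothing.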
public static void RecordSignatureStatus(string? tenant, IReadOnlyList<VexObservationStatementProjection> statements)
{
if (statements is null || statements.Count == 0)
{
return;
}
var normalizedTenant = NormalizeTenant(tenant);
var missing = 0;
var unverified = 0;
foreach (var statement in statements)
{
var signature = statement.Signature;
if (signature is null)
{
missing++;
continue;
}
if (signature.VerifiedAt is null)
{
unverified++;
}
}
if (missing > 0)
{
SignatureStatusCounter.Add(
missing,
new[]
{
new KeyValuePair<string, object?>("tenant", normalizedTenant),
new KeyValuePair<string, object?>("status", "missing"),
});
}
if (unverified > 0)
{
SignatureStatusCounter.Add(
unverified,
new[]
{
new KeyValuePair<string, object?>("tenant", normalizedTenant),
new KeyValuePair<string, object?>("status", "unverified"),
});
}
}
public static void RecordGuardViolations(string? tenant, string surface, ExcititorAocGuardException exception)
{
var normalizedTenant = NormalizeTenant(tenant);
var normalizedSurface = NormalizeSurface(surface);
if (exception.Violations.IsDefaultOrEmpty)
{
GuardViolationCounter.Add(
1,
new[]
{
new KeyValuePair<string, object?>("tenant", normalizedTenant),
new KeyValuePair<string, object?>("surface", normalizedSurface),
new KeyValuePair<string, object?>("code", exception.PrimaryErrorCode),
});
return;
}
foreach (var violation in exception.Violations)
{
var code = string.IsNullOrWhiteSpace(violation.ErrorCode)
? exception.PrimaryErrorCode
: violation.ErrorCode;
GuardViolationCounter.Add(
1,
new[]
{
new KeyValuePair<string, object?>("tenant", normalizedTenant),
new KeyValuePair<string, object?>("surface", normalizedSurface),
new KeyValuePair<string, object?>("code", code),
});
}
}
private static string NormalizeTenant(string? tenant)
=> string.IsNullOrWhiteSpace(tenant) ? "default" : tenant;
private static string NormalizeSurface(string? surface)
=> string.IsNullOrWhiteSpace(surface) ? "unknown" : surface.ToLowerInvariant();
}
View File
@@ -0,0 +1,30 @@
using System.Diagnostics;
using System.Diagnostics.Metrics;
namespace StellaOps.Findings.Ledger.Observability;
internal static class LedgerMetrics
{
private static readonly Meter Meter = new("StellaOps.Findings.Ledger");
private static readonly Histogram<double> WriteLatencySeconds = Meter.CreateHistogram<double>(
"ledger_write_latency_seconds",
unit: "s",
description: "Latency of successful ledger append operations.");
private static readonly Counter<long> EventsTotal = Meter.CreateCounter<long>(
"ledger_events_total",
description: "Number of ledger events appended.");
public static void RecordWriteSuccess(TimeSpan duration, string? tenantId, string? eventType, string? source)
{
var tags = new TagList
{
{ "tenant", tenantId ?? string.Empty },
{ "event_type", eventType ?? string.Empty },
{ "source", source ?? string.Empty }
};
WriteLatencySeconds.Record(duration.TotalSeconds, tags);
EventsTotal.Add(1, tags);
}
}
View File
@@ -1,8 +1,10 @@
using System.Diagnostics;
using System.Text.Json.Nodes;
using Microsoft.Extensions.Logging;
using StellaOps.Findings.Ledger.Domain;
using StellaOps.Findings.Ledger.Hashing;
using StellaOps.Findings.Ledger.Infrastructure;
using StellaOps.Findings.Ledger.Observability;
namespace StellaOps.Findings.Ledger.Services;
@@ -29,6 +31,8 @@ public sealed class LedgerEventWriteService : ILedgerEventWriteService
public async Task<LedgerWriteResult> AppendAsync(LedgerEventDraft draft, CancellationToken cancellationToken)
{
var stopwatch = Stopwatch.StartNew();
var validationErrors = ValidateDraft(draft);
if (validationErrors.Count > 0)
{
@@ -95,6 +99,9 @@ public sealed class LedgerEventWriteService : ILedgerEventWriteService
{
await _repository.AppendAsync(record, cancellationToken).ConfigureAwait(false);
await _merkleAnchorScheduler.EnqueueAsync(record, cancellationToken).ConfigureAwait(false);
stopwatch.Stop();
LedgerMetrics.RecordWriteSuccess(stopwatch.Elapsed, draft.TenantId, draft.EventType, DetermineSource(draft));
}
catch (Exception ex) when (IsDuplicateKeyException(ex))
{
@@ -116,6 +123,21 @@ public sealed class LedgerEventWriteService : ILedgerEventWriteService
return LedgerWriteResult.Success(record);
}
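// Coarse source label for metrics: drafts tied to a policy run report "policy_run"; otherwise the actor type decides ("operator" maps to "workflow", "integration" stays "integration", anything else is "system").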
private static string DetermineSource(LedgerEventDraft draft)
{
if (draft.SourceRunId.HasValue)
{
return "policy_run";
}
return draft.ActorType switch
{
"operator" => "workflow",
"integration" => "integration",
_ => "system"
};
}
private static bool IsDuplicateKeyException(Exception exception)
{
if (exception is null)
View File
@@ -168,12 +168,16 @@ internal static class ScanEndpoints
var snapshot = await coordinator.GetAsync(parsed, context.RequestAborted).ConfigureAwait(false);
if (snapshot is null)
{
return ProblemResultFactory.Create(
context,
ProblemTypes.NotFound,
"Scan not found",
StatusCodes.Status404NotFound,
detail: "Requested scan could not be located.");
snapshot = await TryResolveSnapshotAsync(scanId, coordinator, cancellationToken).ConfigureAwait(false);
if (snapshot is null)
{
return ProblemResultFactory.Create(
context,
ProblemTypes.NotFound,
"Scan not found",
StatusCodes.Status404NotFound,
detail: "Requested scan could not be located.");
}
}
SurfacePointersDto? surfacePointers = null;
@@ -282,10 +286,12 @@ internal static class ScanEndpoints
private static async Task<IResult> HandleEntryTraceAsync(
string scanId,
IScanCoordinator coordinator,
IEntryTraceResultStore resultStore,
HttpContext context,
CancellationToken cancellationToken)
{
ArgumentNullException.ThrowIfNull(coordinator);
ArgumentNullException.ThrowIfNull(resultStore);
if (!ScanId.TryParse(scanId, out var parsed))
@@ -298,15 +304,25 @@ internal static class ScanEndpoints
detail: "Scan identifier is required.");
}
var result = await resultStore.GetAsync(parsed.Value, cancellationToken).ConfigureAwait(false);
var targetScanId = parsed.Value;
var result = await resultStore.GetAsync(targetScanId, cancellationToken).ConfigureAwait(false);
if (result is null)
{
return ProblemResultFactory.Create(
context,
ProblemTypes.NotFound,
"EntryTrace not found",
StatusCodes.Status404NotFound,
detail: "EntryTrace data is not available for the requested scan.");
var snapshot = await TryResolveSnapshotAsync(scanId, coordinator, cancellationToken).ConfigureAwait(false);
if (snapshot is not null && !string.Equals(snapshot.ScanId.Value, targetScanId, StringComparison.Ordinal))
{
result = await resultStore.GetAsync(snapshot.ScanId.Value, cancellationToken).ConfigureAwait(false);
}
if (result is null)
{
return ProblemResultFactory.Create(
context,
ProblemTypes.NotFound,
"EntryTrace not found",
StatusCodes.Status404NotFound,
detail: "EntryTrace data is not available for the requested scan.");
}
}
var response = new EntryTraceResponse(
@@ -321,10 +337,12 @@ internal static class ScanEndpoints
private static async Task<IResult> HandleRubyPackagesAsync(
string scanId,
IScanCoordinator coordinator,
IRubyPackageInventoryStore inventoryStore,
HttpContext context,
CancellationToken cancellationToken)
{
ArgumentNullException.ThrowIfNull(coordinator);
ArgumentNullException.ThrowIfNull(inventoryStore);
if (!ScanId.TryParse(scanId, out var parsed))
@@ -340,12 +358,27 @@ internal static class ScanEndpoints
var inventory = await inventoryStore.GetAsync(parsed.Value, cancellationToken).ConfigureAwait(false);
if (inventory is null)
{
return ProblemResultFactory.Create(
context,
ProblemTypes.NotFound,
"Ruby packages not found",
StatusCodes.Status404NotFound,
detail: "Ruby package inventory is not available for the requested scan.");
RubyPackageInventory? fallback = null;
if (!LooksLikeScanId(scanId))
{
var snapshot = await TryResolveSnapshotAsync(scanId, coordinator, cancellationToken).ConfigureAwait(false);
if (snapshot is not null)
{
fallback = await inventoryStore.GetAsync(snapshot.ScanId.Value, cancellationToken).ConfigureAwait(false);
}
}
if (fallback is null)
{
return ProblemResultFactory.Create(
context,
ProblemTypes.NotFound,
"Ruby packages not found",
StatusCodes.Status404NotFound,
detail: "Ruby package inventory is not available for the requested scan.");
}
inventory = fallback;
}
var response = new RubyPackagesResponse
@@ -420,4 +453,130 @@ internal static class ScanEndpoints
var trimmed = segment.Trim('/');
return "/" + trimmed;
}
private static async ValueTask<ScanSnapshot?> TryResolveSnapshotAsync(
string identifier,
IScanCoordinator coordinator,
CancellationToken cancellationToken)
{
ArgumentNullException.ThrowIfNull(coordinator);
if (string.IsNullOrWhiteSpace(identifier))
{
return null;
}
var trimmed = identifier.Trim();
var decoded = Uri.UnescapeDataString(trimmed);
if (LooksLikeScanId(decoded))
{
return null;
}
var (reference, digest) = ExtractTargetHints(decoded);
if (reference is null && digest is null)
{
return null;
}
return await coordinator.TryFindByTargetAsync(reference, digest, cancellationToken).ConfigureAwait(false);
}
private static (string? Reference, string? Digest) ExtractTargetHints(string identifier)
{
if (string.IsNullOrWhiteSpace(identifier))
{
return (null, null);
}
var trimmed = identifier.Trim();
if (TryExtractDigest(trimmed, out var digest, out var reference))
{
return (reference, digest);
}
return (trimmed, null);
}
private static bool TryExtractDigest(string candidate, out string? digest, out string? reference)
{
var atIndex = candidate.IndexOf('@');
if (atIndex >= 0 && atIndex < candidate.Length - 1)
{
var digestCandidate = candidate[(atIndex + 1)..];
if (IsDigestValue(digestCandidate))
{
digest = digestCandidate.ToLowerInvariant();
reference = candidate[..atIndex].Trim();
if (string.IsNullOrWhiteSpace(reference))
{
reference = null;
}
return true;
}
}
if (IsDigestValue(candidate))
{
digest = candidate.ToLowerInvariant();
reference = null;
return true;
}
digest = null;
reference = null;
return false;
}
private static bool IsDigestValue(string value)
{
var separatorIndex = value.IndexOf(':');
if (separatorIndex <= 0 || separatorIndex >= value.Length - 1)
{
return false;
}
var algorithm = value[..separatorIndex];
var digestPart = value[(separatorIndex + 1)..];
if (string.IsNullOrWhiteSpace(algorithm) || string.IsNullOrWhiteSpace(digestPart) || digestPart.Length < 32)
{
return false;
}
foreach (var c in digestPart)
{
if (!IsHexChar(c))
{
return false;
}
}
return true;
}
private static bool LooksLikeScanId(string value)
{
if (value.Length != 40)
{
return false;
}
foreach (var c in value)
{
if (!IsHexChar(c))
{
return false;
}
}
return true;
}
private static bool IsHexChar(char c)
=> (c >= '0' && c <= '9')
|| (c >= 'a' && c <= 'f')
|| (c >= 'A' && c <= 'F');
}
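As an illustration of what the resolver above accepts (identifier values below are placeholders, not fixtures from this change), the ruby-packages and entrytrace routes can be addressed by the canonical 40-hex scan id, by an URL-encoded image digest, or by an URL-encoded reference once the submission has been indexed:
GET /api/v1/scans/1111222233334444555566667777888899990000/ruby-packages   (canonical scan id)
GET /api/v1/scans/sha256%3A&lt;digest&gt;/ruby-packages                     (URL-encoded digest)
GET /api/v1/scans/ghcr.io%2Fdemo%2Fapp%3A1.0.0/entrytrace                   (URL-encoded reference)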

View File

@@ -2,9 +2,11 @@ using StellaOps.Scanner.WebService.Domain;
namespace StellaOps.Scanner.WebService.Services;
public interface IScanCoordinator
{
ValueTask<ScanSubmissionResult> SubmitAsync(ScanSubmission submission, CancellationToken cancellationToken);
ValueTask<ScanSnapshot?> GetAsync(ScanId scanId, CancellationToken cancellationToken);
}
public interface IScanCoordinator
{
ValueTask<ScanSubmissionResult> SubmitAsync(ScanSubmission submission, CancellationToken cancellationToken);
ValueTask<ScanSnapshot?> GetAsync(ScanId scanId, CancellationToken cancellationToken);
ValueTask<ScanSnapshot?> TryFindByTargetAsync(string? reference, string? digest, CancellationToken cancellationToken);
}

View File

@@ -7,11 +7,13 @@ namespace StellaOps.Scanner.WebService.Services;
public sealed class InMemoryScanCoordinator : IScanCoordinator
{
private sealed record ScanEntry(ScanSnapshot Snapshot);
private readonly ConcurrentDictionary<string, ScanEntry> scans = new(StringComparer.OrdinalIgnoreCase);
private readonly TimeProvider timeProvider;
private readonly IScanProgressPublisher progressPublisher;
private sealed record ScanEntry(ScanSnapshot Snapshot);
private readonly ConcurrentDictionary<string, ScanEntry> scans = new(StringComparer.OrdinalIgnoreCase);
private readonly ConcurrentDictionary<string, string> scansByDigest = new(StringComparer.OrdinalIgnoreCase);
private readonly ConcurrentDictionary<string, string> scansByReference = new(StringComparer.OrdinalIgnoreCase);
private readonly TimeProvider timeProvider;
private readonly IScanProgressPublisher progressPublisher;
public InMemoryScanCoordinator(TimeProvider timeProvider, IScanProgressPublisher progressPublisher)
{
@@ -37,12 +39,12 @@ public sealed class InMemoryScanCoordinator : IScanCoordinator
eventData[$"meta.{pair.Key}"] = pair.Value;
}
ScanEntry entry = scans.AddOrUpdate(
scanId.Value,
_ => new ScanEntry(new ScanSnapshot(
scanId,
normalizedTarget,
ScanStatus.Pending,
ScanEntry entry = scans.AddOrUpdate(
scanId.Value,
_ => new ScanEntry(new ScanSnapshot(
scanId,
normalizedTarget,
ScanStatus.Pending,
now,
now,
null)),
@@ -59,22 +61,87 @@ public sealed class InMemoryScanCoordinator : IScanCoordinator
return new ScanEntry(snapshot);
}
return existing;
});
var created = entry.Snapshot.CreatedAt == now;
var state = entry.Snapshot.Status.ToString();
progressPublisher.Publish(scanId, state, created ? "queued" : "requeued", eventData);
return ValueTask.FromResult(new ScanSubmissionResult(entry.Snapshot, created));
}
return existing;
});
IndexTarget(scanId.Value, normalizedTarget);
var created = entry.Snapshot.CreatedAt == now;
var state = entry.Snapshot.Status.ToString();
progressPublisher.Publish(scanId, state, created ? "queued" : "requeued", eventData);
return ValueTask.FromResult(new ScanSubmissionResult(entry.Snapshot, created));
}
public ValueTask<ScanSnapshot?> GetAsync(ScanId scanId, CancellationToken cancellationToken)
{
if (scans.TryGetValue(scanId.Value, out var entry))
{
return ValueTask.FromResult<ScanSnapshot?>(entry.Snapshot);
}
return ValueTask.FromResult<ScanSnapshot?>(null);
}
}
if (scans.TryGetValue(scanId.Value, out var entry))
{
return ValueTask.FromResult<ScanSnapshot?>(entry.Snapshot);
}
return ValueTask.FromResult<ScanSnapshot?>(null);
}
public ValueTask<ScanSnapshot?> TryFindByTargetAsync(string? reference, string? digest, CancellationToken cancellationToken)
{
if (!string.IsNullOrWhiteSpace(digest))
{
var normalizedDigest = NormalizeDigest(digest);
if (normalizedDigest is not null &&
scansByDigest.TryGetValue(normalizedDigest, out var digestScanId) &&
scans.TryGetValue(digestScanId, out var digestEntry))
{
return ValueTask.FromResult<ScanSnapshot?>(digestEntry.Snapshot);
}
}
if (!string.IsNullOrWhiteSpace(reference))
{
var normalizedReference = NormalizeReference(reference);
if (normalizedReference is not null &&
scansByReference.TryGetValue(normalizedReference, out var referenceScanId) &&
scans.TryGetValue(referenceScanId, out var referenceEntry))
{
return ValueTask.FromResult<ScanSnapshot?>(referenceEntry.Snapshot);
}
}
return ValueTask.FromResult<ScanSnapshot?>(null);
}
private void IndexTarget(string scanId, ScanTarget target)
{
if (!string.IsNullOrWhiteSpace(target.Digest))
{
scansByDigest[target.Digest!] = scanId;
}
if (!string.IsNullOrWhiteSpace(target.Reference))
{
scansByReference[target.Reference!] = scanId;
}
}
private static string? NormalizeDigest(string? value)
{
if (string.IsNullOrWhiteSpace(value))
{
return null;
}
var trimmed = value.Trim();
return trimmed.Contains(':', StringComparison.Ordinal)
? trimmed.ToLowerInvariant()
: null;
}
private static string? NormalizeReference(string? value)
{
if (string.IsNullOrWhiteSpace(value))
{
return null;
}
return value.Trim();
}
}

View File

@@ -2,7 +2,7 @@
| Task ID | State | Notes |
| --- | --- | --- |
| `SCANNER-ENG-0009` | DOING (2025-11-12) | Added bundler-version metadata + observation summaries, richer CLI output, and the `complex-app` fixture to drive parity validation. |
| `SCANNER-ENG-0009` | DONE (2025-11-13) | Ruby analyzer parity landed end-to-end: Mongo-backed `ruby.packages` inventories, WebService `/api/scans/{scanId}/ruby-packages`, CLI `ruby resolve` + observations, plugin manifest packaging, and targeted tests (`StellaOps.Scanner.Analyzers.Lang.Ruby.Tests`, `StellaOps.Scanner.Worker.Tests`, `StellaOps.Scanner.WebService.Tests --filter FullyQualifiedName~RubyPackages`). |
| `SCANNER-ENG-0016` | DONE (2025-11-10) | RubyLockCollector merged with vendor cache ingestion; workspace overrides, bundler groups, git/path fixture, and offline-kit mirror updated. |
| `SCANNER-ENG-0017` | DONE (2025-11-09) | Build runtime require/autoload graph builder with tree-sitter Ruby per design §4.4, feed EntryTrace hints. |
| `SCANNER-ENG-0018` | DONE (2025-11-09) | Emit Ruby capability + framework surface signals, align with design §4.5 / Sprint 138. |

View File

@@ -0,0 +1,327 @@
using System;
using System.Collections.Concurrent;
using System.Collections.Generic;
using System.Collections.Immutable;
using System.IO;
using System.Net;
using System.Net.Http.Json;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.Extensions.DependencyInjection;
using StellaOps.Scanner.Core.Contracts;
using StellaOps.Scanner.EntryTrace;
using StellaOps.Scanner.EntryTrace.Serialization;
using StellaOps.Scanner.Storage.Catalog;
using StellaOps.Scanner.Storage.Repositories;
using StellaOps.Scanner.WebService.Contracts;
using StellaOps.Scanner.WebService.Domain;
using StellaOps.Scanner.WebService.Services;
using Xunit;
namespace StellaOps.Scanner.WebService.Tests;
public sealed class RubyPackagesEndpointsTests
{
[Fact]
public async Task GetRubyPackagesReturnsNotFoundWhenInventoryMissing()
{
using var secrets = new TestSurfaceSecretsScope();
using var factory = new ScannerApplicationFactory();
using var client = factory.CreateClient();
var response = await client.GetAsync("/api/v1/scans/scan-ruby-missing/ruby-packages");
Assert.Equal(HttpStatusCode.NotFound, response.StatusCode);
}
[Fact]
public async Task GetRubyPackagesReturnsInventory()
{
const string scanId = "scan-ruby-existing";
const string digest = "sha256:feedfacefeedfacefeedfacefeedfacefeedfacefeedfacefeedfacefeedface";
var generatedAt = DateTime.UtcNow.AddMinutes(-10);
using var secrets = new TestSurfaceSecretsScope();
using var factory = new ScannerApplicationFactory();
using (var serviceScope = factory.Services.CreateScope())
{
var repository = serviceScope.ServiceProvider.GetRequiredService<RubyPackageInventoryRepository>();
var document = new RubyPackageInventoryDocument
{
ScanId = scanId,
ImageDigest = digest,
GeneratedAtUtc = generatedAt,
Packages = new List<RubyPackageDocument>
{
new()
{
Id = "pkg:gem/rack@3.1.0",
Name = "rack",
Version = "3.1.0",
Source = "rubygems",
Platform = "ruby",
Groups = new List<string> { "default" },
RuntimeUsed = true,
Provenance = new RubyPackageProvenance("rubygems", "Gemfile.lock", "Gemfile.lock")
}
}
};
await repository.UpsertAsync(document, CancellationToken.None);
}
using var client = factory.CreateClient();
var response = await client.GetAsync($"/api/v1/scans/{scanId}/ruby-packages");
Assert.Equal(HttpStatusCode.OK, response.StatusCode);
var payload = await response.Content.ReadFromJsonAsync<RubyPackagesResponse>();
Assert.NotNull(payload);
Assert.Equal(scanId, payload!.ScanId);
Assert.Equal(digest, payload.ImageDigest);
Assert.Single(payload.Packages);
Assert.Equal("rack", payload.Packages[0].Name);
Assert.Equal("rubygems", payload.Packages[0].Source);
}
[Fact]
public async Task GetRubyPackagesAllowsDigestIdentifier()
{
const string reference = "ghcr.io/demo/ruby-service:1.2.3";
const string digest = "sha256:0123456789abcdef0123456789abcdef0123456789abcdef0123456789abcdef";
var generatedAt = DateTime.UtcNow.AddMinutes(-5);
using var secrets = new TestSurfaceSecretsScope();
using var factory = new ScannerApplicationFactory();
string? scanId = null;
using (var scope = factory.Services.CreateScope())
{
var coordinator = scope.ServiceProvider.GetRequiredService<IScanCoordinator>();
var submission = new ScanSubmission(
new ScanTarget(reference, digest),
Force: false,
ClientRequestId: null,
Metadata: new Dictionary<string, string>());
var result = await coordinator.SubmitAsync(submission, CancellationToken.None);
scanId = result.Snapshot.ScanId.Value;
var resolved = await coordinator.TryFindByTargetAsync(reference, digest, CancellationToken.None);
Assert.NotNull(resolved);
var repository = scope.ServiceProvider.GetRequiredService<RubyPackageInventoryRepository>();
var document = new RubyPackageInventoryDocument
{
ScanId = scanId,
ImageDigest = digest,
GeneratedAtUtc = generatedAt,
Packages = new List<RubyPackageDocument>
{
new()
{
Id = "pkg:gem/rails@7.1.0",
Name = "rails",
Version = "7.1.0",
Source = "rubygems"
}
}
};
await repository.UpsertAsync(document, CancellationToken.None);
}
using var client = factory.CreateClient();
var encodedDigest = Uri.EscapeDataString(digest);
var response = await client.GetAsync($"/api/v1/scans/{encodedDigest}/ruby-packages");
Assert.Equal(HttpStatusCode.OK, response.StatusCode);
var payload = await response.Content.ReadFromJsonAsync<RubyPackagesResponse>();
Assert.NotNull(payload);
Assert.Equal(scanId, payload!.ScanId);
Assert.Equal(digest, payload.ImageDigest);
Assert.Single(payload.Packages);
Assert.Equal("rails", payload.Packages[0].Name);
}
[Fact]
public async Task GetRubyPackagesAllowsReferenceIdentifier()
{
const string reference = "ghcr.io/demo/ruby-service:latest";
const string digest = "sha512:abcdefabcdefabcdefabcdefabcdefabcdefabcdefabcdefabcdefabcdefabcd";
using var secrets = new TestSurfaceSecretsScope();
using var factory = new ScannerApplicationFactory();
string? scanId = null;
using (var scope = factory.Services.CreateScope())
{
var coordinator = scope.ServiceProvider.GetRequiredService<IScanCoordinator>();
var submission = new ScanSubmission(
new ScanTarget(reference, digest),
Force: false,
ClientRequestId: "cli-test",
Metadata: new Dictionary<string, string>());
var result = await coordinator.SubmitAsync(submission, CancellationToken.None);
scanId = result.Snapshot.ScanId.Value;
var resolved = await coordinator.TryFindByTargetAsync(reference, digest, CancellationToken.None);
Assert.NotNull(resolved);
var repository = scope.ServiceProvider.GetRequiredService<RubyPackageInventoryRepository>();
var document = new RubyPackageInventoryDocument
{
ScanId = scanId,
ImageDigest = digest,
GeneratedAtUtc = DateTime.UtcNow.AddMinutes(-2),
Packages = new List<RubyPackageDocument>
{
new()
{
Id = "pkg:gem/sidekiq@7.2.1",
Name = "sidekiq",
Version = "7.2.1",
Source = "rubygems"
}
}
};
await repository.UpsertAsync(document, CancellationToken.None);
}
using var client = factory.CreateClient();
var encodedReference = Uri.EscapeDataString(reference);
var response = await client.GetAsync($"/api/v1/scans/{encodedReference}/ruby-packages");
Assert.Equal(HttpStatusCode.OK, response.StatusCode);
var payload = await response.Content.ReadFromJsonAsync<RubyPackagesResponse>();
Assert.NotNull(payload);
Assert.Equal(scanId, payload!.ScanId);
Assert.Equal(digest, payload.ImageDigest);
Assert.Single(payload.Packages);
Assert.Equal("sidekiq", payload.Packages[0].Name);
}
[Fact]
public async Task GetEntryTraceAllowsDigestIdentifier()
{
const string reference = "ghcr.io/demo/app:2.0.0";
const string digest = "sha256:111122223333444455556666777788889999aaaabbbbccccddddeeeeffff0000";
var generatedAt = DateTimeOffset.UtcNow.AddMinutes(-1);
var plan = new EntryTracePlan(
ImmutableArray.Create("/app/bin/app"),
ImmutableDictionary<string, string>.Empty,
"/workspace",
"appuser",
"/app/bin/app",
EntryTraceTerminalType.Native,
"ruby",
0.85,
ImmutableDictionary<string, string>.Empty);
var terminal = new EntryTraceTerminal(
"/app/bin/app",
EntryTraceTerminalType.Native,
"ruby",
0.85,
ImmutableDictionary<string, string>.Empty,
"appuser",
"/workspace",
ImmutableArray<string>.Empty);
var graph = new EntryTraceGraph(
EntryTraceOutcome.Resolved,
ImmutableArray<EntryTraceNode>.Empty,
ImmutableArray<EntryTraceEdge>.Empty,
ImmutableArray<EntryTraceDiagnostic>.Empty,
ImmutableArray.Create(plan),
ImmutableArray.Create(terminal));
var ndjson = EntryTraceNdjsonWriter.Serialize(
graph,
new EntryTraceNdjsonMetadata("scan-placeholder", digest, generatedAt));
using var secrets = new TestSurfaceSecretsScope();
using var factory = new ScannerApplicationFactory(configureServices: services =>
{
services.AddSingleton<IEntryTraceResultStore, RecordingEntryTraceResultStore>();
});
string? canonicalScanId = null;
using (var scope = factory.Services.CreateScope())
{
var coordinator = scope.ServiceProvider.GetRequiredService<IScanCoordinator>();
var submission = new ScanSubmission(
new ScanTarget(reference, digest),
Force: false,
ClientRequestId: null,
Metadata: new Dictionary<string, string>());
var result = await coordinator.SubmitAsync(submission, CancellationToken.None);
canonicalScanId = result.Snapshot.ScanId.Value;
var store = (RecordingEntryTraceResultStore)scope.ServiceProvider.GetRequiredService<IEntryTraceResultStore>();
store.Set(new EntryTraceResult(canonicalScanId, digest, generatedAt, graph, ndjson));
}
using var client = factory.CreateClient();
var encodedDigest = Uri.EscapeDataString(digest);
var response = await client.GetAsync($"/api/v1/scans/{encodedDigest}/entrytrace");
Assert.Equal(HttpStatusCode.OK, response.StatusCode);
var payload = await response.Content.ReadFromJsonAsync<EntryTraceResponse>();
Assert.NotNull(payload);
Assert.Equal(canonicalScanId, payload!.ScanId);
Assert.Equal(digest, payload.ImageDigest);
Assert.Equal(graph.Plans.Length, payload.Graph.Plans.Length);
}
}
internal sealed class SurfaceSecretsScope : IDisposable
{
private readonly string? _provider;
private readonly string? _root;
public SurfaceSecretsScope()
{
_provider = Environment.GetEnvironmentVariable("SURFACE_SECRETS_PROVIDER");
_root = Environment.GetEnvironmentVariable("SURFACE_SECRETS_ROOT");
Environment.SetEnvironmentVariable("SURFACE_SECRETS_PROVIDER", "file");
Environment.SetEnvironmentVariable("SURFACE_SECRETS_ROOT", Path.GetTempPath());
}
public void Dispose()
{
Environment.SetEnvironmentVariable("SURFACE_SECRETS_PROVIDER", _provider);
Environment.SetEnvironmentVariable("SURFACE_SECRETS_ROOT", _root);
}
}
internal sealed class RecordingEntryTraceResultStore : IEntryTraceResultStore
{
private readonly ConcurrentDictionary<string, EntryTraceResult> _entries = new(StringComparer.OrdinalIgnoreCase);
public void Set(EntryTraceResult result)
{
if (result is null)
{
throw new ArgumentNullException(nameof(result));
}
_entries[result.ScanId] = result;
}
public Task<EntryTraceResult?> GetAsync(string scanId, CancellationToken cancellationToken)
{
if (_entries.TryGetValue(scanId, out var value))
{
return Task.FromResult<EntryTraceResult?>(value);
}
return Task.FromResult<EntryTraceResult?>(null);
}
public Task StoreAsync(EntryTraceResult result, CancellationToken cancellationToken)
{
Set(result);
return Task.CompletedTask;
}
}

View File

@@ -1,665 +1,84 @@
using System;
using System.Collections.Generic;
using System.Collections.Immutable;
using System.IO;
using System.Net;
using System.Net.Http.Json;
using System.Linq;
using System.Text.Json;
using System.Text.Json.Serialization;
using System.Threading.Tasks;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.AspNetCore.Http;
using Microsoft.AspNetCore.Mvc.Testing;
using Microsoft.AspNetCore.TestHost;
using Microsoft.Extensions.DependencyInjection;
using StellaOps.Scanner.Core.Contracts;
using StellaOps.Scanner.EntryTrace;
using StellaOps.Scanner.EntryTrace.Serialization;
using StellaOps.Scanner.Storage.Catalog;
using StellaOps.Scanner.Storage.Repositories;
using StellaOps.Scanner.Storage.ObjectStore;
using StellaOps.Scanner.WebService.Contracts;
using StellaOps.Scanner.WebService.Domain;
using StellaOps.Scanner.WebService.Services;
namespace StellaOps.Scanner.WebService.Tests;
public sealed class ScansEndpointsTests
using Xunit;
namespace StellaOps.Scanner.WebService.Tests;
public sealed partial class ScansEndpointsTests
{
[Fact]
public async Task SubmitScanReturnsAcceptedAndStatusRetrievable()
{
using var factory = new ScannerApplicationFactory();
using var client = factory.CreateClient();
var request = new ScanSubmitRequest
{
Image = new ScanImageDescriptor { Reference = "ghcr.io/demo/app:1.0.0" },
Force = false
};
var response = await client.PostAsJsonAsync("/api/v1/scans", request);
Assert.Equal(HttpStatusCode.Accepted, response.StatusCode);
var payload = await response.Content.ReadFromJsonAsync<ScanSubmitResponse>();
Assert.NotNull(payload);
Assert.False(string.IsNullOrWhiteSpace(payload!.ScanId));
Assert.Equal("Pending", payload.Status);
Assert.True(payload.Created);
Assert.False(string.IsNullOrWhiteSpace(payload.Location));
var statusResponse = await client.GetAsync(payload.Location);
Assert.Equal(HttpStatusCode.OK, statusResponse.StatusCode);
var status = await statusResponse.Content.ReadFromJsonAsync<ScanStatusResponse>();
Assert.NotNull(status);
Assert.Equal(payload.ScanId, status!.ScanId);
Assert.Equal("Pending", status.Status);
Assert.Equal("ghcr.io/demo/app:1.0.0", status.Image.Reference);
}
[Fact]
public async Task SubmitScanIsDeterministicForIdenticalPayloads()
{
using var factory = new ScannerApplicationFactory();
using var client = factory.CreateClient();
var request = new ScanSubmitRequest
{
Image = new ScanImageDescriptor { Reference = "registry.example.com/acme/app:latest" },
Force = false,
ClientRequestId = "client-123",
Metadata = new Dictionary<string, string> { ["origin"] = "unit-test" }
};
var first = await client.PostAsJsonAsync("/api/v1/scans", request);
var firstPayload = await first.Content.ReadFromJsonAsync<ScanSubmitResponse>();
var second = await client.PostAsJsonAsync("/api/v1/scans", request);
var secondPayload = await second.Content.ReadFromJsonAsync<ScanSubmitResponse>();
Assert.NotNull(firstPayload);
Assert.NotNull(secondPayload);
Assert.Equal(firstPayload!.ScanId, secondPayload!.ScanId);
Assert.True(firstPayload.Created);
Assert.False(secondPayload.Created);
}
[Fact]
public async Task ScanStatusIncludesSurfacePointersWhenArtifactsExist()
{
const string digest = "sha256:0123456789abcdef0123456789abcdef0123456789abcdef0123456789abcdef";
var digestValue = digest.Split(':', 2)[1];
using var factory = new ScannerApplicationFactory();
const string manifestDigest = "sha256:b2efc2d1f8b042b7f168bcb7d4e2f8e91d36b8306bd855382c5f847efc2c1111";
const string graphDigest = "sha256:9a0d4f8c7b6a5e4d3c2b1a0f9e8d7c6b5a4f3e2d1c0b9a8f7e6d5c4b3a291819";
const string ndjsonDigest = "sha256:3f2e1d0c9b8a7f6e5d4c3b2a1908f7e6d5c4b3a29181726354433221100ffeec";
const string fragmentsDigest = "sha256:aa55aa55aa55aa55aa55aa55aa55aa55aa55aa55aa55aa55aa55aa55aa55aa55";
using (var scope = factory.Services.CreateScope())
{
var artifactRepository = scope.ServiceProvider.GetRequiredService<ArtifactRepository>();
var linkRepository = scope.ServiceProvider.GetRequiredService<LinkRepository>();
var now = DateTime.UtcNow;
async Task InsertAsync(
ArtifactDocumentType type,
ArtifactDocumentFormat format,
string artifactDigest,
string mediaType,
string ttlClass)
{
var artifactId = CatalogIdFactory.CreateArtifactId(type, artifactDigest);
var document = new ArtifactDocument
{
Id = artifactId,
Type = type,
Format = format,
MediaType = mediaType,
BytesSha256 = artifactDigest,
SizeBytes = 2048,
Immutable = true,
RefCount = 1,
TtlClass = ttlClass,
CreatedAtUtc = now,
UpdatedAtUtc = now
};
await artifactRepository.UpsertAsync(document, CancellationToken.None).ConfigureAwait(false);
var link = new LinkDocument
{
Id = CatalogIdFactory.CreateLinkId(LinkSourceType.Image, digest, artifactId),
FromType = LinkSourceType.Image,
FromDigest = digest,
ArtifactId = artifactId,
CreatedAtUtc = now
};
await linkRepository.UpsertAsync(link, CancellationToken.None).ConfigureAwait(false);
}
await InsertAsync(
ArtifactDocumentType.ImageBom,
ArtifactDocumentFormat.CycloneDxJson,
digest,
"application/vnd.cyclonedx+json; version=1.6; view=inventory",
"default").ConfigureAwait(false);
await InsertAsync(
ArtifactDocumentType.SurfaceManifest,
ArtifactDocumentFormat.SurfaceManifestJson,
manifestDigest,
"application/vnd.stellaops.surface.manifest+json",
"surface.manifest").ConfigureAwait(false);
await InsertAsync(
ArtifactDocumentType.SurfaceEntryTrace,
ArtifactDocumentFormat.EntryTraceGraphJson,
graphDigest,
"application/json",
"surface.payload").ConfigureAwait(false);
await InsertAsync(
ArtifactDocumentType.SurfaceEntryTrace,
ArtifactDocumentFormat.EntryTraceNdjson,
ndjsonDigest,
"application/x-ndjson",
"surface.payload").ConfigureAwait(false);
await InsertAsync(
ArtifactDocumentType.SurfaceLayerFragment,
ArtifactDocumentFormat.ComponentFragmentJson,
fragmentsDigest,
"application/json",
"surface.payload").ConfigureAwait(false);
}
using var client = factory.CreateClient();
var submitRequest = new ScanSubmitRequest
{
Image = new ScanImageDescriptor
{
Digest = digest
}
};
var submitResponse = await client.PostAsJsonAsync("/api/v1/scans", submitRequest);
submitResponse.EnsureSuccessStatusCode();
var submission = await submitResponse.Content.ReadFromJsonAsync<ScanSubmitResponse>();
Assert.NotNull(submission);
var statusResponse = await client.GetAsync($"/api/v1/scans/{submission!.ScanId}");
statusResponse.EnsureSuccessStatusCode();
var status = await statusResponse.Content.ReadFromJsonAsync<ScanStatusResponse>();
Assert.NotNull(status);
Assert.NotNull(status!.Surface);
var surface = status.Surface!;
Assert.Equal("default", surface.Tenant);
Assert.False(string.IsNullOrWhiteSpace(surface.ManifestDigest));
Assert.NotNull(surface.ManifestUri);
Assert.Contains("cas://scanner-artifacts/", surface.ManifestUri, StringComparison.Ordinal);
var manifest = surface.Manifest;
Assert.Equal(digest, manifest.ImageDigest);
Assert.Equal(surface.Tenant, manifest.Tenant);
Assert.NotEqual(default, manifest.GeneratedAt);
var artifactsByKind = manifest.Artifacts.ToDictionary(a => a.Kind, StringComparer.Ordinal);
Assert.Equal(5, artifactsByKind.Count);
static string BuildUri(ArtifactDocumentType type, ArtifactDocumentFormat format, string digestValue)
=> $"cas://scanner-artifacts/{ArtifactObjectKeyBuilder.Build(type, format, digestValue, "scanner")}";
var inventory = artifactsByKind["sbom-inventory"];
Assert.Equal(digest, inventory.Digest);
Assert.Equal("cdx-json", inventory.Format);
Assert.Equal("application/vnd.cyclonedx+json; version=1.6; view=inventory", inventory.MediaType);
Assert.Equal("inventory", inventory.View);
Assert.Equal(BuildUri(ArtifactDocumentType.ImageBom, ArtifactDocumentFormat.CycloneDxJson, digest), inventory.Uri);
var manifestArtifact = artifactsByKind["surface.manifest"];
Assert.Equal(manifestDigest, manifestArtifact.Digest);
Assert.Equal("surface.manifest", manifestArtifact.Format);
Assert.Equal("application/vnd.stellaops.surface.manifest+json", manifestArtifact.MediaType);
Assert.Null(manifestArtifact.View);
Assert.Equal(BuildUri(ArtifactDocumentType.SurfaceManifest, ArtifactDocumentFormat.SurfaceManifestJson, manifestDigest), manifestArtifact.Uri);
var graphArtifact = artifactsByKind["entrytrace.graph"];
Assert.Equal(graphDigest, graphArtifact.Digest);
Assert.Equal("entrytrace.graph", graphArtifact.Format);
Assert.Equal("application/json", graphArtifact.MediaType);
Assert.Null(graphArtifact.View);
Assert.Equal(BuildUri(ArtifactDocumentType.SurfaceEntryTrace, ArtifactDocumentFormat.EntryTraceGraphJson, graphDigest), graphArtifact.Uri);
var ndjsonArtifact = artifactsByKind["entrytrace.ndjson"];
Assert.Equal(ndjsonDigest, ndjsonArtifact.Digest);
Assert.Equal("entrytrace.ndjson", ndjsonArtifact.Format);
Assert.Equal("application/x-ndjson", ndjsonArtifact.MediaType);
Assert.Null(ndjsonArtifact.View);
Assert.Equal(BuildUri(ArtifactDocumentType.SurfaceEntryTrace, ArtifactDocumentFormat.EntryTraceNdjson, ndjsonDigest), ndjsonArtifact.Uri);
var fragmentsArtifact = artifactsByKind["layer.fragments"];
Assert.Equal(fragmentsDigest, fragmentsArtifact.Digest);
Assert.Equal("layer.fragments", fragmentsArtifact.Format);
Assert.Equal("application/json", fragmentsArtifact.MediaType);
Assert.Equal("inventory", fragmentsArtifact.View);
Assert.Equal(BuildUri(ArtifactDocumentType.SurfaceLayerFragment, ArtifactDocumentFormat.ComponentFragmentJson, fragmentsDigest), fragmentsArtifact.Uri);
}
[Fact]
public async Task SubmitScanValidatesImageDescriptor()
{
using var factory = new ScannerApplicationFactory();
using var client = factory.CreateClient();
var request = new
{
image = new { reference = "", digest = "" }
};
var response = await client.PostAsJsonAsync("/api/v1/scans", request);
Assert.Equal(HttpStatusCode.BadRequest, response.StatusCode);
}
[Fact]
public async Task SubmitScanPropagatesRequestAbortedToken()
{
RecordingCoordinator coordinator = null!;
using var factory = new ScannerApplicationFactory(configuration =>
{
configuration["scanner:authority:enabled"] = "false";
}, services =>
{
services.AddSingleton<IScanCoordinator>(sp =>
{
coordinator = new RecordingCoordinator(
sp.GetRequiredService<IHttpContextAccessor>(),
sp.GetRequiredService<TimeProvider>(),
sp.GetRequiredService<IScanProgressPublisher>());
return coordinator;
});
});
using var client = factory.CreateClient(new WebApplicationFactoryClientOptions
{
AllowAutoRedirect = false
});
var cts = new CancellationTokenSource();
var request = new ScanSubmitRequest
{
Image = new ScanImageDescriptor { Reference = "example.com/demo:1.0" }
};
var response = await client.PostAsJsonAsync("/api/v1/scans", request, cts.Token);
Assert.Equal(HttpStatusCode.Accepted, response.StatusCode);
Assert.NotNull(coordinator);
Assert.True(coordinator.TokenMatched);
Assert.True(coordinator.LastToken.CanBeCanceled);
}
[Fact]
public async Task EntryTraceEndpointReturnsStoredResult()
{
using var secrets = new TestSurfaceSecretsScope();
using var factory = new ScannerApplicationFactory();
var scanId = $"scan-entrytrace-{Guid.NewGuid():n}";
var graph = new EntryTraceGraph(
EntryTraceOutcome.Resolved,
ImmutableArray<EntryTraceNode>.Empty,
ImmutableArray<EntryTraceEdge>.Empty,
ImmutableArray<EntryTraceDiagnostic>.Empty,
ImmutableArray.Create(new EntryTracePlan(
ImmutableArray.Create("/bin/bash", "-lc", "./start.sh"),
ImmutableDictionary<string, string>.Empty,
"/workspace",
"root",
"/bin/bash",
EntryTraceTerminalType.Script,
"bash",
0.9,
ImmutableDictionary<string, string>.Empty)),
ImmutableArray.Create(new EntryTraceTerminal(
"/bin/bash",
EntryTraceTerminalType.Script,
"bash",
0.9,
ImmutableDictionary<string, string>.Empty,
"root",
"/workspace",
ImmutableArray<string>.Empty)));
using var client = factory.CreateClient();
var ndjson = new List<string> { "{\"kind\":\"entry\"}" };
using (var scope = factory.Services.CreateScope())
var response = await client.PostAsJsonAsync("/api/v1/scans", new
{
var repository = scope.ServiceProvider.GetRequiredService<EntryTraceRepository>();
await repository.UpsertAsync(new EntryTraceDocument
{
ScanId = scanId,
ImageDigest = "sha256:entrytrace",
GeneratedAtUtc = DateTime.UtcNow,
GraphJson = EntryTraceGraphSerializer.Serialize(graph),
Ndjson = ndjson
}, CancellationToken.None).ConfigureAwait(false);
}
using var client = factory.CreateClient();
var response = await client.GetAsync($"/api/v1/scans/{scanId}/entrytrace");
Assert.Equal(HttpStatusCode.OK, response.StatusCode);
var payload = await response.Content.ReadFromJsonAsync<EntryTraceResponse>(SerializerOptions, CancellationToken.None);
Assert.NotNull(payload);
Assert.Equal(scanId, payload!.ScanId);
Assert.Equal("sha256:entrytrace", payload.ImageDigest);
Assert.Equal(graph.Outcome, payload.Graph.Outcome);
Assert.Single(payload.Graph.Plans);
Assert.Equal("/bin/bash", payload.Graph.Plans[0].TerminalPath);
Assert.Single(payload.Graph.Terminals);
Assert.Equal(ndjson, payload.Ndjson);
}
[Fact]
public async Task RubyPackagesEndpointReturnsNotFoundWhenMissing()
{
using var factory = new ScannerApplicationFactory();
using var client = factory.CreateClient();
var response = await client.GetAsync("/api/v1/scans/scan-ruby-missing/ruby-packages");
Assert.Equal(HttpStatusCode.NotFound, response.StatusCode);
}
[Fact]
public async Task RubyPackagesEndpointReturnsInventory()
{
const string scanId = "scan-ruby-existing";
const string digest = "sha256:feedfacefeedfacefeedfacefeedfacefeedfacefeedfacefeedfacefeedface";
var generatedAt = DateTime.UtcNow.AddMinutes(-10);
using var factory = new ScannerApplicationFactory();
using (var scope = factory.Services.CreateScope())
{
var repository = scope.ServiceProvider.GetRequiredService<RubyPackageInventoryRepository>();
var document = new RubyPackageInventoryDocument
{
ScanId = scanId,
ImageDigest = digest,
GeneratedAtUtc = generatedAt,
Packages = new List<RubyPackageDocument>
{
new()
{
Id = "pkg:gem/rack@3.1.0",
Name = "rack",
Version = "3.1.0",
Source = "rubygems",
Platform = "ruby",
Groups = new List<string> { "default" },
RuntimeUsed = true,
Provenance = new RubyPackageProvenance("rubygems", "Gemfile.lock", "Gemfile.lock")
}
}
};
await repository.UpsertAsync(document, CancellationToken.None).ConfigureAwait(false);
}
using var client = factory.CreateClient();
var response = await client.GetAsync($"/api/v1/scans/{scanId}/ruby-packages");
Assert.Equal(HttpStatusCode.OK, response.StatusCode);
var payload = await response.Content.ReadFromJsonAsync<RubyPackagesResponse>();
Assert.NotNull(payload);
Assert.Equal(scanId, payload!.ScanId);
Assert.Equal(digest, payload.ImageDigest);
Assert.Single(payload.Packages);
Assert.Equal("rack", payload.Packages[0].Name);
Assert.Equal("rubygems", payload.Packages[0].Source);
}
private sealed class RecordingCoordinator : IScanCoordinator
{
private readonly IHttpContextAccessor accessor;
private readonly InMemoryScanCoordinator inner;
public RecordingCoordinator(IHttpContextAccessor accessor, TimeProvider timeProvider, IScanProgressPublisher publisher)
{
this.accessor = accessor;
inner = new InMemoryScanCoordinator(timeProvider, publisher);
}
public CancellationToken LastToken { get; private set; }
public bool TokenMatched { get; private set; }
public async ValueTask<ScanSubmissionResult> SubmitAsync(ScanSubmission submission, CancellationToken cancellationToken)
{
LastToken = cancellationToken;
TokenMatched = accessor.HttpContext?.RequestAborted.Equals(cancellationToken) ?? false;
return await inner.SubmitAsync(submission, cancellationToken);
}
public ValueTask<ScanSnapshot?> GetAsync(ScanId scanId, CancellationToken cancellationToken)
=> inner.GetAsync(scanId, cancellationToken);
}
[Fact]
public async Task ProgressStreamReturnsInitialPendingEvent()
{
using var factory = new ScannerApplicationFactory();
using var client = factory.CreateClient();
var request = new ScanSubmitRequest
{
Image = new ScanImageDescriptor { Reference = "ghcr.io/demo/app:2.0.0" }
};
var submit = await client.PostAsJsonAsync("/api/v1/scans", request);
var submitPayload = await submit.Content.ReadFromJsonAsync<ScanSubmitResponse>();
Assert.NotNull(submitPayload);
var response = await client.GetAsync($"/api/v1/scans/{submitPayload!.ScanId}/events?format=jsonl", HttpCompletionOption.ResponseHeadersRead);
Assert.Equal(HttpStatusCode.OK, response.StatusCode);
Assert.Equal("application/x-ndjson", response.Content.Headers.ContentType?.MediaType);
await using var stream = await response.Content.ReadAsStreamAsync();
using var reader = new StreamReader(stream);
var line = await reader.ReadLineAsync();
Assert.False(string.IsNullOrWhiteSpace(line));
var envelope = JsonSerializer.Deserialize<ProgressEnvelope>(line!, SerializerOptions);
Assert.NotNull(envelope);
Assert.Equal(submitPayload.ScanId, envelope!.ScanId);
Assert.Equal("Pending", envelope.State);
Assert.Equal(1, envelope.Sequence);
Assert.NotEqual(default, envelope.Timestamp);
}
[Fact]
public async Task ProgressStreamYieldsSubsequentEvents()
{
using var factory = new ScannerApplicationFactory();
using var client = factory.CreateClient();
var request = new ScanSubmitRequest
{
Image = new ScanImageDescriptor { Reference = "registry.example.com/acme/app:stream" }
};
var submit = await client.PostAsJsonAsync("/api/v1/scans", request);
var submitPayload = await submit.Content.ReadFromJsonAsync<ScanSubmitResponse>();
Assert.NotNull(submitPayload);
var publisher = factory.Services.GetRequiredService<IScanProgressPublisher>();
var response = await client.GetAsync($"/api/v1/scans/{submitPayload!.ScanId}/events?format=jsonl", HttpCompletionOption.ResponseHeadersRead);
await using var stream = await response.Content.ReadAsStreamAsync();
using var reader = new StreamReader(stream);
var firstLine = await reader.ReadLineAsync();
Assert.NotNull(firstLine);
var firstEnvelope = JsonSerializer.Deserialize<ProgressEnvelope>(firstLine!, SerializerOptions);
Assert.NotNull(firstEnvelope);
Assert.Equal("Pending", firstEnvelope!.State);
_ = Task.Run(async () =>
{
await Task.Delay(50);
publisher.Publish(new ScanId(submitPayload.ScanId), "Running", "worker-started", new Dictionary<string, object?>
{
["stage"] = "download"
});
});
ProgressEnvelope? envelope = null;
string? line;
do
{
line = await reader.ReadLineAsync();
if (line is null)
{
break;
}
if (line.Length == 0)
{
continue;
}
envelope = JsonSerializer.Deserialize<ProgressEnvelope>(line, SerializerOptions);
}
while (envelope is not null && envelope.State == "Pending");
Assert.NotNull(envelope);
Assert.Equal("Running", envelope!.State);
Assert.True(envelope.Sequence >= 2);
Assert.Contains(envelope.Data.Keys, key => key == "stage");
}
[Fact]
public async Task ProgressStreamSupportsServerSentEvents()
{
using var factory = new ScannerApplicationFactory();
using var client = factory.CreateClient();
var request = new ScanSubmitRequest
{
Image = new ScanImageDescriptor { Reference = "ghcr.io/demo/app:3.0.0" }
};
var submit = await client.PostAsJsonAsync("/api/v1/scans", request);
var submitPayload = await submit.Content.ReadFromJsonAsync<ScanSubmitResponse>();
Assert.NotNull(submitPayload);
var response = await client.GetAsync($"/api/v1/scans/{submitPayload!.ScanId}/events", HttpCompletionOption.ResponseHeadersRead);
Assert.Equal(HttpStatusCode.OK, response.StatusCode);
Assert.Equal("text/event-stream", response.Content.Headers.ContentType?.MediaType);
await using var stream = await response.Content.ReadAsStreamAsync();
using var reader = new StreamReader(stream);
var idLine = await reader.ReadLineAsync();
var eventLine = await reader.ReadLineAsync();
var dataLine = await reader.ReadLineAsync();
var separator = await reader.ReadLineAsync();
Assert.Equal("id: 1", idLine);
Assert.Equal("event: pending", eventLine);
Assert.NotNull(dataLine);
Assert.StartsWith("data: ", dataLine, StringComparison.Ordinal);
Assert.Equal(string.Empty, separator);
var json = dataLine!["data: ".Length..];
var envelope = JsonSerializer.Deserialize<ProgressEnvelope>(json, SerializerOptions);
Assert.NotNull(envelope);
Assert.Equal(submitPayload.ScanId, envelope!.ScanId);
Assert.Equal("Pending", envelope.State);
Assert.Equal(1, envelope.Sequence);
Assert.True(envelope.Timestamp.UtcDateTime <= DateTime.UtcNow);
}
[Fact]
public async Task ProgressStreamDataKeysAreSortedDeterministically()
{
using var factory = new ScannerApplicationFactory();
using var client = factory.CreateClient();
var request = new ScanSubmitRequest
{
Image = new ScanImageDescriptor { Reference = "ghcr.io/demo/app:sorted" }
};
var submit = await client.PostAsJsonAsync("/api/v1/scans", request);
var submitPayload = await submit.Content.ReadFromJsonAsync<ScanSubmitResponse>();
Assert.NotNull(submitPayload);
var publisher = factory.Services.GetRequiredService<IScanProgressPublisher>();
var response = await client.GetAsync($"/api/v1/scans/{submitPayload!.ScanId}/events?format=jsonl", HttpCompletionOption.ResponseHeadersRead);
await using var stream = await response.Content.ReadAsStreamAsync();
using var reader = new StreamReader(stream);
// Drain the initial pending event.
_ = await reader.ReadLineAsync();
_ = Task.Run(async () =>
{
await Task.Delay(25);
publisher.Publish(
new ScanId(submitPayload.ScanId),
"Running",
"stage-change",
new Dictionary<string, object?>
{
["zeta"] = 1,
["alpha"] = 2,
["Beta"] = 3
});
image = new { reference = string.Empty, digest = string.Empty }
});
string? line;
JsonDocument? document = null;
while ((line = await reader.ReadLineAsync()) is not null)
Assert.Equal(HttpStatusCode.BadRequest, response.StatusCode);
}
[Fact]
public async Task SubmitScanPropagatesRequestAbortedToken()
{
using var secrets = new TestSurfaceSecretsScope();
RecordingCoordinator coordinator = null!;
using var factory = new ScannerApplicationFactory(configuration =>
{
if (string.IsNullOrWhiteSpace(line))
{
continue;
}
var parsed = JsonDocument.Parse(line);
if (parsed.RootElement.TryGetProperty("state", out var state) &&
string.Equals(state.GetString(), "Running", StringComparison.OrdinalIgnoreCase))
{
document = parsed;
break;
}
parsed.Dispose();
}
Assert.NotNull(document);
using (document)
configuration["scanner:authority:enabled"] = "false";
}, configureServices: services =>
{
var data = document!.RootElement.GetProperty("data");
var names = data.EnumerateObject().Select(p => p.Name).ToArray();
Assert.Equal(new[] { "alpha", "Beta", "zeta" }, names);
}
services.AddSingleton<IScanCoordinator>(sp =>
{
coordinator = new RecordingCoordinator(
sp.GetRequiredService<IHttpContextAccessor>(),
sp.GetRequiredService<TimeProvider>(),
sp.GetRequiredService<IScanProgressPublisher>());
return coordinator;
});
});
using var client = factory.CreateClient(new WebApplicationFactoryClientOptions
{
AllowAutoRedirect = false
});
using var cts = new CancellationTokenSource();
var request = new ScanSubmitRequest
{
Image = new ScanImageDescriptor { Reference = "example.com/demo:1.0" }
};
var response = await client.PostAsJsonAsync("/api/v1/scans", request, cts.Token);
Assert.Equal(HttpStatusCode.Accepted, response.StatusCode);
Assert.NotNull(coordinator);
Assert.True(coordinator!.TokenMatched);
Assert.True(coordinator.LastToken.CanBeCanceled);
}
[Fact]
public async Task GetEntryTraceReturnsStoredResult()
{
using var secrets = new TestSurfaceSecretsScope();
var scanId = $"scan-{Guid.NewGuid():n}";
var generatedAt = new DateTimeOffset(2025, 11, 1, 12, 0, 0, TimeSpan.Zero);
var generatedAt = DateTimeOffset.UtcNow;
var plan = new EntryTracePlan(
ImmutableArray.Create("/usr/local/bin/app"),
ImmutableDictionary<string, string>.Empty,
@@ -668,17 +87,19 @@ public sealed class ScansEndpointsTests
"/usr/local/bin/app",
EntryTraceTerminalType.Native,
"go",
90d,
0.9,
ImmutableDictionary<string, string>.Empty);
var terminal = new EntryTraceTerminal(
"/usr/local/bin/app",
EntryTraceTerminalType.Native,
"go",
90d,
0.9,
ImmutableDictionary<string, string>.Empty,
"appuser",
"/workspace",
ImmutableArray<string>.Empty);
var graph = new EntryTraceGraph(
EntryTraceOutcome.Resolved,
ImmutableArray<EntryTraceNode>.Empty,
@@ -686,59 +107,67 @@ public sealed class ScansEndpointsTests
ImmutableArray<EntryTraceDiagnostic>.Empty,
ImmutableArray.Create(plan),
ImmutableArray.Create(terminal));
var ndjson = EntryTraceNdjsonWriter.Serialize(
graph,
new EntryTraceNdjsonMetadata(scanId, "sha256:test", generatedAt));
var ndjson = EntryTraceNdjsonWriter.Serialize(graph, new EntryTraceNdjsonMetadata(scanId, "sha256:test", generatedAt));
var storedResult = new EntryTraceResult(scanId, "sha256:test", generatedAt, graph, ndjson);
using var factory = new ScannerApplicationFactory(
configureConfiguration: null,
services =>
{
services.AddSingleton<IEntryTraceResultStore>(new StubEntryTraceResultStore(storedResult));
});
using var factory = new ScannerApplicationFactory(configureServices: services =>
{
services.AddSingleton<IEntryTraceResultStore>(new StubEntryTraceResultStore(storedResult));
});
using var client = factory.CreateClient();
var response = await client.GetAsync($"/api/v1/scans/{scanId}/entrytrace");
Assert.Equal(HttpStatusCode.OK, response.StatusCode);
var payload = await response.Content.ReadFromJsonAsync<EntryTraceResponse>(SerializerOptions, CancellationToken.None);
var payload = await response.Content.ReadFromJsonAsync<EntryTraceResponse>();
Assert.NotNull(payload);
Assert.Equal(storedResult.ScanId, payload!.ScanId);
Assert.Equal(storedResult.ImageDigest, payload.ImageDigest);
Assert.Equal(storedResult.GeneratedAtUtc, payload.GeneratedAt);
Assert.Equal(storedResult.Graph.Plans.Length, payload.Graph.Plans.Length);
Assert.Equal(storedResult.Ndjson, payload.Ndjson);
}
[Fact]
public async Task GetEntryTraceReturnsNotFoundWhenMissing()
{
using var factory = new ScannerApplicationFactory(
configureConfiguration: null,
services =>
{
services.AddSingleton<IEntryTraceResultStore>(new StubEntryTraceResultStore(null));
});
using var secrets = new TestSurfaceSecretsScope();
using var factory = new ScannerApplicationFactory(configureServices: services =>
{
services.AddSingleton<IEntryTraceResultStore>(new StubEntryTraceResultStore(null));
});
using var client = factory.CreateClient();
var response = await client.GetAsync("/api/v1/scans/scan-missing/entrytrace");
Assert.Equal(HttpStatusCode.NotFound, response.StatusCode);
}
private static readonly JsonSerializerOptions SerializerOptions = new(JsonSerializerDefaults.Web)
{
Converters = { new JsonStringEnumConverter() }
};
private sealed record ProgressEnvelope(
string ScanId,
int Sequence,
string State,
string? Message,
DateTimeOffset Timestamp,
string CorrelationId,
Dictionary<string, JsonElement> Data);
private sealed class RecordingCoordinator : IScanCoordinator
{
private readonly IHttpContextAccessor _accessor;
private readonly InMemoryScanCoordinator _inner;
public RecordingCoordinator(IHttpContextAccessor accessor, TimeProvider timeProvider, IScanProgressPublisher publisher)
{
_accessor = accessor;
_inner = new InMemoryScanCoordinator(timeProvider, publisher);
}
public CancellationToken LastToken { get; private set; }
public bool TokenMatched { get; private set; }
public async ValueTask<ScanSubmissionResult> SubmitAsync(ScanSubmission submission, CancellationToken cancellationToken)
{
LastToken = cancellationToken;
TokenMatched = _accessor.HttpContext?.RequestAborted.Equals(cancellationToken) ?? false;
return await _inner.SubmitAsync(submission, cancellationToken);
}
public ValueTask<ScanSnapshot?> GetAsync(ScanId scanId, CancellationToken cancellationToken)
=> _inner.GetAsync(scanId, cancellationToken);
public ValueTask<ScanSnapshot?> TryFindByTargetAsync(string? reference, string? digest, CancellationToken cancellationToken)
=> _inner.TryFindByTargetAsync(reference, digest, cancellationToken);
}
private sealed class StubEntryTraceResultStore : IEntryTraceResultStore
{
@@ -760,8 +189,6 @@ public sealed class ScansEndpointsTests
}
public Task StoreAsync(EntryTraceResult result, CancellationToken cancellationToken)
{
return Task.CompletedTask;
}
=> Task.CompletedTask;
}
}

View File

@@ -0,0 +1,23 @@
using System;
namespace StellaOps.Scanner.WebService.Tests;
internal sealed class TestSurfaceSecretsScope : IDisposable
{
private readonly string? _provider;
private readonly string? _root;
public TestSurfaceSecretsScope()
{
_provider = Environment.GetEnvironmentVariable("SURFACE_SECRETS_PROVIDER");
_root = Environment.GetEnvironmentVariable("SURFACE_SECRETS_ROOT");
Environment.SetEnvironmentVariable("SURFACE_SECRETS_PROVIDER", "file");
Environment.SetEnvironmentVariable("SURFACE_SECRETS_ROOT", Path.GetTempPath());
}
public void Dispose()
{
Environment.SetEnvironmentVariable("SURFACE_SECRETS_PROVIDER", _provider);
Environment.SetEnvironmentVariable("SURFACE_SECRETS_ROOT", _root);
}
}

View File

@@ -0,0 +1,99 @@
using System.Collections.Generic;
using System.Linq;
using MongoDB.Bson;
using StellaOps.Provenance.Mongo;
using Xunit;
namespace StellaOps.Events.Mongo.Tests;
public sealed class ProvenanceMongoExtensionsTests
{
[Fact]
public void AttachDsseProvenance_WritesNestedDocuments()
{
var document = new BsonDocument
{
{ "kind", "VEX" },
{ "subject", new BsonDocument("digest", new BsonDocument("sha256", "sha256:abc")) }
};
var dsse = new DsseProvenance
{
EnvelopeDigest = "sha256:deadbeef",
PayloadType = "application/vnd.in-toto+json",
Key = new DsseKeyInfo
{
KeyId = "cosign:SHA256-PKIX:TEST",
Issuer = "fulcio",
Algo = "ECDSA"
},
Rekor = new DsseRekorInfo
{
LogIndex = 123,
Uuid = Guid.Parse("2d4d5f7c-1111-4a01-b9cb-aa42022a0a8c").ToString(),
IntegratedTime = 1_699_999_999,
MirrorSeq = 10
},
Chain = new List<DsseChainLink>
{
new()
{
Type = "build",
Id = "att:build#1",
Digest = "sha256:chain"
}
}
};
var trust = new TrustInfo
{
Verified = true,
Verifier = "Authority@stella",
Witnesses = 2,
PolicyScore = 0.9
};
document.AttachDsseProvenance(dsse, trust);
var provenanceDoc = document["provenance"].AsBsonDocument["dsse"].AsBsonDocument;
Assert.Equal("sha256:deadbeef", provenanceDoc["envelopeDigest"].AsString);
Assert.Equal(123, provenanceDoc["rekor"].AsBsonDocument["logIndex"].AsInt64);
Assert.Equal("att:build#1", provenanceDoc["chain"].AsBsonArray.Single().AsBsonDocument["id"].AsString);
var trustDoc = document["trust"].AsBsonDocument;
Assert.True(trustDoc["verified"].AsBoolean);
Assert.Equal(2, trustDoc["witnesses"].AsInt32);
Assert.Equal(0.9, trustDoc["policyScore"].AsDouble);
}
[Fact]
public void BuildProvenVexFilter_TargetsKindSubjectAndVerified()
{
var filter = ProvenanceMongoExtensions.BuildProvenVexFilter("VEX", "sha256:123");
Assert.Equal("VEX", filter["kind"].AsString);
Assert.Equal("sha256:123", filter["subject.digest.sha256"].AsString);
Assert.True(filter.Contains("provenance.dsse.rekor.logIndex"));
Assert.True(filter.Contains("trust.verified"));
}
[Fact]
public void BuildUnprovenEvidenceFilter_FlagsMissingTrustOrRekor()
{
var filter = ProvenanceMongoExtensions.BuildUnprovenEvidenceFilter(new[] { "SBOM", "VEX" });
var kindClause = filter["kind"].AsBsonDocument["$in"].AsBsonArray.Select(v => v.AsString).ToArray();
Assert.Contains("SBOM", kindClause);
Assert.Contains("VEX", kindClause);
var orConditions = filter["$or"].AsBsonArray;
Assert.Equal(2, orConditions.Count);
var trustCondition = orConditions[0].AsBsonDocument;
Assert.Equal("$ne", trustCondition["trust.verified"].AsBsonDocument.Elements.Single().Name);
var rekorCondition = orConditions[1].AsBsonDocument;
Assert.Equal("$exists", rekorCondition["provenance.dsse.rekor.logIndex"].AsBsonDocument.Elements.Single().Name);
Assert.False(rekorCondition["provenance.dsse.rekor.logIndex"].AsBsonDocument["$exists"].AsBoolean);
}
}

View File

@@ -0,0 +1,21 @@
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<TargetFramework>net10.0</TargetFramework>
<ImplicitUsings>enable</ImplicitUsings>
<Nullable>enable</Nullable>
<IsPackable>false</IsPackable>
<IsTestProject>true</IsTestProject>
</PropertyGroup>
<ItemGroup>
<PackageReference Include="Microsoft.NET.Test.Sdk" Version="17.14.0" />
<PackageReference Include="MongoDB.Driver" Version="3.5.0" />
<PackageReference Include="xunit" Version="2.9.2" />
<PackageReference Include="xunit.runner.visualstudio" Version="2.8.2" />
<PackageReference Include="coverlet.collector" Version="6.0.4" />
</ItemGroup>
<ItemGroup>
<ProjectReference Include="../StellaOps.Events.Mongo/StellaOps.Events.Mongo.csproj" />
</ItemGroup>
</Project>

View File

@@ -0,0 +1,82 @@
using System.IO;
using System.Text.Json;
using MongoDB.Bson;
using MongoDB.Driver;
using StellaOps.Provenance.Mongo;
namespace StellaOps.Events.Mongo;
public sealed class EventProvenanceWriter
{
private readonly IMongoCollection<BsonDocument> _events;
public EventProvenanceWriter(IMongoDatabase database, string collectionName = "events")
{
if (database is null) throw new ArgumentNullException(nameof(database));
if (string.IsNullOrWhiteSpace(collectionName)) throw new ArgumentException("Collection name is required", nameof(collectionName));
_events = database.GetCollection<BsonDocument>(collectionName);
}
public Task AttachAsync(string eventId, DsseProvenance dsse, TrustInfo trust, CancellationToken cancellationToken = default)
{
var filter = BuildIdFilter(eventId);
return AttachAsync(filter, dsse, trust, cancellationToken);
}
public async Task AttachAsync(FilterDefinition<BsonDocument> filter, DsseProvenance dsse, TrustInfo trust, CancellationToken cancellationToken = default)
{
if (filter is null) throw new ArgumentNullException(nameof(filter));
if (dsse is null) throw new ArgumentNullException(nameof(dsse));
if (trust is null) throw new ArgumentNullException(nameof(trust));
var update = BuildUpdateDefinition(dsse, trust);
var result = await _events.UpdateOneAsync(filter, update, cancellationToken: cancellationToken).ConfigureAwait(false);
if (result.MatchedCount == 0)
{
throw new InvalidOperationException("Target event document not found.");
}
}
public async Task AttachFromJsonAsync(string eventId, string provenanceMetaJson, TrustInfo? trustOverride = null, CancellationToken cancellationToken = default)
{
if (string.IsNullOrWhiteSpace(provenanceMetaJson)) throw new ArgumentException("JSON payload is required.", nameof(provenanceMetaJson));
using var document = JsonDocument.Parse(provenanceMetaJson);
await AttachFromJsonElementAsync(eventId, document.RootElement, trustOverride, cancellationToken).ConfigureAwait(false);
}
public async Task AttachFromJsonAsync(string eventId, Stream provenanceMetaStream, TrustInfo? trustOverride = null, CancellationToken cancellationToken = default)
{
if (provenanceMetaStream is null) throw new ArgumentNullException(nameof(provenanceMetaStream));
var (dsse, trust) = await ProvenanceJsonParser.ParseAsync(provenanceMetaStream, trustOverride, cancellationToken).ConfigureAwait(false);
await AttachAsync(eventId, dsse, trust, cancellationToken).ConfigureAwait(false);
}
private Task AttachFromJsonElementAsync(string eventId, JsonElement root, TrustInfo? trustOverride, CancellationToken cancellationToken)
{
var (dsse, trust) = ProvenanceJsonParser.Parse(root, trustOverride);
return AttachAsync(eventId, dsse, trust, cancellationToken);
}
private static FilterDefinition<BsonDocument> BuildIdFilter(string eventId)
{
if (string.IsNullOrWhiteSpace(eventId)) throw new ArgumentException("Event identifier is required.", nameof(eventId));
return ObjectId.TryParse(eventId, out var objectId)
? Builders<BsonDocument>.Filter.Eq("_id", objectId)
: Builders<BsonDocument>.Filter.Eq("_id", eventId);
}
private static UpdateDefinition<BsonDocument> BuildUpdateDefinition(DsseProvenance dsse, TrustInfo trust)
{
var temp = new BsonDocument();
temp.AttachDsseProvenance(dsse, trust);
return Builders<BsonDocument>.Update
.Set("provenance", temp["provenance"])
.Set("trust", temp["trust"]);
}
}
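A minimal usage sketch for the writer above; `database`, `eventId`, and the key/Rekor values are illustrative assumptions rather than values from this change:
// Sketch only: database handle, event id, and DSSE/trust values are placeholders.
var writer = new EventProvenanceWriter(database); // defaults to the "events" collection
var dsse = new DsseProvenance
{
    EnvelopeDigest = "sha256:&lt;envelope-digest&gt;",   // digest of the DSSE envelope, not the payload
    PayloadType = "application/vnd.in-toto+json",
    Key = new DsseKeyInfo { KeyId = "cosign:SHA256-PKIX:&lt;key&gt;", Algo = "ECDSA" },
    Rekor = new DsseRekorInfo { LogIndex = 42, Uuid = "&lt;rekor-entry-uuid&gt;", IntegratedTime = 1_700_000_000 }
};
var trust = new TrustInfo { Verified = true, Verifier = "Authority@stella", Witnesses = 1 };
await writer.AttachAsync(eventId, dsse, trust, cancellationToken);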

View File

@@ -0,0 +1,25 @@
using MongoDB.Bson;
using MongoDB.Driver;
using StellaOps.Provenance.Mongo;
namespace StellaOps.Events.Mongo;
public sealed class EventWriter
{
private readonly IMongoCollection<BsonDocument> _events;
public EventWriter(IMongoDatabase db, string collectionName = "events")
{
_events = db.GetCollection<BsonDocument>(collectionName);
}
public async Task AppendEventAsync(
BsonDocument eventDoc,
DsseProvenance dsse,
TrustInfo trust,
CancellationToken ct = default)
{
eventDoc.AttachDsseProvenance(dsse, trust);
await _events.InsertOneAsync(eventDoc, cancellationToken: ct);
}
}

View File

@@ -0,0 +1,45 @@
using MongoDB.Bson;
using MongoDB.Driver;
namespace StellaOps.Events.Mongo;
public static class MongoIndexes
{
public static Task EnsureEventIndexesAsync(IMongoDatabase db, CancellationToken ct = default)
{
var events = db.GetCollection<BsonDocument>("events");
var models = new[]
{
new CreateIndexModel<BsonDocument>(
Builders<BsonDocument>.IndexKeys
.Ascending("subject.digest.sha256")
.Ascending("kind")
.Ascending("provenance.dsse.rekor.logIndex"),
new CreateIndexOptions
{
Name = "events_by_subject_kind_provenance"
}),
new CreateIndexModel<BsonDocument>(
Builders<BsonDocument>.IndexKeys
.Ascending("kind")
.Ascending("trust.verified")
.Ascending("provenance.dsse.rekor.logIndex"),
new CreateIndexOptions
{
Name = "events_unproven_by_kind"
}),
new CreateIndexModel<BsonDocument>(
Builders<BsonDocument>.IndexKeys
.Ascending("provenance.dsse.rekor.logIndex"),
new CreateIndexOptions
{
Name = "events_by_rekor_logindex"
})
};
return events.Indexes.CreateManyAsync(models, ct);
}
}
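
Typically invoked once at service startup; re-running it is safe because creating an index whose definition already exists is a no-op. `db` is the application's `IMongoDatabase` handle.

// One-time setup at startup (idempotent for unchanged index definitions).
await MongoIndexes.EnsureEventIndexesAsync(db);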

View File

@@ -0,0 +1,17 @@
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<TargetFramework>net10.0</TargetFramework>
<ImplicitUsings>enable</ImplicitUsings>
<Nullable>enable</Nullable>
</PropertyGroup>
<ItemGroup>
<PackageReference Include="MongoDB.Driver" Version="3.5.0" />
</ItemGroup>
<ItemGroup>
<ProjectReference Include="../__Libraries/StellaOps.Provenance.Mongo/StellaOps.Provenance.Mongo.csproj" />
</ItemGroup>
</Project>

View File

@@ -0,0 +1,43 @@
using System.Collections.Generic;
using MongoDB.Bson;
namespace StellaOps.Provenance.Mongo;
public sealed class DsseKeyInfo
{
public string KeyId { get; set; } = default!; // e.g. "cosign:SHA256-PKIX:..."
public string? Issuer { get; set; } // e.g. Fulcio issuer, KMS URI, X.509 CN
public string? Algo { get; set; } // "ECDSA" | "RSA" | "Ed25519" | "Dilithium"
}
public sealed class DsseRekorInfo
{
public long LogIndex { get; set; } // Rekor log index
public string Uuid { get; set; } = default!; // Rekor entry UUID
public long? IntegratedTime { get; set; } // unix timestamp (seconds)
public long? MirrorSeq { get; set; } // optional mirror sequence in Proof-Market ledger
}
public sealed class DsseChainLink
{
public string Type { get; set; } = default!; // e.g. "build" | "sbom" | "scan"
public string Id { get; set; } = default!; // e.g. "att:build#..."
public string Digest { get; set; } = default!; // sha256 of DSSE envelope or payload
}
public sealed class DsseProvenance
{
public string EnvelopeDigest { get; set; } = default!; // sha256 of envelope (not payload)
public string PayloadType { get; set; } = default!; // "application/vnd.in-toto+json"
public DsseKeyInfo Key { get; set; } = new();
public DsseRekorInfo? Rekor { get; set; }
public IReadOnlyCollection<DsseChainLink>? Chain { get; set; }
}
public sealed class TrustInfo
{
public bool Verified { get; set; } // local cryptographic verification
public string? Verifier { get; set; } // e.g. "Authority@stella"
public int? Witnesses { get; set; } // number of verified transparency witnesses
public double? PolicyScore { get; set; } // lattice / policy score (0..1)
}

View File

@@ -0,0 +1,203 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Text.Json;
using System.Threading;
using System.Threading.Tasks;
namespace StellaOps.Provenance.Mongo;
public static class ProvenanceJsonParser
{
public static (DsseProvenance Dsse, TrustInfo Trust) Parse(JsonElement root, TrustInfo? trustOverride = null)
{
var dsse = ParseDsse(root);
var trust = trustOverride ?? ParseTrust(root) ?? throw new InvalidOperationException("Provenance metadata missing trust block.");
return (dsse, trust);
}
public static (DsseProvenance Dsse, TrustInfo Trust) Parse(string json, TrustInfo? trustOverride = null)
{
using var document = JsonDocument.Parse(json);
return Parse(document.RootElement, trustOverride);
}
public static async Task<(DsseProvenance Dsse, TrustInfo Trust)> ParseAsync(
Stream utf8JsonStream,
TrustInfo? trustOverride = null,
CancellationToken cancellationToken = default)
{
var document = await JsonDocument.ParseAsync(utf8JsonStream, cancellationToken: cancellationToken).ConfigureAwait(false);
using (document)
{
return Parse(document.RootElement, trustOverride);
}
}
private static DsseProvenance ParseDsse(JsonElement root)
{
if (!root.TryGetProperty("dsse", out var dsseElement) || dsseElement.ValueKind != JsonValueKind.Object)
{
throw new InvalidOperationException("Provenance metadata missing dsse block.");
}
var keyElement = GetRequiredProperty(dsseElement, "key");
var dsse = new DsseProvenance
{
EnvelopeDigest = GetRequiredString(dsseElement, "envelopeDigest"),
PayloadType = GetRequiredString(dsseElement, "payloadType"),
Key = new DsseKeyInfo
{
KeyId = GetRequiredString(keyElement, "keyId"),
Issuer = GetOptionalString(keyElement, "issuer"),
Algo = GetOptionalString(keyElement, "algo"),
},
Chain = ParseChain(dsseElement)
};
if (dsseElement.TryGetProperty("rekor", out var rekorElement) && rekorElement.ValueKind == JsonValueKind.Object)
{
dsse.Rekor = new DsseRekorInfo
{
LogIndex = GetInt64(rekorElement, "logIndex"),
Uuid = GetRequiredString(rekorElement, "uuid"),
IntegratedTime = GetOptionalInt64(rekorElement, "integratedTime"),
MirrorSeq = GetOptionalInt64(rekorElement, "mirrorSeq")
};
}
return dsse;
}
private static IReadOnlyCollection<DsseChainLink>? ParseChain(JsonElement dsseElement)
{
if (!dsseElement.TryGetProperty("chain", out var chainElement) || chainElement.ValueKind != JsonValueKind.Array || chainElement.GetArrayLength() == 0)
{
return null;
}
var links = new List<DsseChainLink>(chainElement.GetArrayLength());
foreach (var entry in chainElement.EnumerateArray())
{
if (entry.ValueKind != JsonValueKind.Object)
{
continue;
}
var type = GetOptionalString(entry, "type");
var id = GetOptionalString(entry, "id");
var digest = GetOptionalString(entry, "digest");
if (string.IsNullOrEmpty(type) || string.IsNullOrEmpty(id) || string.IsNullOrEmpty(digest))
{
continue;
}
links.Add(new DsseChainLink
{
Type = type,
Id = id,
Digest = digest
});
}
return links.Count == 0 ? null : links;
}
private static TrustInfo? ParseTrust(JsonElement root)
{
if (!root.TryGetProperty("trust", out var trustElement) || trustElement.ValueKind != JsonValueKind.Object)
{
return null;
}
var trust = new TrustInfo
{
Verified = trustElement.TryGetProperty("verified", out var verified) && verified.ValueKind == JsonValueKind.True,
Verifier = GetOptionalString(trustElement, "verifier"),
            // ValueKind guard: TryGetInt32/TryGetDouble throw when the element is not a JSON number.
            Witnesses = trustElement.TryGetProperty("witnesses", out var witnessesElement) && witnessesElement.ValueKind == JsonValueKind.Number && witnessesElement.TryGetInt32(out var witnesses)
                ? witnesses
                : null,
            PolicyScore = trustElement.TryGetProperty("policyScore", out var scoreElement) && scoreElement.ValueKind == JsonValueKind.Number && scoreElement.TryGetDouble(out var score)
                ? score
                : null
};
return trust;
}
private static JsonElement GetRequiredProperty(JsonElement parent, string name)
{
if (!parent.TryGetProperty(name, out var property) || property.ValueKind == JsonValueKind.Null)
{
throw new InvalidOperationException($"Provenance metadata missing required property {name}.");
}
return property;
}
private static string GetRequiredString(JsonElement parent, string name)
{
var element = GetRequiredProperty(parent, name);
if (element.ValueKind is JsonValueKind.String)
{
var value = element.GetString();
if (!string.IsNullOrWhiteSpace(value))
{
return value;
}
}
throw new InvalidOperationException($"Provenance metadata property {name} must be a non-empty string.");
}
private static string? GetOptionalString(JsonElement parent, string name)
{
if (!parent.TryGetProperty(name, out var element))
{
return null;
}
return element.ValueKind == JsonValueKind.String ? element.GetString() : null;
}
private static long GetInt64(JsonElement parent, string name)
{
if (!parent.TryGetProperty(name, out var element))
{
throw new InvalidOperationException($"Provenance metadata missing {name}.");
}
        // TryGetInt64 throws when ValueKind is not Number, so check the kind before probing.
        if (element.ValueKind == JsonValueKind.Number && element.TryGetInt64(out var value))
{
return value;
}
if (element.ValueKind == JsonValueKind.String && long.TryParse(element.GetString(), out value))
{
return value;
}
throw new InvalidOperationException($"Provenance metadata property {name} must be an integer.");
}
private static long? GetOptionalInt64(JsonElement parent, string name)
{
if (!parent.TryGetProperty(name, out var element))
{
return null;
}
        // Same guard as GetInt64: only probe numeric elements, otherwise fall through to the string case.
        if (element.ValueKind == JsonValueKind.Number && element.TryGetInt64(out var value))
{
return value;
}
if (element.ValueKind == JsonValueKind.String && long.TryParse(element.GetString(), out value))
{
return value;
}
return null;
}
}
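
A parsing sketch against a minimal payload (values illustrative). It uses only the API shown above; note that a caller-supplied `TrustInfo` override takes precedence over the `trust` block in the JSON.

var json = """
{
  "dsse": {
    "envelopeDigest": "sha256:0123abcd",
    "payloadType": "application/vnd.in-toto+json",
    "key": { "keyId": "cosign:SHA256-PKIX:example", "algo": "ECDSA" },
    "rekor": { "logIndex": "123456", "uuid": "24296fb2-example" }
  },
  "trust": { "verified": true, "witnesses": 2 }
}
""";
// logIndex may arrive as a JSON string; GetInt64 falls back to long.TryParse for that case.
var (dsse, trust) = ProvenanceJsonParser.Parse(json);
Console.WriteLine($"logIndex={dsse.Rekor?.LogIndex} verified={trust.Verified} witnesses={trust.Witnesses}");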

View File

@@ -0,0 +1,142 @@
using MongoDB.Bson;
namespace StellaOps.Provenance.Mongo;
public static class ProvenanceMongoExtensions
{
private const string ProvenanceFieldName = "provenance";
private const string DsseFieldName = "dsse";
private const string TrustFieldName = "trust";
private const string ChainFieldName = "chain";
private static BsonValue StringOrNull(string? value) =>
value is null ? BsonNull.Value : new BsonString(value);
/// <summary>
/// Attach DSSE provenance + trust info to an event document in-place.
/// Designed for generic BsonDocument-based event envelopes.
/// </summary>
public static BsonDocument AttachDsseProvenance(
this BsonDocument eventDoc,
DsseProvenance dsse,
TrustInfo trust)
{
if (eventDoc is null) throw new ArgumentNullException(nameof(eventDoc));
if (dsse is null) throw new ArgumentNullException(nameof(dsse));
if (trust is null) throw new ArgumentNullException(nameof(trust));
var dsseDoc = new BsonDocument
{
{ "envelopeDigest", dsse.EnvelopeDigest },
{ "payloadType", dsse.PayloadType },
{ "key", new BsonDocument
{
{ "keyId", dsse.Key.KeyId },
{ "issuer", StringOrNull(dsse.Key.Issuer) },
{ "algo", StringOrNull(dsse.Key.Algo) }
}
}
};
if (dsse.Rekor is not null)
{
var rekorDoc = new BsonDocument
{
{ "logIndex", dsse.Rekor.LogIndex },
{ "uuid", dsse.Rekor.Uuid }
};
if (dsse.Rekor.IntegratedTime is not null)
rekorDoc.Add("integratedTime", dsse.Rekor.IntegratedTime);
if (dsse.Rekor.MirrorSeq is not null)
rekorDoc.Add("mirrorSeq", dsse.Rekor.MirrorSeq);
dsseDoc.Add("rekor", rekorDoc);
}
if (dsse.Chain is not null && dsse.Chain.Count > 0)
{
var chainArray = new BsonArray();
foreach (var link in dsse.Chain)
{
chainArray.Add(new BsonDocument
{
{ "type", link.Type },
{ "id", link.Id },
{ "digest", link.Digest }
});
}
dsseDoc.Add(ChainFieldName, chainArray);
}
var trustDoc = new BsonDocument
{
{ "verified", trust.Verified },
{ "verifier", StringOrNull(trust.Verifier) }
};
if (trust.Witnesses is not null)
trustDoc.Add("witnesses", trust.Witnesses);
if (trust.PolicyScore is not null)
trustDoc.Add("policyScore", trust.PolicyScore);
var provenanceDoc = new BsonDocument
{
{ DsseFieldName, dsseDoc }
};
eventDoc[ProvenanceFieldName] = provenanceDoc;
eventDoc[TrustFieldName] = trustDoc;
return eventDoc;
}
/// <summary>
/// Helper to query for "cryptographically proven" events:
/// kind + subject.digest.sha256 + presence of Rekor logIndex + trust.verified = true.
/// </summary>
public static BsonDocument BuildProvenVexFilter(
string kind,
string subjectDigestSha256)
{
return new BsonDocument
{
{ "kind", kind },
{ "subject.digest.sha256", subjectDigestSha256 },
{ $"{ProvenanceFieldName}.{DsseFieldName}.rekor.logIndex", new BsonDocument("$exists", true) },
{ $"{TrustFieldName}.verified", true }
};
}
/// <summary>
/// Helper to query for events influencing policy without solid provenance.
/// </summary>
public static BsonDocument BuildUnprovenEvidenceFilter(
IEnumerable<string> kinds)
{
var kindsArray = new BsonArray(kinds);
return new BsonDocument
{
{
"kind", new BsonDocument("$in", kindsArray)
},
{
"$or", new BsonArray
{
new BsonDocument
{
{ $"{TrustFieldName}.verified", new BsonDocument("$ne", true) }
},
new BsonDocument
{
{ $"{ProvenanceFieldName}.{DsseFieldName}.rekor.logIndex",
new BsonDocument("$exists", false) }
}
}
}
};
}
}
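
Filter usage sketch: the proven filter narrows to events that carry a Rekor log index and verified trust, while the unproven filter selects the inverse for a set of kinds. Kind names and the digest are placeholders, and `events` is an `IMongoCollection<BsonDocument>` obtained elsewhere; the index comments describe intent, not guaranteed query plans.

var proven = ProvenanceMongoExtensions.BuildProvenVexFilter("example.vex", "deadbeef");
var provenEvents = await events.Find(proven).ToListAsync();        // intended to align with events_by_subject_kind_provenance
var unproven = ProvenanceMongoExtensions.BuildUnprovenEvidenceFilter(new[] { "example.vex", "example.scan" });
var unprovenCount = await events.CountDocumentsAsync(unproven);    // intended to align with events_unproven_by_kind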

View File

@@ -0,0 +1,13 @@
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<TargetFramework>net10.0</TargetFramework>
<ImplicitUsings>enable</ImplicitUsings>
<Nullable>enable</Nullable>
</PropertyGroup>
<ItemGroup>
<PackageReference Include="MongoDB.Driver" Version="3.5.0" />
</ItemGroup>
</Project>