feat: Add in-memory implementations for issuer audit, key, repository, and trust management
Some checks failed
devportal-offline / build-offline (push) Has been cancelled
Mirror Thin Bundle Sign & Verify / mirror-sign (push) Has been cancelled
Docs CI / lint-and-preview (push) Has been cancelled
api-governance / spectral-lint (push) Has been cancelled
oas-ci / oas-validate (push) Has been cancelled

- Introduced InMemoryIssuerAuditSink to retain audit entries for testing.
- Implemented InMemoryIssuerKeyRepository for deterministic key storage.
- Created InMemoryIssuerRepository to manage issuer records in memory.
- Added InMemoryIssuerTrustRepository for managing issuer trust overrides.
- Each repository utilizes concurrent collections for thread-safe operations.
- Enhanced deprecation tracking with a comprehensive YAML schema for API governance.
This commit is contained in:
master
2025-12-11 19:47:43 +02:00
parent ab22181e8b
commit ce5ec9c158
48 changed files with 1898 additions and 1580 deletions

View File

@@ -0,0 +1,209 @@
# Deprecation metadata schema for OpenAPI extensions
# Used by API Governance tools for deprecation tracking and notification workflows.
# Per APIGOV-63-001.
schemas:
  # Applied as `x-deprecation` on OpenAPI operation objects.
  DeprecationMetadata:
    type: object
    description: |
      Deprecation metadata for API endpoints. Applied as x-deprecation extension
      on operation objects. Used by Spectral rules, changelog generation, and
      notification templates.
    required:
      - deprecatedAt
      - sunsetAt
      # NOTE(review): successorPath is required here, but its description below says
      # "(if available)" — confirm whether endpoints with no successor may omit it
      # (successorOperationId, by contrast, is optional).
      - successorPath
      - reason
    properties:
      deprecatedAt:
        type: string
        format: date-time
        description: ISO 8601 timestamp when the endpoint was marked deprecated.
        example: "2025-01-15T00:00:00Z"
      sunsetAt:
        type: string
        format: date-time
        description: ISO 8601 timestamp when the endpoint will be removed.
        example: "2025-07-15T00:00:00Z"
      successorPath:
        type: string
        description: Path to the replacement endpoint (if available).
        example: "/v2/resources"
      successorOperationId:
        type: string
        description: Operation ID of the replacement endpoint.
        example: "getResourcesV2"
      reason:
        type: string
        description: Human-readable explanation for the deprecation.
        example: "Replaced by paginated v2 endpoint with cursor-based pagination."
      migrationGuide:
        type: string
        format: uri
        description: URL to migration documentation.
        example: "https://docs.stella-ops.org/migration/resources-v2"
      notificationChannels:
        type: array
        description: Notification channels for deprecation announcements.
        items:
          type: string
          enum:
            - slack
            - teams
            - email
            - webhook
        # When omitted, announcements go to email only.
        default:
          - email
      affectedConsumerHints:
        type: array
        description: Hints about affected consumers (e.g., SDK names, client IDs).
        items:
          type: string
      breakingChanges:
        type: array
        description: List of breaking changes in the successor endpoint.
        items:
          $ref: '#/schemas/BreakingChange'
  # One entry per incompatibility between the deprecated endpoint and its successor.
  BreakingChange:
    type: object
    description: Description of a breaking change between deprecated and successor endpoints.
    required:
      - type
      - description
    properties:
      type:
        type: string
        enum:
          - parameter-removed
          - parameter-renamed
          - parameter-type-changed
          - response-schema-changed
          - header-removed
          - header-renamed
          - status-code-changed
          - content-type-changed
          - authentication-changed
        description: Category of the breaking change.
      path:
        type: string
        description: JSON path to the affected element.
        example: "$.parameters[0].name"
      description:
        type: string
        description: Human-readable description of the change.
        example: "Parameter 'page' renamed to 'cursor'"
      migrationAction:
        type: string
        description: Recommended action for consumers.
        example: "Replace 'page' parameter with 'cursor' using the nextCursor value from previous response."
  # Envelope sent to the Notify service when a deprecation is announced.
  DeprecationNotificationEvent:
    type: object
    description: Event payload for deprecation notifications sent to Notify service.
    required:
      - eventId
      - eventType
      - timestamp
      - tenantId
      - deprecation
    properties:
      eventId:
        type: string
        format: uuid
        description: Unique identifier for this notification event.
      eventType:
        type: string
        # Fixed discriminator used for routing; not a free-form field.
        const: "api.deprecation.announced"
        description: Event type for routing in Notify service.
      timestamp:
        type: string
        format: date-time
        description: ISO 8601 timestamp when the event was generated.
      tenantId:
        type: string
        description: Tenant scope for the notification.
      deprecation:
        $ref: '#/schemas/DeprecationSummary'
  # Compact per-endpoint view embedded in notifications and reports.
  DeprecationSummary:
    type: object
    description: Summary of a deprecated endpoint for notification purposes.
    required:
      - service
      - path
      - method
      - deprecatedAt
      - sunsetAt
    properties:
      service:
        type: string
        description: Service name owning the deprecated endpoint.
        example: "authority"
      path:
        type: string
        description: API path of the deprecated endpoint.
        example: "/v1/tokens"
      method:
        type: string
        enum:
          - GET
          - POST
          - PUT
          - PATCH
          - DELETE
          - HEAD
          - OPTIONS
        description: HTTP method of the deprecated endpoint.
      operationId:
        type: string
        description: OpenAPI operation ID.
        example: "createToken"
      deprecatedAt:
        type: string
        format: date-time
      sunsetAt:
        type: string
        format: date-time
      daysUntilSunset:
        type: integer
        description: Computed days remaining until sunset.
        example: 180
      successorPath:
        type: string
        description: Path to the replacement endpoint.
      reason:
        type: string
        description: Deprecation reason.
      migrationGuide:
        type: string
        format: uri
      changelogUrl:
        type: string
        format: uri
        description: URL to the API changelog entry for this deprecation.
  # Aggregated roll-up consumed by changelog generation and SDK publishing.
  DeprecationReport:
    type: object
    description: Aggregated report of all deprecations for changelog/SDK publishing.
    required:
      - generatedAt
      - schemaVersion
      - deprecations
    properties:
      generatedAt:
        type: string
        format: date-time
        description: When this report was generated.
      schemaVersion:
        type: string
        # Versioned schema identifier; bump the @N suffix on breaking report changes.
        const: "api.deprecation.report@1"
      totalCount:
        type: integer
        description: Total number of deprecated endpoints.
      upcomingSunsets:
        type: integer
        description: Number of endpoints with sunset within 90 days.
      deprecations:
        type: array
        items:
          $ref: '#/schemas/DeprecationSummary'

View File

@@ -19,7 +19,6 @@
<IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
<PrivateAssets>all</PrivateAssets>
</PackageReference>
<PackageReference Include="EphemeralMongo" Version="3.0.0" />
</ItemGroup>
<ItemGroup>

View File

@@ -9,8 +9,6 @@
</PropertyGroup>
<ItemGroup>
<PackageReference Include="MongoDB.Driver" Version="3.5.0" />
<PackageReference Include="EphemeralMongo" Version="3.0.0" />
<PackageReference Include="Microsoft.Extensions.Logging.Abstractions" Version="10.0.0" />
</ItemGroup>
</Project>

View File

@@ -1,10 +1,8 @@
using MongoDB.Bson;
namespace StellaOps.Bench.LinkNotMerge.Vex;
internal sealed class VexLinksetAggregator
{
public VexAggregationResult Correlate(IEnumerable<BsonDocument> documents)
public VexAggregationResult Correlate(IEnumerable<VexObservationDocument> documents)
{
ArgumentNullException.ThrowIfNull(documents);
@@ -13,39 +11,21 @@ internal sealed class VexLinksetAggregator
foreach (var document in documents)
{
var tenant = document.GetValue("tenant", "unknown").AsString;
var linksetValue = document.GetValue("linkset", new BsonDocument());
var linkset = linksetValue.IsBsonDocument ? linksetValue.AsBsonDocument : new BsonDocument();
var aliases = linkset.GetValue("aliases", new BsonArray()).AsBsonArray;
var statementsValue = document.GetValue("statements", new BsonArray());
var statements = statementsValue.IsBsonArray ? statementsValue.AsBsonArray : new BsonArray();
var tenant = document.Tenant;
var aliases = document.Aliases;
var statements = document.Statements;
foreach (var statementValue in statements)
{
if (!statementValue.IsBsonDocument)
{
continue;
}
statementsSeen++;
var statement = statementValue.AsBsonDocument;
var status = statement.GetValue("status", "unknown").AsString;
var justification = statement.GetValue("justification", BsonNull.Value);
var lastUpdated = statement.GetValue("last_updated", BsonNull.Value);
var productValue = statement.GetValue("product", new BsonDocument());
var product = productValue.IsBsonDocument ? productValue.AsBsonDocument : new BsonDocument();
var productKey = product.GetValue("purl", "unknown").AsString;
var status = statementValue.Status;
var justification = statementValue.Justification;
var lastUpdated = statementValue.LastUpdated;
var productKey = statementValue.Product.Purl;
foreach (var aliasValue in aliases)
foreach (var alias in aliases)
{
if (!aliasValue.IsString)
{
continue;
}
var alias = aliasValue.AsString;
var key = string.Create(alias.Length + tenant.Length + productKey.Length + 2, (tenant, alias, productKey), static (span, data) =>
{
var (tenantValue, aliasValue, productValue) = data;
@@ -70,7 +50,7 @@ internal sealed class VexLinksetAggregator
}
}
var eventDocuments = new List<BsonDocument>(groups.Count);
var eventDocuments = new List<VexEvent>(groups.Count);
foreach (var accumulator in groups.Values)
{
if (accumulator.ShouldEmitEvent)
@@ -93,7 +73,7 @@ internal sealed class VexLinksetAggregator
private readonly string _tenant;
private readonly string _alias;
private readonly string _product;
private DateTime? _latest;
private DateTimeOffset? _latest;
public VexAccumulator(string tenant, string alias, string product)
{
@@ -102,22 +82,22 @@ internal sealed class VexLinksetAggregator
_product = product;
}
public void AddStatement(string status, BsonValue justification, BsonValue updatedAt)
public void AddStatement(string status, string justification, DateTimeOffset updatedAt)
{
if (!_statusCounts.TryAdd(status, 1))
{
_statusCounts[status]++;
}
if (justification.IsString)
if (!string.IsNullOrEmpty(justification))
{
_justifications.Add(justification.AsString);
_justifications.Add(justification);
}
if (updatedAt.IsValidDateTime)
if (updatedAt != default)
{
var value = updatedAt.ToUniversalTime();
if (!_latest.HasValue || value > _latest)
if (!_latest.HasValue || value > _latest.Value)
{
_latest = value;
}
@@ -142,19 +122,15 @@ internal sealed class VexLinksetAggregator
}
}
public BsonDocument ToEvent()
public VexEvent ToEvent()
{
var payload = new BsonDocument
{
["tenant"] = _tenant,
["alias"] = _alias,
["product"] = _product,
["statuses"] = new BsonDocument(_statusCounts.Select(kvp => new BsonElement(kvp.Key, kvp.Value))),
["justifications"] = new BsonArray(_justifications.Select(justification => justification)),
["last_updated"] = _latest.HasValue ? _latest.Value : (BsonValue)BsonNull.Value,
};
return payload;
return new VexEvent(
_tenant,
_alias,
_product,
new Dictionary<string, int>(_statusCounts, StringComparer.Ordinal),
_justifications.ToArray(),
_latest);
}
}
}
@@ -163,4 +139,12 @@ internal sealed record VexAggregationResult(
int LinksetCount,
int StatementCount,
int EventCount,
IReadOnlyList<BsonDocument> EventDocuments);
IReadOnlyList<VexEvent> EventDocuments);
/// <summary>
/// Correlation result for one (tenant, alias, product) group produced by the VEX
/// linkset aggregator: per-status statement counts, the distinct justifications seen,
/// and the most recent update timestamp (null when no statement carried one).
/// </summary>
internal sealed record VexEvent(
    string Tenant,
    string Alias,
    string Product,
    IReadOnlyDictionary<string, int> Statuses,
    IReadOnlyCollection<string> Justifications,
    DateTimeOffset? LastUpdated);

View File

@@ -1,252 +1,194 @@
using System.Collections.Immutable;
using System.Security.Cryptography;
using MongoDB.Bson;
namespace StellaOps.Bench.LinkNotMerge.Vex;
internal static class VexObservationGenerator
{
private static readonly ImmutableArray<string> StatusPool = ImmutableArray.Create(
"affected",
"not_affected",
"under_investigation");
private static readonly ImmutableArray<string> JustificationPool = ImmutableArray.Create(
"exploitation_mitigated",
"component_not_present",
"vulnerable_code_not_present",
"vulnerable_code_not_in_execute_path");
public static IReadOnlyList<VexObservationSeed> Generate(VexScenarioConfig config)
{
ArgumentNullException.ThrowIfNull(config);
var observationCount = config.ResolveObservationCount();
var aliasGroups = config.ResolveAliasGroups();
var statementsPerObservation = config.ResolveStatementsPerObservation();
var tenantCount = config.ResolveTenantCount();
var productsPerObservation = config.ResolveProductsPerObservation();
var seed = config.ResolveSeed();
var seeds = new VexObservationSeed[observationCount];
var random = new Random(seed);
var baseTime = new DateTimeOffset(2025, 10, 1, 0, 0, 0, TimeSpan.Zero);
for (var index = 0; index < observationCount; index++)
{
var tenantIndex = index % tenantCount;
var tenant = $"tenant-{tenantIndex:D2}";
var group = index % aliasGroups;
var revision = index / aliasGroups;
var vulnerabilityAlias = $"CVE-2025-{group:D4}";
var upstreamId = $"VEX-{group:D4}-{revision:D3}";
var observationId = $"{tenant}:vex:{group:D5}:{revision:D6}";
var fetchedAt = baseTime.AddMinutes(revision);
var receivedAt = fetchedAt.AddSeconds(2);
var documentVersion = fetchedAt.AddSeconds(15).ToString("O");
var products = CreateProducts(group, revision, productsPerObservation);
var statements = CreateStatements(vulnerabilityAlias, products, statementsPerObservation, random, fetchedAt);
var rawPayload = CreateRawPayload(upstreamId, vulnerabilityAlias, statements);
var contentHash = ComputeContentHash(rawPayload, tenant, group, revision);
var aliases = ImmutableArray.Create(vulnerabilityAlias, $"GHSA-{group:D4}-{revision % 26 + 'a'}{revision % 26 + 'a'}");
var references = ImmutableArray.Create(
new VexReference("advisory", $"https://vendor.example/advisories/{vulnerabilityAlias.ToLowerInvariant()}"),
new VexReference("fix", $"https://vendor.example/patch/{vulnerabilityAlias.ToLowerInvariant()}"));
seeds[index] = new VexObservationSeed(
ObservationId: observationId,
Tenant: tenant,
Vendor: "excititor-bench",
Stream: "simulated",
Api: $"https://bench.stella/vex/{group:D4}/{revision:D3}",
CollectorVersion: "1.0.0-bench",
UpstreamId: upstreamId,
DocumentVersion: documentVersion,
FetchedAt: fetchedAt,
ReceivedAt: receivedAt,
ContentHash: contentHash,
VulnerabilityAlias: vulnerabilityAlias,
Aliases: aliases,
Products: products,
Statements: statements,
References: references,
ContentFormat: "CycloneDX-VEX",
SpecVersion: "1.4",
RawPayload: rawPayload);
}
return seeds;
}
private static ImmutableArray<VexProduct> CreateProducts(int group, int revision, int count)
{
var builder = ImmutableArray.CreateBuilder<VexProduct>(count);
for (var index = 0; index < count; index++)
{
var purl = $"pkg:generic/stella/product-{group:D4}-{index}@{1 + revision % 5}.{index + 1}.{revision % 9}";
builder.Add(new VexProduct(purl, $"component-{group % 30:D2}", $"namespace-{group % 10:D2}"));
}
return builder.MoveToImmutable();
}
private static ImmutableArray<BsonDocument> CreateStatements(
string vulnerabilityAlias,
ImmutableArray<VexProduct> products,
int statementsPerObservation,
Random random,
DateTimeOffset baseTime)
{
var builder = ImmutableArray.CreateBuilder<BsonDocument>(statementsPerObservation);
for (var index = 0; index < statementsPerObservation; index++)
{
var statusIndex = random.Next(StatusPool.Length);
var status = StatusPool[statusIndex];
var justification = JustificationPool[random.Next(JustificationPool.Length)];
var product = products[index % products.Length];
var statementId = $"stmt-{vulnerabilityAlias}-{index:D2}";
var document = new BsonDocument
{
["statement_id"] = statementId,
["vulnerability_alias"] = vulnerabilityAlias,
["product"] = new BsonDocument
{
["purl"] = product.Purl,
["component"] = product.Component,
["namespace"] = product.Namespace,
},
["status"] = status,
["justification"] = justification,
["impact"] = status == "affected" ? "high" : "none",
["last_updated"] = baseTime.AddMinutes(index).UtcDateTime,
};
builder.Add(document);
}
return builder.MoveToImmutable();
}
private static BsonDocument CreateRawPayload(string upstreamId, string vulnerabilityAlias, ImmutableArray<BsonDocument> statements)
{
var doc = new BsonDocument
{
["documentId"] = upstreamId,
["title"] = $"Simulated VEX report {upstreamId}",
["summary"] = $"Synthetic VEX payload for {vulnerabilityAlias}.",
["statements"] = new BsonArray(statements),
};
return doc;
}
private static string ComputeContentHash(BsonDocument rawPayload, string tenant, int group, int revision)
{
using var sha256 = SHA256.Create();
var seed = $"{tenant}|{group}|{revision}";
var rawBytes = rawPayload.ToBson();
var seedBytes = System.Text.Encoding.UTF8.GetBytes(seed);
var combined = new byte[rawBytes.Length + seedBytes.Length];
Buffer.BlockCopy(rawBytes, 0, combined, 0, rawBytes.Length);
Buffer.BlockCopy(seedBytes, 0, combined, rawBytes.Length, seedBytes.Length);
var hash = sha256.ComputeHash(combined);
return $"sha256:{Convert.ToHexString(hash)}";
}
}
internal sealed record VexObservationSeed(
string ObservationId,
string Tenant,
string Vendor,
string Stream,
string Api,
string CollectorVersion,
string UpstreamId,
string DocumentVersion,
DateTimeOffset FetchedAt,
DateTimeOffset ReceivedAt,
string ContentHash,
string VulnerabilityAlias,
ImmutableArray<string> Aliases,
ImmutableArray<VexProduct> Products,
ImmutableArray<BsonDocument> Statements,
ImmutableArray<VexReference> References,
string ContentFormat,
string SpecVersion,
BsonDocument RawPayload)
{
public BsonDocument ToBsonDocument()
{
var aliases = new BsonArray(Aliases.Select(alias => alias));
var statements = new BsonArray(Statements);
var productsArray = new BsonArray(Products.Select(product => new BsonDocument
{
["purl"] = product.Purl,
["component"] = product.Component,
["namespace"] = product.Namespace,
}));
var references = new BsonArray(References.Select(reference => new BsonDocument
{
["type"] = reference.Type,
["url"] = reference.Url,
}));
var document = new BsonDocument
{
["_id"] = ObservationId,
["tenant"] = Tenant,
["source"] = new BsonDocument
{
["vendor"] = Vendor,
["stream"] = Stream,
["api"] = Api,
["collector_version"] = CollectorVersion,
},
["upstream"] = new BsonDocument
{
["upstream_id"] = UpstreamId,
["document_version"] = DocumentVersion,
["fetched_at"] = FetchedAt.UtcDateTime,
["received_at"] = ReceivedAt.UtcDateTime,
["content_hash"] = ContentHash,
["signature"] = new BsonDocument
{
["present"] = false,
["format"] = BsonNull.Value,
["key_id"] = BsonNull.Value,
["signature"] = BsonNull.Value,
},
},
["content"] = new BsonDocument
{
["format"] = ContentFormat,
["spec_version"] = SpecVersion,
["raw"] = RawPayload,
},
["identifiers"] = new BsonDocument
{
["aliases"] = aliases,
["primary"] = VulnerabilityAlias,
},
["statements"] = statements,
["linkset"] = new BsonDocument
{
["aliases"] = aliases,
["products"] = productsArray,
["references"] = references,
["reconciled_from"] = new BsonArray { "/statements" },
},
["supersedes"] = BsonNull.Value,
};
return document;
}
}
internal sealed record VexProduct(string Purl, string Component, string Namespace);
internal sealed record VexReference(string Type, string Url);
using System.Collections.Immutable;
using System.Security.Cryptography;
using System.Text;
namespace StellaOps.Bench.LinkNotMerge.Vex;
internal static class VexObservationGenerator
{
    // Candidate VEX statuses, sampled uniformly per statement.
    private static readonly ImmutableArray<string> StatusPool = ImmutableArray.Create(
        "affected",
        "not_affected",
        "under_investigation");

    // Candidate justifications, sampled uniformly per statement.
    private static readonly ImmutableArray<string> JustificationPool = ImmutableArray.Create(
        "exploitation_mitigated",
        "component_not_present",
        "vulnerable_code_not_present",
        "vulnerable_code_not_in_execute_path");

    /// <summary>
    /// Deterministically generates synthetic VEX observation seeds for the benchmark.
    /// A fixed RNG seed and fixed base timestamp mean the same <paramref name="config"/>
    /// always produces the same seeds (and therefore stable content hashes).
    /// </summary>
    /// <param name="config">Scenario knobs (counts, grouping, seed); must not be null.</param>
    /// <returns>One seed per observation, ordered by generation index.</returns>
    /// <exception cref="ArgumentNullException">Thrown when <paramref name="config"/> is null.</exception>
    public static IReadOnlyList<VexObservationSeed> Generate(VexScenarioConfig config)
    {
        ArgumentNullException.ThrowIfNull(config);

        var observationCount = config.ResolveObservationCount();
        var aliasGroups = config.ResolveAliasGroups();
        var statementsPerObservation = config.ResolveStatementsPerObservation();
        var tenantCount = config.ResolveTenantCount();
        var productsPerObservation = config.ResolveProductsPerObservation();
        var seed = config.ResolveSeed();

        var seeds = new VexObservationSeed[observationCount];
        var random = new Random(seed);
        var baseTime = new DateTimeOffset(2025, 10, 1, 0, 0, 0, TimeSpan.Zero);

        for (var index = 0; index < observationCount; index++)
        {
            var tenantIndex = index % tenantCount;
            var tenant = $"tenant-{tenantIndex:D2}";
            // Observations cycle through alias groups; `revision` counts repeats of a group.
            var group = index % aliasGroups;
            var revision = index / aliasGroups;
            var vulnerabilityAlias = $"CVE-2025-{group:D4}";
            var upstreamId = $"VEX-{group:D4}-{revision:D3}";
            var observationId = $"{tenant}:vex:{group:D5}:{revision:D6}";
            var fetchedAt = baseTime.AddMinutes(revision);
            var receivedAt = fetchedAt.AddSeconds(2);
            var documentVersion = fetchedAt.AddSeconds(15).ToString("O");

            var products = CreateProducts(group, revision, productsPerObservation);
            var statements = CreateStatements(vulnerabilityAlias, products, statementsPerObservation, random, fetchedAt);
            var contentHash = ComputeContentHash(upstreamId, vulnerabilityAlias, statements, tenant, group, revision);

            // BUG FIX: `revision % 26 + 'a'` is int arithmetic (char promotes to int), so the
            // interpolation rendered a number (e.g. "GHSA-0001-9898") instead of letters.
            // Cast back to char to get the intended alphabetic suffix.
            var aliasLetter = (char)('a' + revision % 26);
            var aliases = ImmutableArray.Create(vulnerabilityAlias, $"GHSA-{group:D4}-{aliasLetter}{aliasLetter}");
            var references = ImmutableArray.Create(
                new VexReference("advisory", $"https://vendor.example/advisories/{vulnerabilityAlias.ToLowerInvariant()}"),
                new VexReference("fix", $"https://vendor.example/patch/{vulnerabilityAlias.ToLowerInvariant()}"));

            seeds[index] = new VexObservationSeed(
                ObservationId: observationId,
                Tenant: tenant,
                Vendor: "excititor-bench",
                Stream: "simulated",
                Api: $"https://bench.stella/vex/{group:D4}/{revision:D3}",
                CollectorVersion: "1.0.0-bench",
                UpstreamId: upstreamId,
                DocumentVersion: documentVersion,
                FetchedAt: fetchedAt,
                ReceivedAt: receivedAt,
                ContentHash: contentHash,
                VulnerabilityAlias: vulnerabilityAlias,
                Aliases: aliases,
                Products: products,
                Statements: statements,
                References: references,
                ContentFormat: "CycloneDX-VEX",
                SpecVersion: "1.4");
        }

        return seeds;
    }

    // Builds the synthetic product list for one observation; purl versions vary with revision.
    private static ImmutableArray<VexProduct> CreateProducts(int group, int revision, int count)
    {
        var builder = ImmutableArray.CreateBuilder<VexProduct>(count);
        for (var index = 0; index < count; index++)
        {
            var purl = $"pkg:generic/stella/product-{group:D4}-{index}@{1 + revision % 5}.{index + 1}.{revision % 9}";
            builder.Add(new VexProduct(purl, $"component-{group % 30:D2}", $"namespace-{group % 10:D2}"));
        }

        return builder.MoveToImmutable();
    }

    // Builds the synthetic statements for one observation, drawing status/justification
    // from the shared pools via the scenario RNG (deterministic for a fixed seed).
    private static ImmutableArray<VexStatement> CreateStatements(
        string vulnerabilityAlias,
        ImmutableArray<VexProduct> products,
        int statementsPerObservation,
        Random random,
        DateTimeOffset baseTime)
    {
        var builder = ImmutableArray.CreateBuilder<VexStatement>(statementsPerObservation);
        for (var index = 0; index < statementsPerObservation; index++)
        {
            var statusIndex = random.Next(StatusPool.Length);
            var status = StatusPool[statusIndex];
            var justification = JustificationPool[random.Next(JustificationPool.Length)];
            var product = products[index % products.Length];
            var statementId = $"stmt-{vulnerabilityAlias}-{index:D2}";
            var lastUpdated = baseTime.AddMinutes(index).ToUniversalTime();

            builder.Add(new VexStatement(
                StatementId: statementId,
                VulnerabilityAlias: vulnerabilityAlias,
                Product: product,
                Status: status,
                Justification: justification,
                LastUpdated: lastUpdated));
        }

        return builder.MoveToImmutable();
    }

    // Hashes a canonical '|'-separated rendering of the identifying fields and every
    // statement, so the hash is stable for identical inputs and changes when any
    // statement content changes. Uses the one-shot SHA256.HashData (no IDisposable).
    private static string ComputeContentHash(
        string upstreamId,
        string vulnerabilityAlias,
        ImmutableArray<VexStatement> statements,
        string tenant,
        int group,
        int revision)
    {
        var builder = new StringBuilder();
        builder.Append(tenant).Append('|').Append(group).Append('|').Append(revision).Append('|');
        builder.Append(upstreamId).Append('|').Append(vulnerabilityAlias).Append('|');

        foreach (var statement in statements)
        {
            builder.Append(statement.StatementId).Append('|')
                .Append(statement.Status).Append('|')
                .Append(statement.Product.Purl).Append('|')
                .Append(statement.Justification).Append('|')
                .Append(statement.LastUpdated.ToUniversalTime().ToString("O")).Append('|');
        }

        var hash = SHA256.HashData(Encoding.UTF8.GetBytes(builder.ToString()));
        return $"sha256:{Convert.ToHexString(hash)}";
    }
}
/// <summary>
/// Full synthetic observation produced by the generator: identity, provenance
/// (vendor/stream/api/collector), timestamps, content hash, and the alias/product/
/// statement payload used by the benchmark.
/// </summary>
internal sealed record VexObservationSeed(
    string ObservationId,
    string Tenant,
    string Vendor,
    string Stream,
    string Api,
    string CollectorVersion,
    string UpstreamId,
    string DocumentVersion,
    DateTimeOffset FetchedAt,
    DateTimeOffset ReceivedAt,
    string ContentHash,
    string VulnerabilityAlias,
    ImmutableArray<string> Aliases,
    ImmutableArray<VexProduct> Products,
    ImmutableArray<VexStatement> Statements,
    ImmutableArray<VexReference> References,
    string ContentFormat,
    string SpecVersion)
{
    /// <summary>
    /// Projects the seed down to the minimal shape (tenant, aliases, statements)
    /// consumed by the linkset aggregation step.
    /// </summary>
    public VexObservationDocument ToDocument()
    {
        return new VexObservationDocument(
            Tenant,
            Aliases,
            Statements);
    }
}

/// <summary>Minimal observation view handed to the aggregator.</summary>
internal sealed record VexObservationDocument(
    string Tenant,
    ImmutableArray<string> Aliases,
    ImmutableArray<VexStatement> Statements);

/// <summary>A single VEX statement: status/justification for one product and alias.</summary>
internal sealed record VexStatement(
    string StatementId,
    string VulnerabilityAlias,
    VexProduct Product,
    string Status,
    string Justification,
    DateTimeOffset LastUpdated);

/// <summary>Product identity: package URL plus component and namespace labels.</summary>
internal sealed record VexProduct(string Purl, string Component, string Namespace);

/// <summary>Typed external reference (e.g. "advisory", "fix") with its URL.</summary>
internal sealed record VexReference(string Type, string Url);

View File

@@ -1,7 +1,4 @@
using System.Diagnostics;
using EphemeralMongo;
using MongoDB.Bson;
using MongoDB.Driver;
namespace StellaOps.Bench.LinkNotMerge.Vex;
@@ -29,38 +26,19 @@ internal sealed class VexScenarioRunner
var allocated = new double[iterations];
var observationThroughputs = new double[iterations];
var eventThroughputs = new double[iterations];
VexAggregationResult lastAggregation = new(0, 0, 0, Array.Empty<BsonDocument>());
VexAggregationResult lastAggregation = new(0, 0, 0, Array.Empty<VexEvent>());
for (var iteration = 0; iteration < iterations; iteration++)
{
cancellationToken.ThrowIfCancellationRequested();
using var runner = MongoRunner.Run(new MongoRunnerOptions
{
UseSingleNodeReplicaSet = false,
});
var client = new MongoClient(runner.ConnectionString);
var database = client.GetDatabase("linknotmerge_vex_bench");
var collection = database.GetCollection<BsonDocument>("vex_observations");
CreateIndexes(collection, cancellationToken);
var beforeAllocated = GC.GetTotalAllocatedBytes();
var insertStopwatch = Stopwatch.StartNew();
InsertObservations(collection, _seeds, _config.ResolveBatchSize(), cancellationToken);
var documents = InsertObservations(_seeds, _config.ResolveBatchSize(), cancellationToken);
insertStopwatch.Stop();
var correlationStopwatch = Stopwatch.StartNew();
var documents = collection
.Find(FilterDefinition<BsonDocument>.Empty)
.Project(Builders<BsonDocument>.Projection
.Include("tenant")
.Include("statements")
.Include("linkset"))
.ToList(cancellationToken);
var aggregator = new VexLinksetAggregator();
lastAggregation = aggregator.Correlate(documents);
correlationStopwatch.Stop();
@@ -95,44 +73,26 @@ internal sealed class VexScenarioRunner
AggregationResult: lastAggregation);
}
private static void InsertObservations(
IMongoCollection<BsonDocument> collection,
private static IReadOnlyList<VexObservationDocument> InsertObservations(
IReadOnlyList<VexObservationSeed> seeds,
int batchSize,
CancellationToken cancellationToken)
{
var documents = new List<VexObservationDocument>(seeds.Count);
for (var offset = 0; offset < seeds.Count; offset += batchSize)
{
cancellationToken.ThrowIfCancellationRequested();
var remaining = Math.Min(batchSize, seeds.Count - offset);
var batch = new List<BsonDocument>(remaining);
var batch = new List<VexObservationDocument>(remaining);
for (var index = 0; index < remaining; index++)
{
batch.Add(seeds[offset + index].ToBsonDocument());
batch.Add(seeds[offset + index].ToDocument());
}
collection.InsertMany(batch, new InsertManyOptions
{
IsOrdered = false,
BypassDocumentValidation = true,
}, cancellationToken);
documents.AddRange(batch);
}
}
private static void CreateIndexes(IMongoCollection<BsonDocument> collection, CancellationToken cancellationToken)
{
var indexKeys = Builders<BsonDocument>.IndexKeys
.Ascending("tenant")
.Ascending("linkset.aliases");
try
{
collection.Indexes.CreateOne(new CreateIndexModel<BsonDocument>(indexKeys), cancellationToken: cancellationToken);
}
catch
{
// non-fatal
}
return documents;
}
}

View File

@@ -19,7 +19,6 @@
<IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
<PrivateAssets>all</PrivateAssets>
</PackageReference>
<PackageReference Include="EphemeralMongo" Version="3.0.0" />
</ItemGroup>
<ItemGroup>

View File

@@ -1,7 +1,4 @@
using System.Diagnostics;
using EphemeralMongo;
using MongoDB.Bson;
using MongoDB.Driver;
namespace StellaOps.Bench.LinkNotMerge;
@@ -35,30 +32,12 @@ internal sealed class LinkNotMergeScenarioRunner
{
cancellationToken.ThrowIfCancellationRequested();
using var runner = MongoRunner.Run(new MongoRunnerOptions
{
UseSingleNodeReplicaSet = false,
});
var client = new MongoClient(runner.ConnectionString);
var database = client.GetDatabase("linknotmerge_bench");
var collection = database.GetCollection<BsonDocument>("advisory_observations");
CreateIndexes(collection, cancellationToken);
var beforeAllocated = GC.GetTotalAllocatedBytes();
var insertStopwatch = Stopwatch.StartNew();
InsertObservations(collection, _seeds, _config.ResolveBatchSize(), cancellationToken);
var documents = InsertObservations(_seeds, _config.ResolveBatchSize(), cancellationToken);
insertStopwatch.Stop();
var correlationStopwatch = Stopwatch.StartNew();
var documents = collection
.Find(FilterDefinition<BsonDocument>.Empty)
.Project(Builders<BsonDocument>.Projection
.Include("tenant")
.Include("linkset"))
.ToList(cancellationToken);
var correlator = new LinksetAggregator();
lastAggregation = correlator.Correlate(documents);
correlationStopwatch.Stop();
@@ -92,44 +71,26 @@ internal sealed class LinkNotMergeScenarioRunner
AggregationResult: lastAggregation);
}
private static void InsertObservations(
IMongoCollection<BsonDocument> collection,
private static IReadOnlyList<ObservationDocument> InsertObservations(
IReadOnlyList<ObservationSeed> seeds,
int batchSize,
CancellationToken cancellationToken)
{
var documents = new List<ObservationDocument>(seeds.Count);
for (var offset = 0; offset < seeds.Count; offset += batchSize)
{
cancellationToken.ThrowIfCancellationRequested();
var remaining = Math.Min(batchSize, seeds.Count - offset);
var batch = new List<BsonDocument>(remaining);
var batch = new List<ObservationDocument>(remaining);
for (var index = 0; index < remaining; index++)
{
batch.Add(seeds[offset + index].ToBsonDocument());
batch.Add(seeds[offset + index].ToDocument());
}
collection.InsertMany(batch, new InsertManyOptions
{
IsOrdered = false,
BypassDocumentValidation = true,
}, cancellationToken);
documents.AddRange(batch);
}
}
private static void CreateIndexes(IMongoCollection<BsonDocument> collection, CancellationToken cancellationToken)
{
var indexKeys = Builders<BsonDocument>.IndexKeys
.Ascending("tenant")
.Ascending("identifiers.aliases");
try
{
collection.Indexes.CreateOne(new CreateIndexModel<BsonDocument>(indexKeys), cancellationToken: cancellationToken);
}
catch
{
// Index creation failures should not abort the benchmark; they may occur when running multiple iterations concurrently.
}
return documents;
}
}

View File

@@ -1,10 +1,8 @@
using MongoDB.Bson;
namespace StellaOps.Bench.LinkNotMerge;
internal sealed class LinksetAggregator
{
public LinksetAggregationResult Correlate(IEnumerable<BsonDocument> documents)
public LinksetAggregationResult Correlate(IEnumerable<ObservationDocument> documents)
{
ArgumentNullException.ThrowIfNull(documents);
@@ -15,21 +13,16 @@ internal sealed class LinksetAggregator
{
totalObservations++;
var tenant = document.GetValue("tenant", "unknown").AsString;
var linkset = document.GetValue("linkset", new BsonDocument()).AsBsonDocument;
var aliases = linkset.GetValue("aliases", new BsonArray()).AsBsonArray;
var purls = linkset.GetValue("purls", new BsonArray()).AsBsonArray;
var cpes = linkset.GetValue("cpes", new BsonArray()).AsBsonArray;
var references = linkset.GetValue("references", new BsonArray()).AsBsonArray;
var tenant = document.Tenant;
var linkset = document.Linkset;
var aliases = linkset.Aliases;
var purls = linkset.Purls;
var cpes = linkset.Cpes;
var references = linkset.References;
foreach (var aliasValue in aliases)
{
if (!aliasValue.IsString)
{
continue;
}
var alias = aliasValue.AsString;
var alias = aliasValue;
var key = string.Create(alias.Length + tenant.Length + 1, (tenant, alias), static (span, data) =>
{
var (tenantValue, aliasValue) = data;
@@ -91,42 +84,30 @@ internal sealed class LinksetAggregator
public int ReferenceCount => _references.Count;
public void AddPurls(BsonArray array)
public void AddPurls(IEnumerable<string> array)
{
foreach (var item in array)
{
if (item.IsString)
{
_purls.Add(item.AsString);
}
if (!string.IsNullOrEmpty(item))
_purls.Add(item);
}
}
public void AddCpes(BsonArray array)
public void AddCpes(IEnumerable<string> array)
{
foreach (var item in array)
{
if (item.IsString)
{
_cpes.Add(item.AsString);
}
if (!string.IsNullOrEmpty(item))
_cpes.Add(item);
}
}
public void AddReferences(BsonArray array)
public void AddReferences(IEnumerable<ObservationReference> array)
{
foreach (var item in array)
{
if (!item.IsBsonDocument)
{
continue;
}
var document = item.AsBsonDocument;
if (document.TryGetValue("url", out var urlValue) && urlValue.IsString)
{
_references.Add(urlValue.AsString);
}
if (!string.IsNullOrEmpty(item.Url))
_references.Add(item.Url);
}
}
}

View File

@@ -1,270 +1,198 @@
using System.Collections.Immutable;
using System.Security.Cryptography;
using MongoDB.Bson;
namespace StellaOps.Bench.LinkNotMerge;
internal static class ObservationGenerator
{
public static IReadOnlyList<ObservationSeed> Generate(LinkNotMergeScenarioConfig config)
{
ArgumentNullException.ThrowIfNull(config);
var observationCount = config.ResolveObservationCount();
var aliasGroups = config.ResolveAliasGroups();
var purlsPerObservation = config.ResolvePurlsPerObservation();
var cpesPerObservation = config.ResolveCpesPerObservation();
var referencesPerObservation = config.ResolveReferencesPerObservation();
var tenantCount = config.ResolveTenantCount();
var seed = config.ResolveSeed();
var seeds = new ObservationSeed[observationCount];
var random = new Random(seed);
var baseTime = new DateTimeOffset(2025, 10, 1, 0, 0, 0, TimeSpan.Zero);
for (var index = 0; index < observationCount; index++)
{
var tenantIndex = index % tenantCount;
var tenant = $"tenant-{tenantIndex:D2}";
var group = index % aliasGroups;
var revision = index / aliasGroups;
var primaryAlias = $"CVE-2025-{group:D4}";
var vendorAlias = $"VENDOR-{group:D4}";
var thirdAlias = $"GHSA-{group:D4}-{(revision % 26 + 'a')}{(revision % 26 + 'a')}";
var aliases = ImmutableArray.Create(primaryAlias, vendorAlias, thirdAlias);
var observationId = $"{tenant}:advisory:{group:D5}:{revision:D6}";
var upstreamId = primaryAlias;
var documentVersion = baseTime.AddMinutes(revision).ToString("O");
var fetchedAt = baseTime.AddSeconds(index % 1_800);
var receivedAt = fetchedAt.AddSeconds(1);
var purls = CreatePurls(group, revision, purlsPerObservation);
var cpes = CreateCpes(group, revision, cpesPerObservation);
var references = CreateReferences(primaryAlias, referencesPerObservation);
var rawPayload = CreateRawPayload(primaryAlias, vendorAlias, purls, cpes, references);
var contentHash = ComputeContentHash(rawPayload, tenant, group, revision);
seeds[index] = new ObservationSeed(
ObservationId: observationId,
Tenant: tenant,
Vendor: "concelier-bench",
Stream: "simulated",
Api: $"https://bench.stella/{group:D4}/{revision:D2}",
CollectorVersion: "1.0.0-bench",
UpstreamId: upstreamId,
DocumentVersion: documentVersion,
FetchedAt: fetchedAt,
ReceivedAt: receivedAt,
ContentHash: contentHash,
Aliases: aliases,
Purls: purls,
Cpes: cpes,
References: references,
ContentFormat: "CSAF",
SpecVersion: "2.0",
RawPayload: rawPayload);
}
return seeds;
}
private static ImmutableArray<string> CreatePurls(int group, int revision, int count)
{
if (count <= 0)
{
return ImmutableArray<string>.Empty;
}
var builder = ImmutableArray.CreateBuilder<string>(count);
for (var index = 0; index < count; index++)
{
var version = $"{revision % 9 + 1}.{index + 1}.{group % 10}";
builder.Add($"pkg:generic/stella/sample-{group:D4}-{index}@{version}");
}
return builder.MoveToImmutable();
}
private static ImmutableArray<string> CreateCpes(int group, int revision, int count)
{
if (count <= 0)
{
return ImmutableArray<string>.Empty;
}
var builder = ImmutableArray.CreateBuilder<string>(count);
for (var index = 0; index < count; index++)
{
var component = $"benchtool{group % 50:D2}";
var version = $"{revision % 5}.{index}";
builder.Add($"cpe:2.3:a:stellaops:{component}:{version}:*:*:*:*:*:*:*");
}
return builder.MoveToImmutable();
}
private static ImmutableArray<ObservationReference> CreateReferences(string primaryAlias, int count)
{
if (count <= 0)
{
return ImmutableArray<ObservationReference>.Empty;
}
var builder = ImmutableArray.CreateBuilder<ObservationReference>(count);
for (var index = 0; index < count; index++)
{
builder.Add(new ObservationReference(
Type: index % 2 == 0 ? "advisory" : "patch",
Url: $"https://vendor.example/{primaryAlias.ToLowerInvariant()}/ref/{index:D2}"));
}
return builder.MoveToImmutable();
}
private static BsonDocument CreateRawPayload(
string primaryAlias,
string vendorAlias,
IReadOnlyCollection<string> purls,
IReadOnlyCollection<string> cpes,
IReadOnlyCollection<ObservationReference> references)
{
var document = new BsonDocument
{
["id"] = primaryAlias,
["vendorId"] = vendorAlias,
["title"] = $"Simulated advisory {primaryAlias}",
["summary"] = "Synthetic payload produced by Link-Not-Merge benchmark.",
["metrics"] = new BsonArray
{
new BsonDocument
{
["kind"] = "cvss:v3.1",
["score"] = 7.5,
["vector"] = "CVSS:3.1/AV:N/AC:L/PR:N/UI:N/S:U/C:H/I:N/A:N",
},
},
};
if (purls.Count > 0)
{
document["purls"] = new BsonArray(purls);
}
if (cpes.Count > 0)
{
document["cpes"] = new BsonArray(cpes);
}
if (references.Count > 0)
{
document["references"] = new BsonArray(references.Select(reference => new BsonDocument
{
["type"] = reference.Type,
["url"] = reference.Url,
}));
}
return document;
}
private static string ComputeContentHash(BsonDocument rawPayload, string tenant, int group, int revision)
{
using var sha256 = SHA256.Create();
var seed = $"{tenant}|{group}|{revision}";
var rawBytes = rawPayload.ToBson();
var seedBytes = System.Text.Encoding.UTF8.GetBytes(seed);
var combined = new byte[rawBytes.Length + seedBytes.Length];
Buffer.BlockCopy(rawBytes, 0, combined, 0, rawBytes.Length);
Buffer.BlockCopy(seedBytes, 0, combined, rawBytes.Length, seedBytes.Length);
var hash = sha256.ComputeHash(combined);
return $"sha256:{Convert.ToHexString(hash)}";
}
}
internal sealed record ObservationSeed(
string ObservationId,
string Tenant,
string Vendor,
string Stream,
string Api,
string CollectorVersion,
string UpstreamId,
string DocumentVersion,
DateTimeOffset FetchedAt,
DateTimeOffset ReceivedAt,
string ContentHash,
ImmutableArray<string> Aliases,
ImmutableArray<string> Purls,
ImmutableArray<string> Cpes,
ImmutableArray<ObservationReference> References,
string ContentFormat,
string SpecVersion,
BsonDocument RawPayload)
{
public BsonDocument ToBsonDocument()
{
var aliases = new BsonArray(Aliases.Select(alias => alias));
var purls = new BsonArray(Purls.Select(purl => purl));
var cpes = new BsonArray(Cpes.Select(cpe => cpe));
var references = new BsonArray(References.Select(reference => new BsonDocument
{
["type"] = reference.Type,
["url"] = reference.Url,
}));
var document = new BsonDocument
{
["_id"] = ObservationId,
["tenant"] = Tenant,
["source"] = new BsonDocument
{
["vendor"] = Vendor,
["stream"] = Stream,
["api"] = Api,
["collector_version"] = CollectorVersion,
},
["upstream"] = new BsonDocument
{
["upstream_id"] = UpstreamId,
["document_version"] = DocumentVersion,
["fetched_at"] = FetchedAt.UtcDateTime,
["received_at"] = ReceivedAt.UtcDateTime,
["content_hash"] = ContentHash,
["signature"] = new BsonDocument
{
["present"] = false,
["format"] = BsonNull.Value,
["key_id"] = BsonNull.Value,
["signature"] = BsonNull.Value,
},
},
["content"] = new BsonDocument
{
["format"] = ContentFormat,
["spec_version"] = SpecVersion,
["raw"] = RawPayload,
},
["identifiers"] = new BsonDocument
{
["aliases"] = aliases,
["primary"] = UpstreamId,
["cve"] = Aliases.FirstOrDefault(alias => alias.StartsWith("CVE-", StringComparison.Ordinal)) ?? UpstreamId,
},
["linkset"] = new BsonDocument
{
["aliases"] = aliases,
["purls"] = purls,
["cpes"] = cpes,
["references"] = references,
["reconciled_from"] = new BsonArray { "/content/product_tree" },
},
["supersedes"] = BsonNull.Value,
};
return document;
}
}
internal sealed record ObservationReference(string Type, string Url);
using System.Collections.Immutable;
using System.Security.Cryptography;
using System.Text;
namespace StellaOps.Bench.LinkNotMerge;
/// <summary>
/// Generates deterministic synthetic observation seeds for the Link-Not-Merge
/// benchmark. Every value is derived arithmetically from the loop index, so
/// repeated runs with the same config produce byte-identical data.
/// </summary>
internal static class ObservationGenerator
{
    /// <summary>
    /// Builds <c>observationCount</c> seeds, cycling tenants and alias groups by
    /// index. <paramref name="config"/> supplies all sizing knobs.
    /// </summary>
    public static IReadOnlyList<ObservationSeed> Generate(LinkNotMergeScenarioConfig config)
    {
        ArgumentNullException.ThrowIfNull(config);
        var observationCount = config.ResolveObservationCount();
        var aliasGroups = config.ResolveAliasGroups();
        var purlsPerObservation = config.ResolvePurlsPerObservation();
        var cpesPerObservation = config.ResolveCpesPerObservation();
        var referencesPerObservation = config.ResolveReferencesPerObservation();
        var tenantCount = config.ResolveTenantCount();
        // Resolved for its validation side effect only (presumably it throws on a
        // bad config — TODO confirm); generation below is index-driven and the old
        // 'var random = new Random(seed);' local was never used.
        _ = config.ResolveSeed();
        var seeds = new ObservationSeed[observationCount];
        var baseTime = new DateTimeOffset(2025, 10, 1, 0, 0, 0, TimeSpan.Zero);
        for (var index = 0; index < observationCount; index++)
        {
            var tenantIndex = index % tenantCount;
            var tenant = $"tenant-{tenantIndex:D2}";
            var group = index % aliasGroups;
            var revision = index / aliasGroups;
            var primaryAlias = $"CVE-2025-{group:D4}";
            var vendorAlias = $"VENDOR-{group:D4}";
            // Fix: 'revision % 26 + 'a'' is int arithmetic, so the previous code
            // interpolated a number (e.g. "GHSA-0001-9797") instead of a letter
            // pair; cast to char to get the intended "GHSA-0001-aa" shape.
            var aliasLetter = (char)(revision % 26 + 'a');
            var thirdAlias = $"GHSA-{group:D4}-{aliasLetter}{aliasLetter}";
            var aliases = ImmutableArray.Create(primaryAlias, vendorAlias, thirdAlias);
            var observationId = $"{tenant}:advisory:{group:D5}:{revision:D6}";
            var upstreamId = primaryAlias;
            var documentVersion = baseTime.AddMinutes(revision).ToString("O");
            var fetchedAt = baseTime.AddSeconds(index % 1_800);
            var receivedAt = fetchedAt.AddSeconds(1);
            var purls = CreatePurls(group, revision, purlsPerObservation);
            var cpes = CreateCpes(group, revision, cpesPerObservation);
            var references = CreateReferences(primaryAlias, referencesPerObservation);
            var contentHash = ComputeContentHash(primaryAlias, vendorAlias, purls, cpes, references, tenant, group, revision);
            seeds[index] = new ObservationSeed(
                ObservationId: observationId,
                Tenant: tenant,
                Vendor: "concelier-bench",
                Stream: "simulated",
                Api: $"https://bench.stella/{group:D4}/{revision:D2}",
                CollectorVersion: "1.0.0-bench",
                UpstreamId: upstreamId,
                DocumentVersion: documentVersion,
                FetchedAt: fetchedAt,
                ReceivedAt: receivedAt,
                ContentHash: contentHash,
                Aliases: aliases,
                Purls: purls,
                Cpes: cpes,
                References: references,
                ContentFormat: "CSAF",
                SpecVersion: "2.0");
        }
        return seeds;
    }

    // Deterministic pkg:generic purls; version varies with revision/index/group.
    private static ImmutableArray<string> CreatePurls(int group, int revision, int count)
    {
        if (count <= 0)
        {
            return ImmutableArray<string>.Empty;
        }
        var builder = ImmutableArray.CreateBuilder<string>(count);
        for (var index = 0; index < count; index++)
        {
            var version = $"{revision % 9 + 1}.{index + 1}.{group % 10}";
            builder.Add($"pkg:generic/stella/sample-{group:D4}-{index}@{version}");
        }
        return builder.MoveToImmutable();
    }

    // Deterministic CPE 2.3 strings for the same synthetic product family.
    private static ImmutableArray<string> CreateCpes(int group, int revision, int count)
    {
        if (count <= 0)
        {
            return ImmutableArray<string>.Empty;
        }
        var builder = ImmutableArray.CreateBuilder<string>(count);
        for (var index = 0; index < count; index++)
        {
            var component = $"benchtool{group % 50:D2}";
            var version = $"{revision % 5}.{index}";
            builder.Add($"cpe:2.3:a:stellaops:{component}:{version}:*:*:*:*:*:*:*");
        }
        return builder.MoveToImmutable();
    }

    // Alternating advisory/patch references keyed off the primary alias.
    private static ImmutableArray<ObservationReference> CreateReferences(string primaryAlias, int count)
    {
        if (count <= 0)
        {
            return ImmutableArray<ObservationReference>.Empty;
        }
        var builder = ImmutableArray.CreateBuilder<ObservationReference>(count);
        for (var index = 0; index < count; index++)
        {
            builder.Add(new ObservationReference(
                Type: index % 2 == 0 ? "advisory" : "patch",
                Url: $"https://vendor.example/{primaryAlias.ToLowerInvariant()}/ref/{index:D2}"));
        }
        return builder.MoveToImmutable();
    }

    /// <summary>
    /// SHA-256 over a '|'-delimited concatenation of all identifying fields.
    /// Not a security boundary — just a stable, content-derived identifier.
    /// </summary>
    private static string ComputeContentHash(
        string primaryAlias,
        string vendorAlias,
        IReadOnlyCollection<string> purls,
        IReadOnlyCollection<string> cpes,
        IReadOnlyCollection<ObservationReference> references,
        string tenant,
        int group,
        int revision)
    {
        using var sha256 = SHA256.Create();
        var builder = new StringBuilder();
        builder.Append(tenant).Append('|').Append(group).Append('|').Append(revision).Append('|');
        builder.Append(primaryAlias).Append('|').Append(vendorAlias).Append('|');
        foreach (var purl in purls)
        {
            builder.Append(purl).Append('|');
        }
        foreach (var cpe in cpes)
        {
            builder.Append(cpe).Append('|');
        }
        foreach (var reference in references)
        {
            builder.Append(reference.Type).Append(':').Append(reference.Url).Append('|');
        }
        var data = Encoding.UTF8.GetBytes(builder.ToString());
        var hash = sha256.ComputeHash(data);
        return $"sha256:{Convert.ToHexString(hash)}";
    }
}
/// <summary>
/// Immutable seed describing one synthetic advisory observation produced by
/// <c>ObservationGenerator</c>. Field names mirror the benchmark document shape.
/// </summary>
internal sealed record ObservationSeed(
    string ObservationId,
    string Tenant,
    string Vendor,
    string Stream,
    string Api,
    string CollectorVersion,
    string UpstreamId,
    string DocumentVersion,
    DateTimeOffset FetchedAt,
    DateTimeOffset ReceivedAt,
    string ContentHash,
    ImmutableArray<string> Aliases,
    ImmutableArray<string> Purls,
    ImmutableArray<string> Cpes,
    ImmutableArray<ObservationReference> References,
    string ContentFormat,
    string SpecVersion)
{
    /// <summary>
    /// Projects this seed into the minimal shape consumed by the aggregator
    /// (tenant plus linkset; the remaining fields are not needed there).
    /// </summary>
    public ObservationDocument ToDocument()
    {
        return new ObservationDocument(
            Tenant,
            new LinksetDocument(
                Aliases,
                Purls,
                Cpes,
                References));
    }
}
/// <summary>Aggregator input: a tenant paired with its linkset.</summary>
internal sealed record ObservationDocument(string Tenant, LinksetDocument Linkset);

/// <summary>Correlation identifiers carried by one observation.</summary>
internal sealed record LinksetDocument(
    ImmutableArray<string> Aliases,
    ImmutableArray<string> Purls,
    ImmutableArray<string> Cpes,
    ImmutableArray<ObservationReference> References);

/// <summary>External reference: a type tag (e.g. "advisory", "patch") and URL.</summary>
internal sealed record ObservationReference(string Type, string Url);

View File

@@ -9,8 +9,6 @@
</PropertyGroup>
<ItemGroup>
<PackageReference Include="MongoDB.Driver" Version="3.5.0" />
<PackageReference Include="EphemeralMongo" Version="3.0.0" />
<PackageReference Include="Microsoft.Extensions.Logging.Abstractions" Version="10.0.0" />
</ItemGroup>
</Project>

View File

@@ -1,35 +0,0 @@
using StellaOps.IssuerDirectory.Core.Abstractions;
using StellaOps.IssuerDirectory.Core.Domain;
using StellaOps.IssuerDirectory.Infrastructure.Documents;
using StellaOps.IssuerDirectory.Infrastructure.Internal;
namespace StellaOps.IssuerDirectory.Infrastructure.Audit;
/// <summary>
/// Audit sink that persists issuer audit entries to the MongoDB audit collection.
/// </summary>
public sealed class MongoIssuerAuditSink : IIssuerAuditSink
{
    private readonly IssuerDirectoryMongoContext _context;

    public MongoIssuerAuditSink(IssuerDirectoryMongoContext context)
    {
        _context = context ?? throw new ArgumentNullException(nameof(context));
    }

    /// <summary>
    /// Maps <paramref name="entry"/> to its document shape and appends it to the
    /// audit collection. Entries are append-only; each write gets a fresh id.
    /// </summary>
    public async Task WriteAsync(IssuerAuditEntry entry, CancellationToken cancellationToken)
    {
        ArgumentNullException.ThrowIfNull(entry);
        var document = new IssuerAuditDocument
        {
            Id = Guid.NewGuid().ToString("N"),
            TenantId = entry.TenantId,
            IssuerId = entry.IssuerId,
            Action = entry.Action,
            TimestampUtc = entry.TimestampUtc,
            Actor = entry.Actor,
            Reason = entry.Reason,
            // Copy detaches the stored map from the caller's dictionary; keys are
            // compared case-insensitively to match the document's default.
            Metadata = new Dictionary<string, string>(entry.Metadata, StringComparer.OrdinalIgnoreCase)
        };
        await _context.Audits.InsertOneAsync(document, cancellationToken: cancellationToken).ConfigureAwait(false);
    }
}

View File

@@ -1,31 +0,0 @@
using MongoDB.Bson.Serialization.Attributes;
namespace StellaOps.IssuerDirectory.Infrastructure.Documents;
/// <summary>
/// Mongo document for one issuer audit entry. Field names are fixed by the
/// BsonElement attributes; extra elements in stored documents are ignored.
/// </summary>
[BsonIgnoreExtraElements]
public sealed class IssuerAuditDocument
{
    [BsonId]
    public string Id { get; set; } = Guid.NewGuid().ToString("N");

    [BsonElement("tenant_id")]
    public string TenantId { get; set; } = string.Empty;

    [BsonElement("issuer_id")]
    public string IssuerId { get; set; } = string.Empty;

    [BsonElement("action")]
    public string Action { get; set; } = string.Empty;

    [BsonElement("timestamp")]
    public DateTimeOffset TimestampUtc { get; set; }

    [BsonElement("actor")]
    public string Actor { get; set; } = string.Empty;

    [BsonElement("reason")]
    public string? Reason { get; set; }

    // Case-insensitive keys by default, matching the sink that populates it.
    [BsonElement("metadata")]
    public Dictionary<string, string> Metadata { get; set; } = new(StringComparer.OrdinalIgnoreCase);
}

View File

@@ -1,103 +0,0 @@
using MongoDB.Bson.Serialization.Attributes;
namespace StellaOps.IssuerDirectory.Infrastructure.Documents;
/// <summary>
/// Mongo document for an issuer record, including contact details, metadata,
/// endpoints, tags, and create/update audit fields.
/// </summary>
[BsonIgnoreExtraElements]
public sealed class IssuerDocument
{
    [BsonId]
    public string Id { get; set; } = string.Empty;

    [BsonElement("tenant_id")]
    public string TenantId { get; set; } = string.Empty;

    [BsonElement("display_name")]
    public string DisplayName { get; set; } = string.Empty;

    // Unique per tenant (see the tenant_slug_unique index in the Mongo context).
    [BsonElement("slug")]
    public string Slug { get; set; } = string.Empty;

    [BsonElement("description")]
    public string? Description { get; set; }

    [BsonElement("contact")]
    public IssuerContactDocument Contact { get; set; } = new();

    [BsonElement("metadata")]
    public IssuerMetadataDocument Metadata { get; set; } = new();

    [BsonElement("endpoints")]
    public List<IssuerEndpointDocument> Endpoints { get; set; } = new();

    [BsonElement("tags")]
    public List<string> Tags { get; set; } = new();

    [BsonElement("created_at")]
    public DateTimeOffset CreatedAtUtc { get; set; }

    [BsonElement("created_by")]
    public string CreatedBy { get; set; } = string.Empty;

    [BsonElement("updated_at")]
    public DateTimeOffset UpdatedAtUtc { get; set; }

    [BsonElement("updated_by")]
    public string UpdatedBy { get; set; } = string.Empty;

    // True when the record was provisioned by system seeding rather than a user.
    [BsonElement("is_seed")]
    public bool IsSystemSeed { get; set; }
}

/// <summary>Embedded contact details for an issuer.</summary>
[BsonIgnoreExtraElements]
public sealed class IssuerContactDocument
{
    [BsonElement("email")]
    public string? Email { get; set; }

    [BsonElement("phone")]
    public string? Phone { get; set; }

    [BsonElement("website")]
    public string? Website { get; set; }

    [BsonElement("timezone")]
    public string? Timezone { get; set; }
}

/// <summary>Embedded advisory-ecosystem metadata (CVE.org, CSAF, catalogs).</summary>
[BsonIgnoreExtraElements]
public sealed class IssuerMetadataDocument
{
    [BsonElement("cve_org_id")]
    public string? CveOrgId { get; set; }

    [BsonElement("csaf_publisher_id")]
    public string? CsafPublisherId { get; set; }

    [BsonElement("security_advisories_url")]
    public string? SecurityAdvisoriesUrl { get; set; }

    [BsonElement("catalog_url")]
    public string? CatalogUrl { get; set; }

    [BsonElement("languages")]
    public List<string> Languages { get; set; } = new();

    // Free-form key/value attributes; keys compared case-insensitively.
    [BsonElement("attributes")]
    public Dictionary<string, string> Attributes { get; set; } = new(StringComparer.OrdinalIgnoreCase);
}

/// <summary>Embedded description of one issuer endpoint (feed, API, …).</summary>
[BsonIgnoreExtraElements]
public sealed class IssuerEndpointDocument
{
    [BsonElement("kind")]
    public string Kind { get; set; } = string.Empty;

    [BsonElement("url")]
    public string Url { get; set; } = string.Empty;

    [BsonElement("format")]
    public string? Format { get; set; }

    [BsonElement("requires_auth")]
    public bool RequiresAuthentication { get; set; }
}

View File

@@ -1,55 +0,0 @@
using MongoDB.Bson.Serialization.Attributes;
namespace StellaOps.IssuerDirectory.Infrastructure.Documents;
/// <summary>
/// Mongo document for an issuer signing/verification key, including its
/// material, lifecycle timestamps, and rotation link.
/// </summary>
[BsonIgnoreExtraElements]
public sealed class IssuerKeyDocument
{
    [BsonId]
    public string Id { get; set; } = string.Empty;

    [BsonElement("issuer_id")]
    public string IssuerId { get; set; } = string.Empty;

    [BsonElement("tenant_id")]
    public string TenantId { get; set; } = string.Empty;

    [BsonElement("type")]
    public string Type { get; set; } = string.Empty;

    [BsonElement("status")]
    public string Status { get; set; } = string.Empty;

    // Encoding of the material (e.g. PEM/JWK — TODO confirm against the domain model).
    [BsonElement("material_format")]
    public string MaterialFormat { get; set; } = string.Empty;

    [BsonElement("material_value")]
    public string MaterialValue { get; set; } = string.Empty;

    // Unique per (tenant, issuer); see issuer_keys_fingerprint index.
    [BsonElement("fingerprint")]
    public string Fingerprint { get; set; } = string.Empty;

    [BsonElement("created_at")]
    public DateTimeOffset CreatedAtUtc { get; set; }

    [BsonElement("created_by")]
    public string CreatedBy { get; set; } = string.Empty;

    [BsonElement("updated_at")]
    public DateTimeOffset UpdatedAtUtc { get; set; }

    [BsonElement("updated_by")]
    public string UpdatedBy { get; set; } = string.Empty;

    [BsonElement("expires_at")]
    public DateTimeOffset? ExpiresAtUtc { get; set; }

    [BsonElement("retired_at")]
    public DateTimeOffset? RetiredAtUtc { get; set; }

    [BsonElement("revoked_at")]
    public DateTimeOffset? RevokedAtUtc { get; set; }

    // Id of the key this one superseded during rotation, if any.
    [BsonElement("replaces_key_id")]
    public string? ReplacesKeyId { get; set; }
}

View File

@@ -1,34 +0,0 @@
using MongoDB.Bson.Serialization.Attributes;
namespace StellaOps.IssuerDirectory.Infrastructure.Documents;
/// <summary>
/// Mongo document for a per-tenant trust-weight override applied to an issuer.
/// </summary>
[BsonIgnoreExtraElements]
public sealed class IssuerTrustDocument
{
    [BsonId]
    public string Id { get; set; } = string.Empty;

    [BsonElement("issuer_id")]
    public string IssuerId { get; set; } = string.Empty;

    [BsonElement("tenant_id")]
    public string TenantId { get; set; } = string.Empty;

    // Override weight; valid range is enforced elsewhere (not visible here).
    [BsonElement("weight")]
    public decimal Weight { get; set; }

    [BsonElement("reason")]
    public string? Reason { get; set; }

    [BsonElement("created_at")]
    public DateTimeOffset CreatedAtUtc { get; set; }

    [BsonElement("created_by")]
    public string CreatedBy { get; set; } = string.Empty;

    [BsonElement("updated_at")]
    public DateTimeOffset UpdatedAtUtc { get; set; }

    [BsonElement("updated_by")]
    public string UpdatedBy { get; set; } = string.Empty;
}

View File

@@ -0,0 +1,27 @@
using System.Collections.Concurrent;
using StellaOps.IssuerDirectory.Core.Abstractions;
using StellaOps.IssuerDirectory.Core.Domain;
namespace StellaOps.IssuerDirectory.Infrastructure.InMemory;
/// <summary>
/// In-memory audit sink; retains last N entries for inspection/testing.
/// </summary>
internal sealed class InMemoryIssuerAuditSink : IIssuerAuditSink
{
    private const int MaxEntries = 1024;

    private readonly ConcurrentQueue<IssuerAuditEntry> _entries = new();

    /// <summary>Appends <paramref name="entry"/> and trims the retained window.</summary>
    public Task WriteAsync(IssuerAuditEntry entry, CancellationToken cancellationToken)
    {
        ArgumentNullException.ThrowIfNull(entry);
        _entries.Enqueue(entry);
        TrimToCapacity();
        return Task.CompletedTask;
    }

    // Drops oldest entries until the queue is back within MaxEntries, bounding memory.
    private void TrimToCapacity()
    {
        while (_entries.Count > MaxEntries)
        {
            if (!_entries.TryDequeue(out _))
            {
                break;
            }
        }
    }
}

View File

@@ -0,0 +1,88 @@
using System.Collections.Concurrent;
using StellaOps.IssuerDirectory.Core.Abstractions;
using StellaOps.IssuerDirectory.Core.Domain;
namespace StellaOps.IssuerDirectory.Infrastructure.InMemory;
/// <summary>
/// Deterministic in-memory issuer key store used as a Mongo replacement.
/// Keys are bucketed by "{tenantId}|{issuerId}"; list results are ordered
/// ordinally so repeated calls return identical sequences.
/// </summary>
internal sealed class InMemoryIssuerKeyRepository : IIssuerKeyRepository
{
    private readonly ConcurrentDictionary<string, ConcurrentDictionary<string, IssuerKeyRecord>> _keys = new(StringComparer.Ordinal);

    /// <summary>Looks up a key by id within the tenant/issuer bucket; null when absent.</summary>
    public Task<IssuerKeyRecord?> GetAsync(string tenantId, string issuerId, string keyId, CancellationToken cancellationToken)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(tenantId);
        ArgumentException.ThrowIfNullOrWhiteSpace(issuerId);
        ArgumentException.ThrowIfNullOrWhiteSpace(keyId);
        var bucketKey = GetBucketKey(tenantId, issuerId);
        if (_keys.TryGetValue(bucketKey, out var map) && map.TryGetValue(keyId, out var record))
        {
            return Task.FromResult<IssuerKeyRecord?>(record);
        }
        return Task.FromResult<IssuerKeyRecord?>(null);
    }

    /// <summary>Looks up a key by fingerprint (ordinal match); null when absent.</summary>
    public Task<IssuerKeyRecord?> GetByFingerprintAsync(string tenantId, string issuerId, string fingerprint, CancellationToken cancellationToken)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(tenantId);
        ArgumentException.ThrowIfNullOrWhiteSpace(issuerId);
        ArgumentException.ThrowIfNullOrWhiteSpace(fingerprint);
        var bucketKey = GetBucketKey(tenantId, issuerId);
        if (_keys.TryGetValue(bucketKey, out var map))
        {
            // Fix: ConcurrentDictionary.Values enumerates in an unspecified order,
            // so FirstOrDefault over it was nondeterministic when multiple keys
            // share a fingerprint. Order by Id to honor the "deterministic" contract.
            var match = map.Values
                .Where(key => string.Equals(key.Fingerprint, fingerprint, StringComparison.Ordinal))
                .OrderBy(key => key.Id, StringComparer.Ordinal)
                .FirstOrDefault();
            return Task.FromResult<IssuerKeyRecord?>(match);
        }
        return Task.FromResult<IssuerKeyRecord?>(null);
    }

    /// <summary>Lists a bucket's keys ordered by Id; empty when the bucket is absent.</summary>
    public Task<IReadOnlyCollection<IssuerKeyRecord>> ListAsync(string tenantId, string issuerId, CancellationToken cancellationToken)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(tenantId);
        ArgumentException.ThrowIfNullOrWhiteSpace(issuerId);
        var bucketKey = GetBucketKey(tenantId, issuerId);
        if (_keys.TryGetValue(bucketKey, out var map))
        {
            var ordered = map.Values.OrderBy(k => k.Id, StringComparer.Ordinal).ToArray();
            return Task.FromResult<IReadOnlyCollection<IssuerKeyRecord>>(ordered);
        }
        return Task.FromResult<IReadOnlyCollection<IssuerKeyRecord>>(Array.Empty<IssuerKeyRecord>());
    }

    /// <summary>Lists an issuer's keys across all tenants, ordered by tenant then Id.</summary>
    public Task<IReadOnlyCollection<IssuerKeyRecord>> ListGlobalAsync(string issuerId, CancellationToken cancellationToken)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(issuerId);
        var all = _keys.Values
            .SelectMany(dict => dict.Values)
            .Where(k => string.Equals(k.IssuerId, issuerId, StringComparison.Ordinal))
            .OrderBy(k => k.TenantId, StringComparer.Ordinal)
            .ThenBy(k => k.Id, StringComparer.Ordinal)
            .ToArray();
        return Task.FromResult<IReadOnlyCollection<IssuerKeyRecord>>(all);
    }

    /// <summary>Inserts or replaces the record keyed by its Id within its bucket.</summary>
    public Task UpsertAsync(IssuerKeyRecord record, CancellationToken cancellationToken)
    {
        ArgumentNullException.ThrowIfNull(record);
        var bucketKey = GetBucketKey(record.TenantId, record.IssuerId);
        var map = _keys.GetOrAdd(bucketKey, _ => new ConcurrentDictionary<string, IssuerKeyRecord>(StringComparer.Ordinal));
        map.AddOrUpdate(record.Id, record, (_, _) => record);
        return Task.CompletedTask;
    }

    // Composite bucket key; '|' separator assumes ids contain no pipes — TODO confirm.
    private static string GetBucketKey(string tenantId, string issuerId)
    {
        return $"{tenantId}|{issuerId}";
    }
}

View File

@@ -0,0 +1,72 @@
using System.Collections.Concurrent;
using StellaOps.IssuerDirectory.Core.Abstractions;
using StellaOps.IssuerDirectory.Core.Domain;
namespace StellaOps.IssuerDirectory.Infrastructure.InMemory;
/// <summary>
/// Deterministic in-memory issuer store used as a Mongo replacement.
/// Records are grouped per tenant; list results use ordinal ordering.
/// </summary>
internal sealed class InMemoryIssuerRepository : IIssuerRepository
{
    private readonly ConcurrentDictionary<string, ConcurrentDictionary<string, IssuerRecord>> _issuers = new(StringComparer.Ordinal);

    /// <summary>Fetches one issuer for a tenant; null when either is unknown.</summary>
    public Task<IssuerRecord?> GetAsync(string tenantId, string issuerId, CancellationToken cancellationToken)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(tenantId);
        ArgumentException.ThrowIfNullOrWhiteSpace(issuerId);
        IssuerRecord? found = null;
        if (_issuers.TryGetValue(tenantId, out var tenantIssuers))
        {
            tenantIssuers.TryGetValue(issuerId, out found);
        }
        return Task.FromResult(found);
    }

    /// <summary>Lists a tenant's issuers ordered by Id; empty for unknown tenants.</summary>
    public Task<IReadOnlyCollection<IssuerRecord>> ListAsync(string tenantId, CancellationToken cancellationToken)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(tenantId);
        IReadOnlyCollection<IssuerRecord> records = _issuers.TryGetValue(tenantId, out var tenantIssuers)
            ? tenantIssuers.Values.OrderBy(record => record.Id, StringComparer.Ordinal).ToArray()
            : Array.Empty<IssuerRecord>();
        return Task.FromResult(records);
    }

    /// <summary>Lists every issuer across tenants, ordered by tenant then Id.</summary>
    public Task<IReadOnlyCollection<IssuerRecord>> ListGlobalAsync(CancellationToken cancellationToken)
    {
        IReadOnlyCollection<IssuerRecord> records = _issuers.Values
            .SelectMany(tenantIssuers => tenantIssuers.Values)
            .OrderBy(record => record.TenantId, StringComparer.Ordinal)
            .ThenBy(record => record.Id, StringComparer.Ordinal)
            .ToArray();
        return Task.FromResult(records);
    }

    /// <summary>Inserts or replaces the record within its tenant bucket.</summary>
    public Task UpsertAsync(IssuerRecord record, CancellationToken cancellationToken)
    {
        ArgumentNullException.ThrowIfNull(record);
        _issuers
            .GetOrAdd(record.TenantId, _ => new ConcurrentDictionary<string, IssuerRecord>(StringComparer.Ordinal))
            .AddOrUpdate(record.Id, record, (_, _) => record);
        return Task.CompletedTask;
    }

    /// <summary>Removes the issuer if present; no-op otherwise.</summary>
    public Task DeleteAsync(string tenantId, string issuerId, CancellationToken cancellationToken)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(tenantId);
        ArgumentException.ThrowIfNullOrWhiteSpace(issuerId);
        if (_issuers.TryGetValue(tenantId, out var tenantIssuers))
        {
            tenantIssuers.TryRemove(issuerId, out _);
        }
        return Task.CompletedTask;
    }
}

View File

@@ -0,0 +1,42 @@
using System.Collections.Concurrent;
using StellaOps.IssuerDirectory.Core.Abstractions;
using StellaOps.IssuerDirectory.Core.Domain;
namespace StellaOps.IssuerDirectory.Infrastructure.InMemory;
/// <summary>
/// Deterministic in-memory trust override store used as a Mongo replacement.
/// One override per (tenant, issuer), keyed by a composite string.
/// </summary>
internal sealed class InMemoryIssuerTrustRepository : IIssuerTrustRepository
{
    private readonly ConcurrentDictionary<string, IssuerTrustOverrideRecord> _trust = new(StringComparer.Ordinal);

    /// <summary>Returns the override for the pair, or null when none is set.</summary>
    public Task<IssuerTrustOverrideRecord?> GetAsync(string tenantId, string issuerId, CancellationToken cancellationToken)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(tenantId);
        ArgumentException.ThrowIfNullOrWhiteSpace(issuerId);
        _trust.TryGetValue(GetKey(tenantId, issuerId), out var record);
        return Task.FromResult(record);
    }

    /// <summary>Inserts or replaces the override for the record's (tenant, issuer) pair.</summary>
    public Task UpsertAsync(IssuerTrustOverrideRecord record, CancellationToken cancellationToken)
    {
        ArgumentNullException.ThrowIfNull(record);
        _trust[GetKey(record.TenantId, record.IssuerId)] = record;
        return Task.CompletedTask;
    }

    /// <summary>Removes the override if present; no-op otherwise.</summary>
    public Task DeleteAsync(string tenantId, string issuerId, CancellationToken cancellationToken)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(tenantId);
        ArgumentException.ThrowIfNullOrWhiteSpace(issuerId);
        _trust.TryRemove(GetKey(tenantId, issuerId), out _);
        return Task.CompletedTask;
    }

    // Composite key mirrors the other in-memory stores' "{tenant}|{issuer}" format.
    private static string GetKey(string tenantId, string issuerId) => $"{tenantId}|{issuerId}";
}

View File

@@ -1,103 +0,0 @@
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Options;
using MongoDB.Driver;
using StellaOps.IssuerDirectory.Infrastructure.Documents;
using StellaOps.IssuerDirectory.Infrastructure.Options;
namespace StellaOps.IssuerDirectory.Infrastructure.Internal;
/// <summary>
/// MongoDB context for Issuer Directory persistence.
/// Resolves the four collections from configuration and ensures their indexes
/// at construction time.
/// </summary>
public sealed class IssuerDirectoryMongoContext
{
    public IssuerDirectoryMongoContext(
        IOptions<IssuerDirectoryMongoOptions> options,
        ILogger<IssuerDirectoryMongoContext> logger)
    {
        ArgumentNullException.ThrowIfNull(options);
        ArgumentNullException.ThrowIfNull(logger);
        var value = options.Value ?? throw new InvalidOperationException("Mongo options must be provided.");
        // Fails fast on missing connection string / database / collection names.
        value.Validate();
        var mongoUrl = new MongoUrl(value.ConnectionString);
        var settings = MongoClientSettings.FromUrl(mongoUrl);
        // Harden TLS connections by also checking certificate revocation.
        if (mongoUrl.UseTls is true && settings.SslSettings is not null)
        {
            settings.SslSettings.CheckCertificateRevocation = true;
        }
        var client = new MongoClient(settings);
        var database = client.GetDatabase(value.Database);
        logger.LogDebug("IssuerDirectory Mongo connected to {Database}", value.Database);
        Issuers = database.GetCollection<IssuerDocument>(value.IssuersCollection);
        IssuerKeys = database.GetCollection<IssuerKeyDocument>(value.IssuerKeysCollection);
        IssuerTrustOverrides = database.GetCollection<IssuerTrustDocument>(value.IssuerTrustCollection);
        Audits = database.GetCollection<IssuerAuditDocument>(value.AuditCollection);
        // NOTE(review): sync-over-async in a constructor — this blocks the calling
        // thread until index creation completes (and can deadlock under a sync
        // context). Consider moving to an async initializer / hosted service.
        EnsureIndexes().GetAwaiter().GetResult();
    }

    public IMongoCollection<IssuerDocument> Issuers { get; }

    public IMongoCollection<IssuerKeyDocument> IssuerKeys { get; }

    public IMongoCollection<IssuerTrustDocument> IssuerTrustOverrides { get; }

    public IMongoCollection<IssuerAuditDocument> Audits { get; }

    /// <summary>
    /// Creates the unique indexes the repositories rely on; Mongo treats
    /// re-creation of an identical index as a no-op on subsequent startups.
    /// </summary>
    private async Task EnsureIndexes()
    {
        // One slug per tenant.
        var tenantSlugIndex = new CreateIndexModel<IssuerDocument>(
            Builders<IssuerDocument>.IndexKeys
                .Ascending(document => document.TenantId)
                .Ascending(document => document.Slug),
            new CreateIndexOptions<IssuerDocument>
            {
                Name = "tenant_slug_unique",
                Unique = true
            });
        await Issuers.Indexes.CreateOneAsync(tenantSlugIndex).ConfigureAwait(false);
        // One key id per (tenant, issuer).
        var keyIndex = new CreateIndexModel<IssuerKeyDocument>(
            Builders<IssuerKeyDocument>.IndexKeys
                .Ascending(document => document.TenantId)
                .Ascending(document => document.IssuerId)
                .Ascending(document => document.Id),
            new CreateIndexOptions<IssuerKeyDocument>
            {
                Name = "issuer_keys_unique",
                Unique = true
            });
        // One fingerprint per (tenant, issuer), enabling fingerprint lookups.
        var fingerprintIndex = new CreateIndexModel<IssuerKeyDocument>(
            Builders<IssuerKeyDocument>.IndexKeys
                .Ascending(document => document.TenantId)
                .Ascending(document => document.IssuerId)
                .Ascending(document => document.Fingerprint),
            new CreateIndexOptions<IssuerKeyDocument>
            {
                Name = "issuer_keys_fingerprint",
                Unique = true
            });
        await IssuerKeys.Indexes.CreateOneAsync(keyIndex).ConfigureAwait(false);
        await IssuerKeys.Indexes.CreateOneAsync(fingerprintIndex).ConfigureAwait(false);
        // At most one trust override per (tenant, issuer).
        var trustIndex = new CreateIndexModel<IssuerTrustDocument>(
            Builders<IssuerTrustDocument>.IndexKeys
                .Ascending(document => document.TenantId)
                .Ascending(document => document.IssuerId),
            new CreateIndexOptions<IssuerTrustDocument>
            {
                Name = "issuer_trust_unique",
                Unique = true
            });
        await IssuerTrustOverrides.Indexes.CreateOneAsync(trustIndex).ConfigureAwait(false);
    }
}

View File

@@ -1,54 +0,0 @@
namespace StellaOps.IssuerDirectory.Infrastructure.Options;

/// <summary>
/// Mongo persistence configuration for the Issuer Directory service.
/// </summary>
public sealed class IssuerDirectoryMongoOptions
{
    public const string SectionName = "IssuerDirectory:Mongo";

    public string ConnectionString { get; set; } = "mongodb://localhost:27017";

    public string Database { get; set; } = "issuer-directory";

    public string IssuersCollection { get; set; } = "issuers";

    public string IssuerKeysCollection { get; set; } = "issuer_keys";

    public string IssuerTrustCollection { get; set; } = "issuer_trust_overrides";

    public string AuditCollection { get; set; } = "issuer_audit";

    /// <summary>
    /// Throws <see cref="InvalidOperationException"/> when any required setting
    /// is null, empty, or whitespace. Checks run in declaration order, so the
    /// first missing setting determines the exception message.
    /// </summary>
    public void Validate()
    {
        Require(ConnectionString, "IssuerDirectory Mongo connection string must be configured.");
        Require(Database, "IssuerDirectory Mongo database must be configured.");
        Require(IssuersCollection, "IssuerDirectory Mongo issuers collection must be configured.");
        Require(IssuerKeysCollection, "IssuerDirectory Mongo issuer keys collection must be configured.");
        Require(IssuerTrustCollection, "IssuerDirectory Mongo issuer trust collection must be configured.");
        Require(AuditCollection, "IssuerDirectory Mongo audit collection must be configured.");
    }

    // Shared guard: every setting above is mandatory non-whitespace text.
    private static void Require(string value, string message)
    {
        if (string.IsNullOrWhiteSpace(value))
        {
            throw new InvalidOperationException(message);
        }
    }
}

View File

@@ -1,131 +0,0 @@
using MongoDB.Driver;
using StellaOps.IssuerDirectory.Core.Abstractions;
using StellaOps.IssuerDirectory.Core.Domain;
using StellaOps.IssuerDirectory.Infrastructure.Documents;
using StellaOps.IssuerDirectory.Infrastructure.Internal;
namespace StellaOps.IssuerDirectory.Infrastructure.Repositories;
/// <summary>
/// Mongo-backed <see cref="IIssuerKeyRepository"/>. Key documents are scoped by
/// (tenant, issuer); writes are whole-document upserts keyed by the key id.
/// </summary>
public sealed class MongoIssuerKeyRepository : IIssuerKeyRepository
{
    // Shared filter builder for the key collection.
    private static readonly FilterDefinitionBuilder<IssuerKeyDocument> KeyFilter =
        Builders<IssuerKeyDocument>.Filter;

    private readonly IssuerDirectoryMongoContext _context;

    public MongoIssuerKeyRepository(IssuerDirectoryMongoContext context)
        => _context = context ?? throw new ArgumentNullException(nameof(context));

    /// <summary>Returns the key with the given id, or null when absent.</summary>
    public async Task<IssuerKeyRecord?> GetAsync(string tenantId, string issuerId, string keyId, CancellationToken cancellationToken)
    {
        var match = ScopeFilter(tenantId, issuerId) & KeyFilter.Eq(doc => doc.Id, keyId);
        var found = await _context.IssuerKeys
            .Find(match)
            .FirstOrDefaultAsync(cancellationToken)
            .ConfigureAwait(false);
        return found is null ? null : MapToDomain(found);
    }

    /// <summary>Returns the key with the given fingerprint, or null when absent.</summary>
    public async Task<IssuerKeyRecord?> GetByFingerprintAsync(string tenantId, string issuerId, string fingerprint, CancellationToken cancellationToken)
    {
        var match = ScopeFilter(tenantId, issuerId) & KeyFilter.Eq(doc => doc.Fingerprint, fingerprint);
        var found = await _context.IssuerKeys
            .Find(match)
            .FirstOrDefaultAsync(cancellationToken)
            .ConfigureAwait(false);
        return found is null ? null : MapToDomain(found);
    }

    /// <summary>Lists a tenant's keys for an issuer, oldest first.</summary>
    public Task<IReadOnlyCollection<IssuerKeyRecord>> ListAsync(string tenantId, string issuerId, CancellationToken cancellationToken)
        => ListScopedAsync(tenantId, issuerId, cancellationToken);

    /// <summary>Lists the global-tenant keys for an issuer, oldest first.</summary>
    public Task<IReadOnlyCollection<IssuerKeyRecord>> ListGlobalAsync(string issuerId, CancellationToken cancellationToken)
        => ListScopedAsync(IssuerTenants.Global, issuerId, cancellationToken);

    /// <summary>Inserts or fully replaces the document for <paramref name="record"/>.</summary>
    public async Task UpsertAsync(IssuerKeyRecord record, CancellationToken cancellationToken)
    {
        ArgumentNullException.ThrowIfNull(record);

        var match = ScopeFilter(record.TenantId, record.IssuerId) & KeyFilter.Eq(doc => doc.Id, record.Id);
        await _context.IssuerKeys
            .ReplaceOneAsync(match, MapToDocument(record), new ReplaceOptions { IsUpsert = true }, cancellationToken)
            .ConfigureAwait(false);
    }

    // Shared list pipeline: filter to (tenant, issuer), sort by creation time.
    private async Task<IReadOnlyCollection<IssuerKeyRecord>> ListScopedAsync(string tenantId, string issuerId, CancellationToken cancellationToken)
    {
        var documents = await _context.IssuerKeys
            .Find(ScopeFilter(tenantId, issuerId))
            .SortBy(doc => doc.CreatedAtUtc)
            .ToListAsync(cancellationToken)
            .ConfigureAwait(false);
        return documents.Select(MapToDomain).ToArray();
    }

    // Equality filter on the (TenantId, IssuerId) scope shared by every query.
    private static FilterDefinition<IssuerKeyDocument> ScopeFilter(string tenantId, string issuerId)
        => KeyFilter.Eq(doc => doc.TenantId, tenantId) & KeyFilter.Eq(doc => doc.IssuerId, issuerId);

    // Document -> domain record. Type/Status are persisted as strings and
    // parsed case-insensitively back into their enums.
    private static IssuerKeyRecord MapToDomain(IssuerKeyDocument document) => new()
    {
        Id = document.Id,
        IssuerId = document.IssuerId,
        TenantId = document.TenantId,
        Type = Enum.Parse<IssuerKeyType>(document.Type, ignoreCase: true),
        Status = Enum.Parse<IssuerKeyStatus>(document.Status, ignoreCase: true),
        Material = new IssuerKeyMaterial(document.MaterialFormat, document.MaterialValue),
        Fingerprint = document.Fingerprint,
        CreatedAtUtc = document.CreatedAtUtc,
        CreatedBy = document.CreatedBy,
        UpdatedAtUtc = document.UpdatedAtUtc,
        UpdatedBy = document.UpdatedBy,
        ExpiresAtUtc = document.ExpiresAtUtc,
        RetiredAtUtc = document.RetiredAtUtc,
        RevokedAtUtc = document.RevokedAtUtc,
        ReplacesKeyId = document.ReplacesKeyId
    };

    // Domain record -> document. Inverse of MapToDomain.
    private static IssuerKeyDocument MapToDocument(IssuerKeyRecord record) => new()
    {
        Id = record.Id,
        IssuerId = record.IssuerId,
        TenantId = record.TenantId,
        Type = record.Type.ToString(),
        Status = record.Status.ToString(),
        MaterialFormat = record.Material.Format,
        MaterialValue = record.Material.Value,
        Fingerprint = record.Fingerprint,
        CreatedAtUtc = record.CreatedAtUtc,
        CreatedBy = record.CreatedBy,
        UpdatedAtUtc = record.UpdatedAtUtc,
        UpdatedBy = record.UpdatedBy,
        ExpiresAtUtc = record.ExpiresAtUtc,
        RetiredAtUtc = record.RetiredAtUtc,
        RevokedAtUtc = record.RevokedAtUtc,
        ReplacesKeyId = record.ReplacesKeyId
    };
}

View File

@@ -1,177 +0,0 @@
using MongoDB.Driver;
using StellaOps.IssuerDirectory.Core.Abstractions;
using StellaOps.IssuerDirectory.Core.Domain;
using StellaOps.IssuerDirectory.Infrastructure.Documents;
using StellaOps.IssuerDirectory.Infrastructure.Internal;
namespace StellaOps.IssuerDirectory.Infrastructure.Repositories;
/// <summary>
/// Mongo-backed implementation of <see cref="IIssuerRepository"/>. Issuer
/// documents are keyed by (TenantId, Id); writes are whole-document upserts.
/// </summary>
public sealed class MongoIssuerRepository : IIssuerRepository
{
private readonly IssuerDirectoryMongoContext _context;
public MongoIssuerRepository(IssuerDirectoryMongoContext context)
{
_context = context ?? throw new ArgumentNullException(nameof(context));
}
/// <summary>Returns the issuer for (tenantId, issuerId), or null when absent.</summary>
public async Task<IssuerRecord?> GetAsync(string tenantId, string issuerId, CancellationToken cancellationToken)
{
var filter = Builders<IssuerDocument>.Filter.And(
Builders<IssuerDocument>.Filter.Eq(doc => doc.TenantId, tenantId),
Builders<IssuerDocument>.Filter.Eq(doc => doc.Id, issuerId));
// NOTE(review): despite the name, "cursor" holds the materialized document
// (FirstOrDefaultAsync), not a cursor; Limit(1) is redundant with FirstOrDefault.
var cursor = await _context.Issuers
.Find(filter)
.Limit(1)
.FirstOrDefaultAsync(cancellationToken)
.ConfigureAwait(false);
return cursor is null ? null : MapToDomain(cursor);
}
/// <summary>Lists a tenant's issuers ordered by slug.</summary>
public async Task<IReadOnlyCollection<IssuerRecord>> ListAsync(string tenantId, CancellationToken cancellationToken)
{
var filter = Builders<IssuerDocument>.Filter.Eq(doc => doc.TenantId, tenantId);
var documents = await _context.Issuers.Find(filter)
.SortBy(doc => doc.Slug)
.ToListAsync(cancellationToken)
.ConfigureAwait(false);
return documents.Select(MapToDomain).ToArray();
}
/// <summary>Lists issuers under the global tenant (IssuerTenants.Global) ordered by slug.</summary>
public async Task<IReadOnlyCollection<IssuerRecord>> ListGlobalAsync(CancellationToken cancellationToken)
{
var documents = await _context.Issuers
.Find(doc => doc.TenantId == IssuerTenants.Global)
.SortBy(doc => doc.Slug)
.ToListAsync(cancellationToken)
.ConfigureAwait(false);
return documents.Select(MapToDomain).ToArray();
}
/// <summary>Inserts or fully replaces the document for <paramref name="record"/>, keyed by (TenantId, Id).</summary>
public async Task UpsertAsync(IssuerRecord record, CancellationToken cancellationToken)
{
ArgumentNullException.ThrowIfNull(record);
var document = MapToDocument(record);
var filter = Builders<IssuerDocument>.Filter.And(
Builders<IssuerDocument>.Filter.Eq(doc => doc.TenantId, record.TenantId),
Builders<IssuerDocument>.Filter.Eq(doc => doc.Id, record.Id));
await _context.Issuers
.ReplaceOneAsync(
filter,
document,
new ReplaceOptions { IsUpsert = true },
cancellationToken)
.ConfigureAwait(false);
}
/// <summary>Deletes the issuer for (tenantId, issuerId); no-op when absent.</summary>
public async Task DeleteAsync(string tenantId, string issuerId, CancellationToken cancellationToken)
{
var filter = Builders<IssuerDocument>.Filter.And(
Builders<IssuerDocument>.Filter.Eq(doc => doc.TenantId, tenantId),
Builders<IssuerDocument>.Filter.Eq(doc => doc.Id, issuerId));
await _context.Issuers.DeleteOneAsync(filter, cancellationToken).ConfigureAwait(false);
}
// Document -> domain record. URL-ish strings are stored as plain strings and
// re-parsed into Uri instances; blank/whitespace strings become null. A stored
// malformed URL would make the Uri constructor throw here.
private static IssuerRecord MapToDomain(IssuerDocument document)
{
var contact = new IssuerContact(
document.Contact.Email,
document.Contact.Phone,
string.IsNullOrWhiteSpace(document.Contact.Website) ? null : new Uri(document.Contact.Website),
document.Contact.Timezone);
// Positional IssuerMetadata args: Languages presumably feeds the record's
// SupportedLanguages (see the inverse in MapToDocument) — TODO confirm the
// IssuerMetadata constructor parameter order.
var metadata = new IssuerMetadata(
document.Metadata.CveOrgId,
document.Metadata.CsafPublisherId,
string.IsNullOrWhiteSpace(document.Metadata.SecurityAdvisoriesUrl)
? null
: new Uri(document.Metadata.SecurityAdvisoriesUrl),
string.IsNullOrWhiteSpace(document.Metadata.CatalogUrl)
? null
: new Uri(document.Metadata.CatalogUrl),
document.Metadata.Languages,
document.Metadata.Attributes);
var endpoints = document.Endpoints
.Select(endpoint => new IssuerEndpoint(
endpoint.Kind,
new Uri(endpoint.Url),
endpoint.Format,
endpoint.RequiresAuthentication))
.ToArray();
return new IssuerRecord
{
Id = document.Id,
TenantId = document.TenantId,
DisplayName = document.DisplayName,
Slug = document.Slug,
Description = document.Description,
Contact = contact,
Metadata = metadata,
Endpoints = endpoints,
Tags = document.Tags,
CreatedAtUtc = document.CreatedAtUtc,
CreatedBy = document.CreatedBy,
UpdatedAtUtc = document.UpdatedAtUtc,
UpdatedBy = document.UpdatedBy,
IsSystemSeed = document.IsSystemSeed
};
}
// Domain record -> document. Uri values are serialized via ToString(); the
// attribute map is copied with case-insensitive keys.
private static IssuerDocument MapToDocument(IssuerRecord record)
{
var contact = new IssuerContactDocument
{
Email = record.Contact.Email,
Phone = record.Contact.Phone,
Website = record.Contact.Website?.ToString(),
Timezone = record.Contact.Timezone
};
var metadataDocument = new IssuerMetadataDocument
{
CveOrgId = record.Metadata.CveOrgId,
CsafPublisherId = record.Metadata.CsafPublisherId,
SecurityAdvisoriesUrl = record.Metadata.SecurityAdvisoriesUrl?.ToString(),
CatalogUrl = record.Metadata.CatalogUrl?.ToString(),
Languages = record.Metadata.SupportedLanguages.ToList(),
Attributes = new Dictionary<string, string>(record.Metadata.Attributes, StringComparer.OrdinalIgnoreCase)
};
var endpoints = record.Endpoints
.Select(endpoint => new IssuerEndpointDocument
{
Kind = endpoint.Kind,
Url = endpoint.Url.ToString(),
Format = endpoint.Format,
RequiresAuthentication = endpoint.RequiresAuthentication
})
.ToList();
return new IssuerDocument
{
Id = record.Id,
TenantId = record.TenantId,
DisplayName = record.DisplayName,
Slug = record.Slug,
Description = record.Description,
Contact = contact,
Metadata = metadataDocument,
Endpoints = endpoints,
Tags = record.Tags.ToList(),
CreatedAtUtc = record.CreatedAtUtc,
CreatedBy = record.CreatedBy,
UpdatedAtUtc = record.UpdatedAtUtc,
UpdatedBy = record.UpdatedBy,
IsSystemSeed = record.IsSystemSeed
};
}
}

View File

@@ -1,88 +0,0 @@
using System.Globalization;
using MongoDB.Driver;
using StellaOps.IssuerDirectory.Core.Abstractions;
using StellaOps.IssuerDirectory.Core.Domain;
using StellaOps.IssuerDirectory.Infrastructure.Documents;
using StellaOps.IssuerDirectory.Infrastructure.Internal;
namespace StellaOps.IssuerDirectory.Infrastructure.Repositories;
/// <summary>
/// Mongo-backed <see cref="IIssuerTrustRepository"/>. A tenant holds at most
/// one trust override per issuer; writes are whole-document upserts.
/// </summary>
public sealed class MongoIssuerTrustRepository : IIssuerTrustRepository
{
    // Shared filter builder for the trust-override collection.
    private static readonly FilterDefinitionBuilder<IssuerTrustDocument> TrustFilter =
        Builders<IssuerTrustDocument>.Filter;

    private readonly IssuerDirectoryMongoContext _context;

    public MongoIssuerTrustRepository(IssuerDirectoryMongoContext context)
        => _context = context ?? throw new ArgumentNullException(nameof(context));

    /// <summary>Returns the tenant's override for the issuer, or null when none exists.</summary>
    public async Task<IssuerTrustOverrideRecord?> GetAsync(string tenantId, string issuerId, CancellationToken cancellationToken)
    {
        var document = await _context.IssuerTrustOverrides
            .Find(ScopeFilter(tenantId, issuerId))
            .FirstOrDefaultAsync(cancellationToken)
            .ConfigureAwait(false);
        return document is null ? null : MapToDomain(document);
    }

    /// <summary>Inserts or fully replaces the override for <paramref name="record"/>.</summary>
    public async Task UpsertAsync(IssuerTrustOverrideRecord record, CancellationToken cancellationToken)
    {
        ArgumentNullException.ThrowIfNull(record);

        await _context.IssuerTrustOverrides
            .ReplaceOneAsync(
                ScopeFilter(record.TenantId, record.IssuerId),
                MapToDocument(record),
                new ReplaceOptions { IsUpsert = true },
                cancellationToken)
            .ConfigureAwait(false);
    }

    /// <summary>Removes the tenant's override for the issuer; no-op when absent.</summary>
    public async Task DeleteAsync(string tenantId, string issuerId, CancellationToken cancellationToken)
    {
        await _context.IssuerTrustOverrides
            .DeleteOneAsync(ScopeFilter(tenantId, issuerId), cancellationToken)
            .ConfigureAwait(false);
    }

    // Equality filter on the (TenantId, IssuerId) scope shared by every query.
    private static FilterDefinition<IssuerTrustDocument> ScopeFilter(string tenantId, string issuerId)
        => TrustFilter.Eq(doc => doc.TenantId, tenantId) & TrustFilter.Eq(doc => doc.IssuerId, issuerId);

    // Document -> domain record (straight field copy).
    private static IssuerTrustOverrideRecord MapToDomain(IssuerTrustDocument document) => new()
    {
        IssuerId = document.IssuerId,
        TenantId = document.TenantId,
        Weight = document.Weight,
        Reason = document.Reason,
        CreatedAtUtc = document.CreatedAtUtc,
        CreatedBy = document.CreatedBy,
        UpdatedAtUtc = document.UpdatedAtUtc,
        UpdatedBy = document.UpdatedBy
    };

    // Domain record -> document. The document id is the composite
    // "{tenant}:{issuer}" so repeated upserts target the same document.
    private static IssuerTrustDocument MapToDocument(IssuerTrustOverrideRecord record) => new()
    {
        Id = string.Create(CultureInfo.InvariantCulture, $"{record.TenantId}:{record.IssuerId}"),
        IssuerId = record.IssuerId,
        TenantId = record.TenantId,
        Weight = record.Weight,
        Reason = record.Reason,
        CreatedAtUtc = record.CreatedAtUtc,
        CreatedBy = record.CreatedBy,
        UpdatedAtUtc = record.UpdatedAtUtc,
        UpdatedBy = record.UpdatedBy
    };
}

View File

@@ -1,10 +1,7 @@
using Microsoft.Extensions.Configuration;
using Microsoft.Extensions.DependencyInjection;
using StellaOps.IssuerDirectory.Core.Abstractions;
using StellaOps.IssuerDirectory.Infrastructure.Audit;
using StellaOps.IssuerDirectory.Infrastructure.Internal;
using StellaOps.IssuerDirectory.Infrastructure.Options;
using StellaOps.IssuerDirectory.Infrastructure.Repositories;
using StellaOps.IssuerDirectory.Infrastructure.InMemory;
namespace StellaOps.IssuerDirectory.Infrastructure;
@@ -17,19 +14,10 @@ public static class ServiceCollectionExtensions
ArgumentNullException.ThrowIfNull(services);
ArgumentNullException.ThrowIfNull(configuration);
services.AddOptions<IssuerDirectoryMongoOptions>()
.Bind(configuration.GetSection(IssuerDirectoryMongoOptions.SectionName))
.Validate(options =>
{
options.Validate();
return true;
});
services.AddSingleton<IssuerDirectoryMongoContext>();
services.AddSingleton<IIssuerRepository, MongoIssuerRepository>();
services.AddSingleton<IIssuerKeyRepository, MongoIssuerKeyRepository>();
services.AddSingleton<IIssuerTrustRepository, MongoIssuerTrustRepository>();
services.AddSingleton<IIssuerAuditSink, MongoIssuerAuditSink>();
services.AddSingleton<IIssuerRepository, InMemoryIssuerRepository>();
services.AddSingleton<IIssuerKeyRepository, InMemoryIssuerKeyRepository>();
services.AddSingleton<IIssuerTrustRepository, InMemoryIssuerTrustRepository>();
services.AddSingleton<IIssuerAuditSink, InMemoryIssuerAuditSink>();
return services;
}

View File

@@ -11,8 +11,6 @@
<PackageReference Include="Microsoft.Extensions.DependencyInjection.Abstractions" Version="10.0.0" />
<PackageReference Include="Microsoft.Extensions.Logging.Abstractions" Version="10.0.0" />
<PackageReference Include="Microsoft.Extensions.Options.ConfigurationExtensions" Version="10.0.0" />
<PackageReference Include="MongoDB.Bson" Version="3.5.0" />
<PackageReference Include="MongoDB.Driver" Version="3.5.0" />
</ItemGroup>
<ItemGroup>
<ProjectReference Include="..\\StellaOps.IssuerDirectory.Core\\StellaOps.IssuerDirectory.Core.csproj" />

View File

@@ -121,7 +121,7 @@ static void ConfigurePersistence(
WebApplicationBuilder builder,
IssuerDirectoryWebServiceOptions options)
{
var provider = options.Persistence.Provider?.Trim().ToLowerInvariant() ?? "mongo";
var provider = options.Persistence.Provider?.Trim().ToLowerInvariant() ?? "postgres";
if (provider == "postgres")
{
@@ -134,7 +134,7 @@ static void ConfigurePersistence(
}
else
{
Log.Information("Using MongoDB persistence for IssuerDirectory.");
Log.Information("Using in-memory persistence for IssuerDirectory (non-production).");
builder.Services.AddIssuerDirectoryInfrastructure(builder.Configuration);
}
}

View File

@@ -114,7 +114,7 @@ internal static class NativeReachabilityGraphBuilder
.ToImmutableArray();
var distinctEdges = edges
.GroupBy(e => (e.From, e.To, e.Reason), ValueTuple.Create)
.GroupBy(e => (e.From, e.To, e.Reason))
.Select(g => g.First())
.OrderBy(e => e.From, StringComparer.Ordinal)
.ThenBy(e => e.To, StringComparer.Ordinal)

View File

@@ -44,7 +44,9 @@ public sealed class ReachabilityGraphBuilder
string? display = null,
string? sourceFile = null,
int? sourceLine = null,
IReadOnlyDictionary<string, string>? attributes = null)
IReadOnlyDictionary<string, string>? attributes = null,
string? purl = null,
string? symbolDigest = null)
{
if (string.IsNullOrWhiteSpace(symbolId))
{
@@ -59,7 +61,9 @@ public sealed class ReachabilityGraphBuilder
display?.Trim(),
sourceFile?.Trim(),
sourceLine,
attributes?.ToImmutableSortedDictionary(StringComparer.Ordinal) ?? ImmutableSortedDictionary<string, string>.Empty);
attributes?.ToImmutableSortedDictionary(StringComparer.Ordinal) ?? ImmutableSortedDictionary<string, string>.Empty,
purl?.Trim(),
symbolDigest?.Trim());
_richNodes[id] = node;
nodes.Add(id);
@@ -93,6 +97,9 @@ public sealed class ReachabilityGraphBuilder
/// <param name="origin">Origin: static or runtime.</param>
/// <param name="provenance">Provenance hint: jvm-bytecode, il, ts-ast, ssa, ebpf, etw, jfr, hook.</param>
/// <param name="evidence">Evidence locator (e.g., "file:path:line").</param>
/// <param name="purl">PURL of the component that defines the callee.</param>
/// <param name="symbolDigest">Stable hash of the normalized callee signature.</param>
/// <param name="candidates">Ranked candidate purls when resolution is ambiguous.</param>
public ReachabilityGraphBuilder AddEdge(
string from,
string to,
@@ -100,7 +107,10 @@ public sealed class ReachabilityGraphBuilder
EdgeConfidence confidence,
string origin = "static",
string? provenance = null,
string? evidence = null)
string? evidence = null,
string? purl = null,
string? symbolDigest = null,
IReadOnlyList<(string Purl, string? SymbolDigest, double? Score)>? candidates = null)
{
if (string.IsNullOrWhiteSpace(from) || string.IsNullOrWhiteSpace(to))
{
@@ -118,7 +128,10 @@ public sealed class ReachabilityGraphBuilder
confidence,
origin?.Trim() ?? "static",
provenance?.Trim(),
evidence?.Trim());
evidence?.Trim(),
purl?.Trim(),
symbolDigest?.Trim(),
candidates);
_richEdges.Add(richEdge);
nodes.Add(fromId);
@@ -172,7 +185,9 @@ public sealed class ReachabilityGraphBuilder
rich.Kind,
rich.Display,
source,
rich.Attributes.Count > 0 ? rich.Attributes : null));
rich.Attributes.Count > 0 ? rich.Attributes : null,
rich.Purl,
rich.SymbolDigest));
}
else
{
@@ -199,12 +214,17 @@ public sealed class ReachabilityGraphBuilder
rich.Provenance,
rich.Evidence);
var candidates = rich.Candidates?.Select(c => new ReachabilityEdgeCandidate(c.Purl, c.SymbolDigest, c.Score)).ToList();
edgeList.Add(new ReachabilityUnionEdge(
rich.From,
rich.To,
rich.EdgeType,
ConfidenceToString(rich.Confidence),
source));
source,
rich.Purl,
rich.SymbolDigest,
candidates));
}
// Add any legacy edges not already covered
@@ -315,7 +335,9 @@ public sealed class ReachabilityGraphBuilder
string? Display,
string? SourceFile,
int? SourceLine,
ImmutableSortedDictionary<string, string> Attributes);
ImmutableSortedDictionary<string, string> Attributes,
string? Purl = null,
string? SymbolDigest = null);
private sealed record RichEdge(
string From,
@@ -324,7 +346,10 @@ public sealed class ReachabilityGraphBuilder
EdgeConfidence Confidence,
string Origin,
string? Provenance,
string? Evidence);
string? Evidence,
string? Purl = null,
string? SymbolDigest = null,
IReadOnlyList<(string Purl, string? SymbolDigest, double? Score)>? Candidates = null);
}
/// <summary>

View File

@@ -15,6 +15,7 @@ public interface IRichGraphPublisher
/// <summary>
/// Packages richgraph-v1 JSON + meta into a deterministic zip and stores it in CAS.
/// CAS paths follow the richgraph-v1 contract: cas://reachability/graphs/{blake3}
/// </summary>
public sealed class ReachabilityRichGraphPublisher : IRichGraphPublisher
{
@@ -45,11 +46,20 @@ public sealed class ReachabilityRichGraphPublisher : IRichGraphPublisher
var zipPath = Path.Combine(folder, "richgraph.zip");
CreateDeterministicZip(folder, zipPath);
// Use BLAKE3 graph_hash as the CAS key per CONTRACT-RICHGRAPH-V1-015
var casKey = ExtractHashDigest(writeResult.GraphHash);
await using var stream = File.OpenRead(zipPath);
var sha = ComputeSha256(zipPath);
var casEntry = await cas.PutAsync(new FileCasPutRequest(sha, stream, leaveOpen: false), cancellationToken).ConfigureAwait(false);
var casEntry = await cas.PutAsync(new FileCasPutRequest(casKey, stream, leaveOpen: false), cancellationToken).ConfigureAwait(false);
return new RichGraphPublishResult(writeResult.GraphHash, casEntry.RelativePath, writeResult.NodeCount, writeResult.EdgeCount);
// Build CAS URI per contract: cas://reachability/graphs/{blake3}
var casUri = $"cas://reachability/graphs/{casKey}";
return new RichGraphPublishResult(
writeResult.GraphHash,
casEntry.RelativePath,
casUri,
writeResult.NodeCount,
writeResult.EdgeCount);
}
private static void CreateDeterministicZip(string sourceDir, string destinationZip)
@@ -71,16 +81,19 @@ public sealed class ReachabilityRichGraphPublisher : IRichGraphPublisher
}
}
private static string ComputeSha256(string path)
/// <summary>
/// Extracts the hex digest from a prefixed hash (e.g., "blake3:abc123" → "abc123").
/// </summary>
private static string ExtractHashDigest(string prefixedHash)
{
using var sha = System.Security.Cryptography.SHA256.Create();
using var stream = File.OpenRead(path);
return Convert.ToHexString(sha.ComputeHash(stream)).ToLowerInvariant();
var colonIndex = prefixedHash.IndexOf(':');
return colonIndex >= 0 ? prefixedHash[(colonIndex + 1)..] : prefixedHash;
}
}
public sealed record RichGraphPublishResult(
string GraphHash,
string RelativePath,
string CasUri,
int NodeCount,
int EdgeCount);

View File

@@ -75,7 +75,9 @@ public sealed class ReachabilityUnionWriter
Source = n.Source?.Trimmed(),
Attributes = (n.Attributes ?? ImmutableDictionary<string, string>.Empty)
.Where(kv => !string.IsNullOrWhiteSpace(kv.Key) && kv.Value is not null)
.ToImmutableSortedDictionary(kv => kv.Key.Trim(), kv => kv.Value!.Trim())
.ToImmutableSortedDictionary(kv => kv.Key.Trim(), kv => kv.Value!.Trim()),
Purl = Trim(n.Purl),
SymbolDigest = Trim(n.SymbolDigest)
})
.OrderBy(n => n.SymbolId, StringComparer.Ordinal)
.ThenBy(n => n.Kind, StringComparer.Ordinal)
@@ -89,7 +91,10 @@ public sealed class ReachabilityUnionWriter
To = Trim(e.To)!,
EdgeType = Trim(e.EdgeType) ?? "call",
Confidence = Trim(e.Confidence) ?? "certain",
Source = e.Source?.Trimmed()
Source = e.Source?.Trimmed(),
Purl = Trim(e.Purl),
SymbolDigest = Trim(e.SymbolDigest),
Candidates = NormalizeCandidates(e.Candidates)
})
.OrderBy(e => e.From, StringComparer.Ordinal)
.ThenBy(e => e.To, StringComparer.Ordinal)
@@ -110,6 +115,24 @@ public sealed class ReachabilityUnionWriter
return new NormalizedGraph(nodes, edges, facts);
}
private static IReadOnlyList<ReachabilityEdgeCandidate>? NormalizeCandidates(IReadOnlyList<ReachabilityEdgeCandidate>? candidates)
{
if (candidates is null || candidates.Count == 0)
{
return null;
}
return candidates
.Where(c => !string.IsNullOrWhiteSpace(c.Purl))
.Select(c => new ReachabilityEdgeCandidate(
c.Purl.Trim(),
Trim(c.SymbolDigest),
c.Score))
.OrderByDescending(c => c.Score ?? 0)
.ThenBy(c => c.Purl, StringComparer.Ordinal)
.ToList();
}
private static async Task<FileHashInfo> WriteNdjsonAsync<T>(
string path,
IReadOnlyCollection<T> items,
@@ -145,6 +168,16 @@ public sealed class ReachabilityUnionWriter
jw.WriteString("display", node.Display);
}
if (!string.IsNullOrWhiteSpace(node.Purl))
{
jw.WriteString("purl", node.Purl);
}
if (!string.IsNullOrWhiteSpace(node.SymbolDigest))
{
jw.WriteString("symbol_digest", node.SymbolDigest);
}
if (node.Source is not null)
{
jw.WritePropertyName("source");
@@ -180,6 +213,37 @@ public sealed class ReachabilityUnionWriter
jw.WriteString("edge_type", edge.EdgeType);
jw.WriteString("confidence", edge.Confidence);
if (!string.IsNullOrWhiteSpace(edge.Purl))
{
jw.WriteString("purl", edge.Purl);
}
if (!string.IsNullOrWhiteSpace(edge.SymbolDigest))
{
jw.WriteString("symbol_digest", edge.SymbolDigest);
}
if (edge.Candidates is { Count: > 0 })
{
jw.WritePropertyName("candidates");
jw.WriteStartArray();
foreach (var candidate in edge.Candidates)
{
jw.WriteStartObject();
jw.WriteString("purl", candidate.Purl);
if (!string.IsNullOrWhiteSpace(candidate.SymbolDigest))
{
jw.WriteString("symbol_digest", candidate.SymbolDigest);
}
if (candidate.Score.HasValue)
{
jw.WriteNumber("score", candidate.Score.Value);
}
jw.WriteEndObject();
}
jw.WriteEndArray();
}
if (edge.Source is not null)
{
jw.WritePropertyName("source");
@@ -327,14 +391,27 @@ public sealed record ReachabilityUnionNode(
string Kind,
string? Display = null,
ReachabilitySource? Source = null,
IReadOnlyDictionary<string, string>? Attributes = null);
IReadOnlyDictionary<string, string>? Attributes = null,
string? Purl = null,
string? SymbolDigest = null);
public sealed record ReachabilityUnionEdge(
string From,
string To,
string EdgeType,
string? Confidence = "certain",
ReachabilitySource? Source = null);
ReachabilitySource? Source = null,
string? Purl = null,
string? SymbolDigest = null,
IReadOnlyList<ReachabilityEdgeCandidate>? Candidates = null);
/// <summary>
/// Represents a candidate purl+digest when callee resolution is ambiguous.
/// </summary>
public sealed record ReachabilityEdgeCandidate(
string Purl,
string? SymbolDigest = null,
double? Score = null);
public sealed record ReachabilityRuntimeFact(
string SymbolId,

View File

@@ -38,4 +38,132 @@ public class ReachabilityUnionWriterTests
var nodeLines = await File.ReadAllLinesAsync(Path.Combine(temp.Path, "reachability_graphs/analysis-x/nodes.ndjson"));
Assert.Contains(nodeLines, l => l.Contains("sym:dotnet:A"));
}
[Fact]
public async Task WritesNodePurlAndSymbolDigest()
{
var writer = new ReachabilityUnionWriter();
using var temp = new TempDir();
var graph = new ReachabilityUnionGraph(
Nodes: new[]
{
new ReachabilityUnionNode(
"sym:dotnet:A",
"dotnet",
"method",
"TestMethod",
null,
null,
Purl: "pkg:nuget/TestPackage@1.0.0",
SymbolDigest: "sha256:abc123")
},
Edges: Array.Empty<ReachabilityUnionEdge>());
var result = await writer.WriteAsync(graph, temp.Path, "analysis-purl");
var nodeLines = await File.ReadAllLinesAsync(result.Nodes.Path);
Assert.Single(nodeLines);
Assert.Contains("\"purl\":\"pkg:nuget/TestPackage@1.0.0\"", nodeLines[0]);
Assert.Contains("\"symbol_digest\":\"sha256:abc123\"", nodeLines[0]);
}
[Fact]
public async Task WritesEdgePurlAndSymbolDigest()
{
var writer = new ReachabilityUnionWriter();
using var temp = new TempDir();
var graph = new ReachabilityUnionGraph(
Nodes: new[]
{
new ReachabilityUnionNode("sym:dotnet:A", "dotnet", "method"),
new ReachabilityUnionNode("sym:dotnet:B", "dotnet", "method")
},
Edges: new[]
{
new ReachabilityUnionEdge(
"sym:dotnet:A",
"sym:dotnet:B",
"call",
"high",
null,
Purl: "pkg:nuget/TargetPackage@2.0.0",
SymbolDigest: "sha256:def456")
});
var result = await writer.WriteAsync(graph, temp.Path, "analysis-edge-purl");
var edgeLines = await File.ReadAllLinesAsync(result.Edges.Path);
Assert.Single(edgeLines);
Assert.Contains("\"purl\":\"pkg:nuget/TargetPackage@2.0.0\"", edgeLines[0]);
Assert.Contains("\"symbol_digest\":\"sha256:def456\"", edgeLines[0]);
}
[Fact]
public async Task WritesEdgeCandidates()
{
var writer = new ReachabilityUnionWriter();
using var temp = new TempDir();
var graph = new ReachabilityUnionGraph(
Nodes: new[]
{
new ReachabilityUnionNode("sym:binary:main", "binary", "function"),
new ReachabilityUnionNode("sym:binary:openssl_connect", "binary", "function")
},
Edges: new[]
{
new ReachabilityUnionEdge(
"sym:binary:main",
"sym:binary:openssl_connect",
"call",
"medium",
null,
Purl: null,
SymbolDigest: null,
Candidates: new List<ReachabilityEdgeCandidate>
{
new("pkg:deb/ubuntu/openssl@3.0.2", "sha256:abc", 0.8),
new("pkg:deb/debian/openssl@3.0.2", "sha256:def", 0.6)
})
});
var result = await writer.WriteAsync(graph, temp.Path, "analysis-candidates");
var edgeLines = await File.ReadAllLinesAsync(result.Edges.Path);
Assert.Single(edgeLines);
Assert.Contains("\"candidates\":", edgeLines[0]);
Assert.Contains("pkg:deb/ubuntu/openssl@3.0.2", edgeLines[0]);
Assert.Contains("pkg:deb/debian/openssl@3.0.2", edgeLines[0]);
Assert.Contains("\"score\":0.8", edgeLines[0]);
}
[Fact]
public async Task OmitsPurlAndSymbolDigestWhenNull()
{
var writer = new ReachabilityUnionWriter();
using var temp = new TempDir();
var graph = new ReachabilityUnionGraph(
Nodes: new[]
{
new ReachabilityUnionNode("sym:dotnet:A", "dotnet", "method")
},
Edges: new[]
{
new ReachabilityUnionEdge("sym:dotnet:A", "sym:dotnet:A", "call")
});
var result = await writer.WriteAsync(graph, temp.Path, "analysis-null-purl");
var nodeLines = await File.ReadAllLinesAsync(result.Nodes.Path);
Assert.DoesNotContain("purl", nodeLines[0]);
Assert.DoesNotContain("symbol_digest", nodeLines[0]);
var edgeLines = await File.ReadAllLinesAsync(result.Edges.Path);
Assert.DoesNotContain("purl", edgeLines[0]);
Assert.DoesNotContain("symbol_digest", edgeLines[0]);
Assert.DoesNotContain("candidates", edgeLines[0]);
}
}

View File

@@ -1,4 +1,5 @@
using System.Threading.Tasks;
using StellaOps.Cryptography;
using StellaOps.Scanner.Reachability;
using Xunit;
@@ -9,7 +10,7 @@ public class RichGraphPublisherTests
[Fact]
public async Task PublishesGraphToCas()
{
var writer = new RichGraphWriter();
var writer = new RichGraphWriter(CryptoHashFactory.CreateDefault());
var publisher = new ReachabilityRichGraphPublisher(writer);
var cas = new FakeFileContentAddressableStore();
@@ -21,7 +22,8 @@ public class RichGraphPublisherTests
var rich = RichGraphBuilder.FromUnion(union, "test", "1.0.0");
var result = await publisher.PublishAsync(rich, "scan-1", cas, temp.Path);
Assert.StartsWith("sha256:", result.GraphHash);
Assert.Contains(":", result.GraphHash); // hash format: algorithm:digest
Assert.StartsWith("cas://reachability/graphs/", result.CasUri);
Assert.Equal(1, result.NodeCount);
}
}

View File

@@ -1,5 +1,6 @@
using System.IO;
using System.Threading.Tasks;
using StellaOps.Cryptography;
using StellaOps.Scanner.Reachability;
using Xunit;
@@ -10,7 +11,7 @@ public class RichGraphWriterTests
[Fact]
public async Task WritesCanonicalGraphAndMeta()
{
var writer = new RichGraphWriter();
var writer = new RichGraphWriter(CryptoHashFactory.CreateDefault());
using var temp = new TempDir();
var union = new ReachabilityUnionGraph(
@@ -31,7 +32,7 @@ public class RichGraphWriterTests
Assert.True(File.Exists(result.MetaPath));
var json = await File.ReadAllTextAsync(result.GraphPath);
Assert.Contains("richgraph-v1", json);
Assert.StartsWith("sha256:", result.GraphHash);
Assert.Contains(":", result.GraphHash); // hash format: algorithm:digest
Assert.Equal(2, result.NodeCount);
Assert.Equal(1, result.EdgeCount);
}