audit work, fixed StellaOps.sln warnings/errors, fixed tests, sprints work, new advisories

This commit is contained in:
master
2026-01-07 18:49:59 +02:00
parent 04ec098046
commit 608a7f85c0
866 changed files with 56323 additions and 6231 deletions

View File

@@ -4,6 +4,7 @@
// Task: T5 — Backend export service for audit packs
// -----------------------------------------------------------------------------
using System.Globalization;
using System.IO.Compression;
using System.Text.Json;
using StellaOps.AuditPack.Models;
@@ -132,7 +133,7 @@ public sealed class AuditPackExportService : IAuditPackExportService
{
var exportDoc = new Dictionary<string, object>
{
["exportedAt"] = _timeProvider.GetUtcNow().ToString("O"),
["exportedAt"] = _timeProvider.GetUtcNow().ToString("O", CultureInfo.InvariantCulture),
["scanId"] = request.ScanId,
["format"] = "json",
["version"] = "1.0"

View File

@@ -9,10 +9,6 @@
<ItemGroup>
<PackageReference Include="BouncyCastle.Cryptography" />
<PackageReference Include="FluentAssertions" />
<PackageReference Include="xunit.runner.visualstudio" >
<IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
<PrivateAssets>all</PrivateAssets>
</PackageReference>
<PackageReference Include="coverlet.collector" >
<IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
<PrivateAssets>all</PrivateAssets>

View File

@@ -1,6 +1,7 @@
using System;
using System.Collections.Concurrent;
using System.Collections.Generic;
using System.Globalization;
using System.Linq;
using System.Security.Cryptography;
@@ -143,14 +144,14 @@ public sealed class DefaultCryptoProvider : ICryptoProvider, ICryptoProviderDiag
var metadata = new Dictionary<string, string?>(StringComparer.OrdinalIgnoreCase)
{
["kind"] = key.Kind.ToString(),
["createdAt"] = key.CreatedAt.UtcDateTime.ToString("O"),
["createdAt"] = key.CreatedAt.UtcDateTime.ToString("O", CultureInfo.InvariantCulture),
["providerHint"] = key.Reference.ProviderHint,
["provider"] = Name
};
if (key.ExpiresAt.HasValue)
{
metadata["expiresAt"] = key.ExpiresAt.Value.UtcDateTime.ToString("O");
metadata["expiresAt"] = key.ExpiresAt.Value.UtcDateTime.ToString("O", CultureInfo.InvariantCulture);
}
foreach (var pair in key.Metadata)

View File

@@ -0,0 +1,24 @@
# Eventing Library Charter
## Mission
- Provide event envelope models and deterministic serialization helpers.
## Responsibilities
- Define event envelope schemas and validation helpers.
- Provide serialization/parsing utilities with invariant culture.
- Maintain envelope versioning and compatibility rules.
## Required Reading
- docs/README.md
- docs/07_HIGH_LEVEL_ARCHITECTURE.md
- docs/modules/platform/architecture-overview.md
- docs/modules/eventing/event-envelope-schema.md
## Working Agreement
- Deterministic ordering and invariant formatting.
- Use TimeProvider and IGuidGenerator where timestamps or IDs are created.
- Propagate CancellationToken for async operations.
## Testing Strategy
- Unit tests for envelope validation and serialization round-trips.
- Determinism tests for stable ordering and hashes.

View File

@@ -0,0 +1,64 @@
// Copyright (c) StellaOps. Licensed under the AGPL-3.0-or-later.
using System.ComponentModel.DataAnnotations;
using StellaOps.Eventing.Models;
namespace StellaOps.Eventing;
/// <summary>
/// Configuration options for the eventing library.
/// </summary>
public sealed class EventingOptions
{
    /// <summary>
    /// Name of the configuration section these options bind to.
    /// </summary>
    public const string SectionName = "Eventing";

    /// <summary>
    /// Name of the service emitting events.
    /// </summary>
    [Required]
    public string ServiceName { get; set; } = "Unknown";

    /// <summary>
    /// When true, emitted events are signed with DSSE.
    /// </summary>
    public bool SignEvents { get; set; }

    /// <summary>
    /// Key ID used for event signing; only meaningful when <see cref="SignEvents"/> is true.
    /// </summary>
    public string? SigningKeyId { get; set; }

    /// <summary>
    /// Engine version reference; auto-detected when left null.
    /// </summary>
    public EngineVersionRef? EngineVersion { get; set; }

    /// <summary>
    /// PostgreSQL connection string for the event store.
    /// </summary>
    public string? ConnectionString { get; set; }

    /// <summary>
    /// When true, the in-memory event store is used instead of PostgreSQL (testing).
    /// </summary>
    public bool UseInMemoryStore { get; set; }

    /// <summary>
    /// When true, events are delivered through the transactional outbox.
    /// </summary>
    public bool EnableOutbox { get; set; }

    /// <summary>
    /// Number of outbox entries handled per processing pass.
    /// </summary>
    [Range(1, 10000)]
    public int OutboxBatchSize { get; set; } = 100;

    /// <summary>
    /// Delay between outbox processing passes.
    /// </summary>
    public TimeSpan OutboxInterval { get; set; } = TimeSpan.FromSeconds(5);
}

View File

@@ -0,0 +1,36 @@
// Copyright (c) StellaOps. Licensed under the AGPL-3.0-or-later.
using StellaOps.Eventing.Models;
namespace StellaOps.Eventing;
/// <summary>
/// Interface for emitting timeline events.
/// </summary>
/// <remarks>
/// NOTE(review): kind values are free-form strings; the constants in
/// <c>EventKinds</c> cover the standard set. Emitters are presumably backed by an
/// <c>ITimelineEventStore</c> (both are registered together in DI) — confirm against
/// the concrete emitter implementation.
/// </remarks>
public interface ITimelineEventEmitter
{
/// <summary>
/// Emits a single event to the timeline.
/// </summary>
/// <typeparam name="TPayload">The payload type.</typeparam>
/// <param name="correlationId">Correlation ID linking related events.</param>
/// <param name="kind">Event kind (ENQUEUE, EXECUTE, etc.).</param>
/// <param name="payload">Event payload object.</param>
/// <param name="cancellationToken">Cancellation token.</param>
/// <returns>The emitted timeline event.</returns>
Task<TimelineEvent> EmitAsync<TPayload>(
string correlationId,
string kind,
TPayload payload,
CancellationToken cancellationToken = default) where TPayload : notnull;
/// <summary>
/// Emits multiple events atomically as a batch.
/// </summary>
/// <param name="events">Collection of pending events.</param>
/// <param name="cancellationToken">Cancellation token.</param>
/// <returns>The emitted timeline events.</returns>
Task<IReadOnlyList<TimelineEvent>> EmitBatchAsync(
IEnumerable<PendingEvent> events,
CancellationToken cancellationToken = default);
}

View File

@@ -0,0 +1,56 @@
// Copyright (c) StellaOps. Licensed under the AGPL-3.0-or-later.
using System.Security.Cryptography;
using System.Text;
using StellaOps.HybridLogicalClock;
namespace StellaOps.Eventing.Internal;
/// <summary>
/// Generates deterministic event IDs from event content.
/// </summary>
internal static class EventIdGenerator
{
    /// <summary>
    /// Derives a deterministic event ID as the first 128 bits of
    /// SHA-256(correlationId || tHlc || service || kind), rendered as 32 lowercase hex chars.
    /// </summary>
    /// <param name="correlationId">The correlation ID.</param>
    /// <param name="tHlc">The HLC timestamp.</param>
    /// <param name="service">The service name.</param>
    /// <param name="kind">The event kind.</param>
    /// <returns>First 32 hex characters of the SHA-256 hash (128 bits).</returns>
    public static string Generate(
        string correlationId,
        HlcTimestamp tHlc,
        string service,
        string kind)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(correlationId);
        ArgumentException.ThrowIfNullOrWhiteSpace(service);
        ArgumentException.ThrowIfNullOrWhiteSpace(kind);

        // NOTE(review): fields are concatenated with no separators, so adjacent
        // fields that shift characters between them can collide ("ab"+"c" == "a"+"bc").
        // The scheme is documented exactly this way in the DB migration comments, so
        // it is preserved here — confirm the inputs cannot be adversarial.
        using var sha = IncrementalHash.CreateHash(HashAlgorithmName.SHA256);
        foreach (var part in new[] { correlationId, tHlc.ToSortableString(), service, kind })
        {
            sha.AppendData(Encoding.UTF8.GetBytes(part));
        }

        // Truncate to 16 bytes (128 bits) -> 32 lowercase hex characters.
        var digest = sha.GetHashAndReset();
        return Convert.ToHexString(digest.AsSpan(0, 16)).ToLowerInvariant();
    }

    /// <summary>
    /// Computes the SHA-256 digest of the canonicalized JSON payload.
    /// </summary>
    /// <param name="payload">The canonicalized JSON payload.</param>
    /// <returns>SHA-256 digest bytes.</returns>
    public static byte[] ComputePayloadDigest(string payload)
    {
        ArgumentNullException.ThrowIfNull(payload);
        return SHA256.HashData(Encoding.UTF8.GetBytes(payload));
    }
}

View File

@@ -0,0 +1,57 @@
-- Migration: 20260107_001_create_timeline_events
-- Purpose: Create timeline schema and events table for unified event timeline
-- Create schema
CREATE SCHEMA IF NOT EXISTS timeline;
-- Create events table
CREATE TABLE timeline.events (
event_id TEXT PRIMARY KEY,
t_hlc TEXT NOT NULL, -- HLC timestamp (sortable string format)
ts_wall TIMESTAMPTZ NOT NULL, -- Wall-clock time (informational)
service TEXT NOT NULL, -- Service name
trace_parent TEXT, -- W3C Trace Context traceparent
correlation_id TEXT NOT NULL, -- Correlation ID linking events
kind TEXT NOT NULL, -- Event kind (ENQUEUE, EXECUTE, etc.)
payload JSONB NOT NULL, -- RFC 8785 canonicalized JSON payload
payload_digest BYTEA NOT NULL, -- SHA-256 digest of payload
engine_name TEXT NOT NULL, -- Engine/service name
engine_version TEXT NOT NULL, -- Engine version
engine_digest TEXT NOT NULL, -- Source/assembly digest
dsse_sig TEXT, -- Optional DSSE signature
schema_version INTEGER NOT NULL DEFAULT 1,
created_at TIMESTAMPTZ NOT NULL DEFAULT NOW()
);
-- Create indexes for common query patterns
CREATE INDEX idx_events_corr_hlc ON timeline.events (correlation_id, t_hlc);
CREATE INDEX idx_events_svc_hlc ON timeline.events (service, t_hlc);
CREATE INDEX idx_events_kind ON timeline.events (kind);
CREATE INDEX idx_events_created_at ON timeline.events (created_at);
-- GIN index for payload queries
CREATE INDEX idx_events_payload ON timeline.events USING GIN (payload);
-- Create outbox table for transactional outbox pattern
CREATE TABLE timeline.outbox (
id BIGSERIAL PRIMARY KEY,
event_id TEXT NOT NULL REFERENCES timeline.events(event_id),
status TEXT NOT NULL DEFAULT 'PENDING', -- PENDING, PROCESSING, COMPLETED, FAILED
retry_count INTEGER NOT NULL DEFAULT 0,
next_retry_at TIMESTAMPTZ,
error_message TEXT,
created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
updated_at TIMESTAMPTZ NOT NULL DEFAULT NOW()
);
CREATE INDEX idx_outbox_status_retry ON timeline.outbox (status, next_retry_at)
WHERE status IN ('PENDING', 'FAILED');
-- Comments for documentation
COMMENT ON TABLE timeline.events IS 'Unified timeline events from all StellaOps services';
COMMENT ON COLUMN timeline.events.event_id IS 'Deterministic event ID: SHA-256(correlation_id || t_hlc || service || kind)[0:32]';
COMMENT ON COLUMN timeline.events.t_hlc IS 'HLC timestamp in sortable string format: {physical}:{logical}:{nodeId}';
COMMENT ON COLUMN timeline.events.ts_wall IS 'Wall-clock time for human reference (not used for ordering)';
COMMENT ON COLUMN timeline.events.correlation_id IS 'Links related events (e.g., scanId, jobId, artifactDigest)';
COMMENT ON COLUMN timeline.events.kind IS 'Event type: ENQUEUE, EXECUTE, ATTEST, VERIFY, GATE_PASS, etc.';
COMMENT ON COLUMN timeline.events.payload IS 'RFC 8785 canonicalized JSON payload for deterministic hashing';

View File

@@ -0,0 +1,175 @@
// Copyright (c) StellaOps. Licensed under the AGPL-3.0-or-later.
using System.ComponentModel.DataAnnotations;
using System.Reflection;
using System.Text.Json.Serialization;
using StellaOps.HybridLogicalClock;
namespace StellaOps.Eventing.Models;
/// <summary>
/// Canonical event envelope for unified timeline.
/// </summary>
/// <remarks>
/// Declares custom equality: the compiler-synthesized record equality would compare
/// <see cref="PayloadDigest"/> (a <c>byte[]</c>) by reference, so two structurally
/// identical events would never compare equal. Equality here compares the digest by
/// content. NOTE(review): the <c>byte[]</c> is still mutable after construction —
/// consider <c>ImmutableArray&lt;byte&gt;</c> if callers may share it.
/// </remarks>
public sealed record TimelineEvent
{
    /// <summary>
    /// Deterministic event ID: SHA-256(correlation_id || t_hlc || service || kind)[0:32] as hex.
    /// </summary>
    [JsonPropertyName("event_id")]
    public required string EventId { get; init; }

    /// <summary>
    /// HLC timestamp from StellaOps.HybridLogicalClock.
    /// </summary>
    [JsonPropertyName("t_hlc")]
    public required HlcTimestamp THlc { get; init; }

    /// <summary>
    /// Wall-clock time (informational only).
    /// </summary>
    [JsonPropertyName("ts_wall")]
    public required DateTimeOffset TsWall { get; init; }

    /// <summary>
    /// Service name (e.g., "Scheduler", "AirGap", "Attestor").
    /// </summary>
    [JsonPropertyName("service")]
    public required string Service { get; init; }

    /// <summary>
    /// W3C Trace Context traceparent.
    /// </summary>
    [JsonPropertyName("trace_parent")]
    public string? TraceParent { get; init; }

    /// <summary>
    /// Correlation ID linking related events.
    /// </summary>
    [JsonPropertyName("correlation_id")]
    public required string CorrelationId { get; init; }

    /// <summary>
    /// Event kind (ENQUEUE, EXECUTE, EMIT, etc.).
    /// </summary>
    [JsonPropertyName("kind")]
    public required string Kind { get; init; }

    /// <summary>
    /// RFC 8785 canonicalized JSON payload.
    /// </summary>
    [JsonPropertyName("payload")]
    public required string Payload { get; init; }

    /// <summary>
    /// SHA-256 digest of Payload.
    /// </summary>
    [JsonPropertyName("payload_digest")]
    public required byte[] PayloadDigest { get; init; }

    /// <summary>
    /// Engine version for reproducibility.
    /// </summary>
    [JsonPropertyName("engine_version")]
    public required EngineVersionRef EngineVersion { get; init; }

    /// <summary>
    /// Optional DSSE signature.
    /// </summary>
    [JsonPropertyName("dsse_sig")]
    public string? DsseSig { get; init; }

    /// <summary>
    /// Schema version (current: 1).
    /// </summary>
    [JsonPropertyName("schema_version")]
    public int SchemaVersion { get; init; } = 1;

    /// <summary>
    /// Value equality over all members; <see cref="PayloadDigest"/> is compared by content.
    /// </summary>
    public bool Equals(TimelineEvent? other) =>
        other is not null
        && EventId == other.EventId
        && THlc.Equals(other.THlc)
        && TsWall.Equals(other.TsWall)
        && Service == other.Service
        && TraceParent == other.TraceParent
        && CorrelationId == other.CorrelationId
        && Kind == other.Kind
        && Payload == other.Payload
        && PayloadDigest.AsSpan().SequenceEqual(other.PayloadDigest)
        && EngineVersion == other.EngineVersion
        && DsseSig == other.DsseSig
        && SchemaVersion == other.SchemaVersion;

    /// <summary>
    /// Hash code consistent with <see cref="Equals(TimelineEvent?)"/>.
    /// </summary>
    public override int GetHashCode()
    {
        var hash = default(HashCode);
        hash.Add(EventId, StringComparer.Ordinal);
        hash.Add(THlc);
        hash.Add(TsWall);
        hash.Add(Service, StringComparer.Ordinal);
        hash.Add(CorrelationId, StringComparer.Ordinal);
        hash.Add(Kind, StringComparer.Ordinal);
        hash.AddBytes(PayloadDigest);
        hash.Add(SchemaVersion);
        return hash.ToHashCode();
    }
}
/// <summary>
/// Identifies the engine that produced an event, for reproducibility tracking.
/// </summary>
/// <param name="EngineName">The name of the engine/service.</param>
/// <param name="Version">The version string.</param>
/// <param name="SourceDigest">SHA-256 digest of the source or assembly.</param>
public sealed record EngineVersionRef(
    [property: JsonPropertyName("engine_name")] string EngineName,
    [property: JsonPropertyName("version")] string Version,
    [property: JsonPropertyName("source_digest")] string SourceDigest)
{
    /// <summary>
    /// Builds a reference from the metadata of <paramref name="assembly"/>.
    /// </summary>
    public static EngineVersionRef FromAssembly(Assembly assembly)
    {
        ArgumentNullException.ThrowIfNull(assembly);

        var assemblyName = assembly.GetName();
        var versionText = assemblyName.Version?.ToString() ?? "0.0.0";

        // A "SourceDigest" AssemblyMetadata attribute, when present, pins the build input.
        string? digest = null;
        foreach (var attribute in assembly.GetCustomAttributes<AssemblyMetadataAttribute>())
        {
            if (attribute.Key == "SourceDigest")
            {
                digest = attribute.Value;
                break;
            }
        }

        return new EngineVersionRef(assemblyName.Name ?? "Unknown", versionText, digest ?? "unknown");
    }

    /// <summary>
    /// Builds a reference from the process entry assembly.
    /// </summary>
    /// <exception cref="InvalidOperationException">No entry assembly is available.</exception>
    public static EngineVersionRef FromEntryAssembly() =>
        FromAssembly(Assembly.GetEntryAssembly()
            ?? throw new InvalidOperationException("No entry assembly found"));
}
/// <summary>
/// Pending event for batch emission.
/// </summary>
/// <remarks>
/// NOTE(review): the raw <see cref="Payload"/> object is presumably canonicalized
/// (RFC 8785) and digested by the emitter when converted into a
/// <c>TimelineEvent</c> — confirm against the emitter implementation.
/// </remarks>
/// <param name="CorrelationId">Correlation ID linking related events.</param>
/// <param name="Kind">Event kind (ENQUEUE, EXECUTE, etc.).</param>
/// <param name="Payload">Event payload object.</param>
public sealed record PendingEvent(
string CorrelationId,
string Kind,
object Payload);
/// <summary>
/// Standard event kinds used across StellaOps services.
/// </summary>
/// <remarks>
/// These string values are persisted in the timeline "kind" column and are hashed
/// into the deterministic event ID (see <c>EventIdGenerator.Generate</c>), so they
/// must never be renamed once events have been emitted.
/// </remarks>
public static class EventKinds
{
// Scheduler events
public const string Enqueue = "ENQUEUE";
public const string Dequeue = "DEQUEUE";
public const string Execute = "EXECUTE";
public const string Complete = "COMPLETE";
public const string Fail = "FAIL";
// AirGap events
public const string Import = "IMPORT";
public const string Export = "EXPORT";
public const string Merge = "MERGE";
public const string Conflict = "CONFLICT";
// Attestor events
public const string Attest = "ATTEST";
public const string Verify = "VERIFY";
// Policy events
public const string Evaluate = "EVALUATE";
public const string GatePass = "GATE_PASS";
public const string GateFail = "GATE_FAIL";
// VexLens events
public const string Consensus = "CONSENSUS";
public const string Override = "OVERRIDE";
// Generic events (note: Error is deliberately the short string "ERR")
public const string Emit = "EMIT";
public const string Ack = "ACK";
public const string Error = "ERR";
}

View File

@@ -0,0 +1,187 @@
// Copyright (c) StellaOps. Licensed under the AGPL-3.0-or-later.
using System.Data;
using Microsoft.Extensions.Hosting;
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Options;
using Npgsql;
namespace StellaOps.Eventing.Outbox;
/// <summary>
/// Background service that processes the transactional outbox for reliable event delivery.
/// </summary>
/// <remarks>
/// Retry model: a row is eligible for delivery while its status is <c>PENDING</c> and
/// its <c>next_retry_at</c> is NULL or in the past. Failed attempts re-schedule the
/// row with exponential backoff; after <see cref="MaxRetries"/> failed attempts the
/// row is parked as <c>FAILED</c> (with <c>next_retry_at</c> cleared) and is never
/// selected again. The previous logic selected PENDING rows unconditionally
/// (defeating the backoff) and re-selected terminally FAILED rows forever.
/// </remarks>
public sealed class TimelineOutboxProcessor : BackgroundService
{
    // Maximum failed delivery attempts before an entry is parked as terminally FAILED.
    private const int MaxRetries = 5;

    private readonly NpgsqlDataSource _dataSource;
    private readonly IOptions<EventingOptions> _options;
    private readonly ILogger<TimelineOutboxProcessor> _logger;
    private readonly TimeProvider _timeProvider;

    /// <summary>
    /// Initializes a new instance of the <see cref="TimelineOutboxProcessor"/> class.
    /// </summary>
    /// <param name="dataSource">PostgreSQL data source for the timeline schema.</param>
    /// <param name="options">Eventing options (enable flag, batch size, poll interval).</param>
    /// <param name="logger">Logger.</param>
    /// <param name="timeProvider">Clock used to schedule retries; defaults to the system clock.</param>
    public TimelineOutboxProcessor(
        NpgsqlDataSource dataSource,
        IOptions<EventingOptions> options,
        ILogger<TimelineOutboxProcessor> logger,
        TimeProvider? timeProvider = null)
    {
        _dataSource = dataSource ?? throw new ArgumentNullException(nameof(dataSource));
        _options = options ?? throw new ArgumentNullException(nameof(options));
        _logger = logger ?? throw new ArgumentNullException(nameof(logger));
        _timeProvider = timeProvider ?? TimeProvider.System;
    }

    /// <inheritdoc/>
    protected override async Task ExecuteAsync(CancellationToken stoppingToken)
    {
        if (!_options.Value.EnableOutbox)
        {
            _logger.LogInformation("Outbox processing disabled");
            return;
        }

        _logger.LogInformation(
            "Starting outbox processor with batch size {BatchSize} and interval {Interval}",
            _options.Value.OutboxBatchSize,
            _options.Value.OutboxInterval);

        while (!stoppingToken.IsCancellationRequested)
        {
            try
            {
                var processedCount = await ProcessBatchAsync(stoppingToken).ConfigureAwait(false);
                if (processedCount > 0)
                {
                    _logger.LogDebug("Processed {Count} outbox entries", processedCount);
                }
            }
            catch (OperationCanceledException) when (stoppingToken.IsCancellationRequested)
            {
                break;
            }
            catch (Exception ex)
            {
                // Keep polling after transient failures (connection loss, etc.).
                _logger.LogError(ex, "Error processing outbox batch");
            }

            try
            {
                await Task.Delay(_options.Value.OutboxInterval, stoppingToken).ConfigureAwait(false);
            }
            catch (OperationCanceledException)
            {
                // Shutdown requested during the idle delay; exit cleanly instead of
                // letting the cancellation exception escape ExecuteAsync.
                break;
            }
        }

        _logger.LogInformation("Outbox processor stopped");
    }

    /// <summary>
    /// Selects and processes one batch of due outbox entries inside a single transaction.
    /// </summary>
    /// <returns>The number of entries marked COMPLETED.</returns>
    private async Task<int> ProcessBatchAsync(CancellationToken cancellationToken)
    {
        await using var connection = await _dataSource.OpenConnectionAsync(cancellationToken).ConfigureAwait(false);
        await using var transaction = await connection.BeginTransactionAsync(IsolationLevel.ReadCommitted, cancellationToken).ConfigureAwait(false);
        try
        {
            // Select and lock due entries. next_retry_at gates retried rows so the
            // exponential backoff is honoured; terminally FAILED rows are never
            // selected. SKIP LOCKED lets multiple processors run concurrently.
            const string selectSql = """
                SELECT id, event_id, retry_count
                FROM timeline.outbox
                WHERE status = 'PENDING'
                AND (next_retry_at IS NULL OR next_retry_at <= NOW())
                ORDER BY id
                LIMIT @batch_size
                FOR UPDATE SKIP LOCKED
                """;
            await using var selectCmd = new NpgsqlCommand(selectSql, connection, transaction);
            selectCmd.Parameters.AddWithValue("@batch_size", _options.Value.OutboxBatchSize);

            var entries = new List<(long Id, string EventId, int RetryCount)>();
            await using (var reader = await selectCmd.ExecuteReaderAsync(cancellationToken).ConfigureAwait(false))
            {
                while (await reader.ReadAsync(cancellationToken).ConfigureAwait(false))
                {
                    entries.Add((
                        reader.GetInt64(0),
                        reader.GetString(1),
                        reader.GetInt32(2)));
                }
            }

            if (entries.Count == 0)
            {
                await transaction.RollbackAsync(cancellationToken).ConfigureAwait(false);
                return 0;
            }

            // Process each entry (in a real implementation, this would forward to downstream consumers).
            var completedIds = new List<long>();
            foreach (var entry in entries)
            {
                try
                {
                    // TODO: Forward event to downstream consumers
                    // For now, just mark as completed
                    completedIds.Add(entry.Id);
                }
                catch (Exception ex)
                {
                    _logger.LogWarning(ex, "Failed to process outbox entry {Id}", entry.Id);
                    await MarkAsFailedAsync(connection, transaction, entry.Id, entry.RetryCount, ex.Message, cancellationToken).ConfigureAwait(false);
                }
            }

            // Mark completed entries.
            if (completedIds.Count > 0)
            {
                const string completeSql = """
                    UPDATE timeline.outbox
                    SET status = 'COMPLETED', updated_at = NOW()
                    WHERE id = ANY(@ids)
                    """;
                await using var completeCmd = new NpgsqlCommand(completeSql, connection, transaction);
                completeCmd.Parameters.AddWithValue("@ids", completedIds.ToArray());
                await completeCmd.ExecuteNonQueryAsync(cancellationToken).ConfigureAwait(false);
            }

            await transaction.CommitAsync(cancellationToken).ConfigureAwait(false);
            return completedIds.Count;
        }
        catch
        {
            await transaction.RollbackAsync(cancellationToken).ConfigureAwait(false);
            throw;
        }
    }

    /// <summary>
    /// Records a failed attempt: re-schedules the entry with exponential backoff, or
    /// parks it as terminally FAILED once retries are exhausted.
    /// </summary>
    private async Task MarkAsFailedAsync(
        NpgsqlConnection connection,
        NpgsqlTransaction transaction,
        long id,
        int retryCount,
        string errorMessage,
        CancellationToken cancellationToken)
    {
        // Exponential backoff: 1s, 2s, 4s, 8s, 16s. After MaxRetries failed attempts
        // the entry becomes FAILED with next_retry_at cleared so it is never retried.
        var terminal = retryCount >= MaxRetries;
        var newStatus = terminal ? "FAILED" : "PENDING";
        object nextRetryAt = terminal
            ? DBNull.Value
            : _timeProvider.GetUtcNow().Add(TimeSpan.FromSeconds(Math.Pow(2, retryCount)));

        const string sql = """
            UPDATE timeline.outbox
            SET status = @status,
                retry_count = @retry_count,
                next_retry_at = @next_retry_at,
                error_message = @error_message,
                updated_at = NOW()
            WHERE id = @id
            """;
        await using var cmd = new NpgsqlCommand(sql, connection, transaction);
        cmd.Parameters.AddWithValue("@id", id);
        cmd.Parameters.AddWithValue("@status", newStatus);
        cmd.Parameters.AddWithValue("@retry_count", retryCount + 1);
        // Explicit NpgsqlDbType: AddWithValue cannot infer a type from DBNull.Value.
        cmd.Parameters.Add(new NpgsqlParameter("@next_retry_at", NpgsqlTypes.NpgsqlDbType.TimestampTz) { Value = nextRetryAt });
        cmd.Parameters.AddWithValue("@error_message", errorMessage);
        await cmd.ExecuteNonQueryAsync(cancellationToken).ConfigureAwait(false);
    }
}

View File

@@ -0,0 +1,141 @@
// Copyright (c) StellaOps. Licensed under the AGPL-3.0-or-later.
using Microsoft.Extensions.Configuration;
using Microsoft.Extensions.DependencyInjection;
using Microsoft.Extensions.DependencyInjection.Extensions;
using Npgsql;
using StellaOps.Eventing.Signing;
using StellaOps.Eventing.Storage;
using StellaOps.Eventing.Telemetry;
namespace StellaOps.Eventing;
/// <summary>
/// Extension methods for registering eventing services.
/// </summary>
public static class ServiceCollectionExtensions
{
    /// <summary>
    /// Adds StellaOps eventing services to the service collection, configured from
    /// the "Eventing" configuration section.
    /// </summary>
    /// <param name="services">The service collection.</param>
    /// <param name="configuration">The configuration.</param>
    /// <returns>The service collection.</returns>
    public static IServiceCollection AddStellaOpsEventing(
        this IServiceCollection services,
        IConfiguration configuration)
    {
        ArgumentNullException.ThrowIfNull(services);
        ArgumentNullException.ThrowIfNull(configuration);

        // Register options with validation.
        services.AddOptions<EventingOptions>()
            .Bind(configuration.GetSection(EventingOptions.SectionName))
            .ValidateDataAnnotations()
            .ValidateOnStart();

        // Register TimeProvider if not already registered.
        services.TryAddSingleton(TimeProvider.System);

        // Register core services.
        services.TryAddSingleton<ITimelineEventEmitter, TimelineEventEmitter>();

        // Choose the store from configuration. NOTE: this snapshot is taken at
        // registration time; later configuration reloads do not change the store.
        var options = configuration.GetSection(EventingOptions.SectionName).Get<EventingOptions>();
        if (options?.UseInMemoryStore == true)
        {
            services.TryAddSingleton<ITimelineEventStore, InMemoryTimelineEventStore>();
        }
        else
        {
            // PostgresTimelineEventStore needs an NpgsqlDataSource. Register one from
            // the configured connection string unless the host already provides its own;
            // previously this overload registered the store without any data source.
            if (options?.ConnectionString is { Length: > 0 } connectionString)
            {
                services.TryAddSingleton(_ => NpgsqlDataSource.Create(connectionString));
            }
            services.TryAddSingleton<ITimelineEventStore, PostgresTimelineEventStore>();
        }

        // Register telemetry.
        services.TryAddSingleton<EventingTelemetry>();
        return services;
    }

    /// <summary>
    /// Adds StellaOps eventing with PostgreSQL store.
    /// </summary>
    /// <param name="services">The service collection.</param>
    /// <param name="serviceName">The service name.</param>
    /// <param name="connectionString">The PostgreSQL connection string.</param>
    /// <returns>The service collection.</returns>
    public static IServiceCollection AddStellaOpsEventing(
        this IServiceCollection services,
        string serviceName,
        string connectionString)
    {
        ArgumentNullException.ThrowIfNull(services);
        ArgumentException.ThrowIfNullOrWhiteSpace(serviceName);
        ArgumentException.ThrowIfNullOrWhiteSpace(connectionString);

        services.Configure<EventingOptions>(options =>
        {
            options.ServiceName = serviceName;
            options.ConnectionString = connectionString;
        });
        services.AddOptions<EventingOptions>()
            .ValidateDataAnnotations()
            .ValidateOnStart();

        services.TryAddSingleton(TimeProvider.System);

        // Register NpgsqlDataSource.
        services.TryAddSingleton(_ => NpgsqlDataSource.Create(connectionString));
        services.TryAddSingleton<ITimelineEventStore, PostgresTimelineEventStore>();
        services.TryAddSingleton<ITimelineEventEmitter, TimelineEventEmitter>();
        services.TryAddSingleton<EventingTelemetry>();
        return services;
    }

    /// <summary>
    /// Adds StellaOps eventing with in-memory store (for testing).
    /// </summary>
    /// <param name="services">The service collection.</param>
    /// <param name="serviceName">The service name.</param>
    /// <returns>The service collection.</returns>
    public static IServiceCollection AddStellaOpsEventingInMemory(
        this IServiceCollection services,
        string serviceName)
    {
        ArgumentNullException.ThrowIfNull(services);
        ArgumentException.ThrowIfNullOrWhiteSpace(serviceName);

        services.Configure<EventingOptions>(options =>
        {
            options.ServiceName = serviceName;
            options.UseInMemoryStore = true;
        });
        services.AddOptions<EventingOptions>()
            .ValidateDataAnnotations()
            .ValidateOnStart();

        services.TryAddSingleton(TimeProvider.System);
        services.TryAddSingleton<ITimelineEventStore, InMemoryTimelineEventStore>();
        services.TryAddSingleton<ITimelineEventEmitter, TimelineEventEmitter>();
        services.TryAddSingleton<EventingTelemetry>();
        return services;
    }

    /// <summary>
    /// Adds an event signer for DSSE signing.
    /// </summary>
    /// <typeparam name="TSigner">The signer implementation type.</typeparam>
    /// <param name="services">The service collection.</param>
    /// <returns>The service collection.</returns>
    public static IServiceCollection AddEventSigner<TSigner>(this IServiceCollection services)
        where TSigner : class, IEventSigner
    {
        // Guard added for consistency with the other registration helpers.
        ArgumentNullException.ThrowIfNull(services);
        services.TryAddSingleton<IEventSigner, TSigner>();
        return services;
    }
}

View File

@@ -0,0 +1,30 @@
// Copyright (c) StellaOps. Licensed under the AGPL-3.0-or-later.
using StellaOps.Eventing.Models;
namespace StellaOps.Eventing.Signing;
/// <summary>
/// Interface for signing timeline events with DSSE.
/// </summary>
/// <remarks>
/// NOTE(review): the expected behaviour of <see cref="Verify"/> when
/// <c>TimelineEvent.DsseSig</c> is null (return false vs. throw) is not specified
/// here — confirm against the concrete implementations.
/// </remarks>
public interface IEventSigner
{
/// <summary>
/// Signs a timeline event and returns the DSSE signature.
/// </summary>
/// <param name="timelineEvent">The event to sign.</param>
/// <returns>The DSSE signature string.</returns>
string Sign(TimelineEvent timelineEvent);
/// <summary>
/// Verifies a timeline event signature.
/// </summary>
/// <param name="timelineEvent">The event with DSSE signature.</param>
/// <returns>True if signature is valid.</returns>
bool Verify(TimelineEvent timelineEvent);
/// <summary>
/// Gets the key ID used for signing.
/// </summary>
string KeyId { get; }
}

View File

@@ -0,0 +1,28 @@
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<TargetFramework>net10.0</TargetFramework>
<ImplicitUsings>enable</ImplicitUsings>
<Nullable>enable</Nullable>
<TreatWarningsAsErrors>true</TreatWarningsAsErrors>
<RootNamespace>StellaOps.Eventing</RootNamespace>
<Description>StellaOps Event Envelope SDK for unified timeline events</Description>
</PropertyGroup>
<ItemGroup>
<ProjectReference Include="..\StellaOps.HybridLogicalClock\StellaOps.HybridLogicalClock.csproj" />
<ProjectReference Include="..\StellaOps.Canonical.Json\StellaOps.Canonical.Json.csproj" />
<ProjectReference Include="..\StellaOps.Determinism.Abstractions\StellaOps.Determinism.Abstractions.csproj" />
<ProjectReference Include="..\StellaOps.Infrastructure.Postgres\StellaOps.Infrastructure.Postgres.csproj" />
</ItemGroup>
<ItemGroup>
<PackageReference Include="Microsoft.Extensions.DependencyInjection.Abstractions" />
<PackageReference Include="Microsoft.Extensions.Options" />
<PackageReference Include="Microsoft.Extensions.Options.DataAnnotations" />
<PackageReference Include="Microsoft.Extensions.Logging.Abstractions" />
<PackageReference Include="System.Diagnostics.DiagnosticSource" />
<PackageReference Include="Npgsql" />
</ItemGroup>
</Project>

View File

@@ -0,0 +1,86 @@
// Copyright (c) StellaOps. Licensed under the AGPL-3.0-or-later.
using StellaOps.Eventing.Models;
using StellaOps.HybridLogicalClock;
namespace StellaOps.Eventing.Storage;
/// <summary>
/// Interface for timeline event persistence.
/// </summary>
/// <remarks>
/// Query methods return events ordered by HLC timestamp; per the library charter the
/// ordering must be deterministic across implementations (the sortable HLC string,
/// compared ordinally).
/// </remarks>
public interface ITimelineEventStore
{
/// <summary>
/// Appends a single event to the store.
/// </summary>
/// <param name="timelineEvent">The event to append.</param>
/// <param name="cancellationToken">Cancellation token.</param>
/// <returns>A task representing the operation.</returns>
Task AppendAsync(TimelineEvent timelineEvent, CancellationToken cancellationToken = default);
/// <summary>
/// Appends multiple events atomically.
/// </summary>
/// <param name="events">The events to append.</param>
/// <param name="cancellationToken">Cancellation token.</param>
/// <returns>A task representing the operation.</returns>
Task AppendBatchAsync(IEnumerable<TimelineEvent> events, CancellationToken cancellationToken = default);
/// <summary>
/// Gets events by correlation ID, ordered by HLC timestamp.
/// </summary>
/// <param name="correlationId">The correlation ID.</param>
/// <param name="limit">Maximum number of events to return.</param>
/// <param name="offset">Number of events to skip.</param>
/// <param name="cancellationToken">Cancellation token.</param>
/// <returns>Events ordered by HLC timestamp.</returns>
Task<IReadOnlyList<TimelineEvent>> GetByCorrelationIdAsync(
string correlationId,
int limit = 100,
int offset = 0,
CancellationToken cancellationToken = default);
/// <summary>
/// Gets events within an HLC range.
/// </summary>
/// <param name="correlationId">The correlation ID.</param>
/// <param name="fromHlc">Start of HLC range (inclusive).</param>
/// <param name="toHlc">End of HLC range (inclusive).</param>
/// <param name="cancellationToken">Cancellation token.</param>
/// <returns>Events within the range, ordered by HLC timestamp.</returns>
Task<IReadOnlyList<TimelineEvent>> GetByHlcRangeAsync(
string correlationId,
HlcTimestamp fromHlc,
HlcTimestamp toHlc,
CancellationToken cancellationToken = default);
/// <summary>
/// Gets events by service.
/// </summary>
/// <param name="service">The service name.</param>
/// <param name="fromHlc">Optional start of HLC range (inclusive).</param>
/// <param name="limit">Maximum number of events to return.</param>
/// <param name="cancellationToken">Cancellation token.</param>
/// <returns>Events from the service, ordered by HLC timestamp.</returns>
Task<IReadOnlyList<TimelineEvent>> GetByServiceAsync(
string service,
HlcTimestamp? fromHlc = null,
int limit = 100,
CancellationToken cancellationToken = default);
/// <summary>
/// Gets a single event by ID.
/// </summary>
/// <param name="eventId">The event ID.</param>
/// <param name="cancellationToken">Cancellation token.</param>
/// <returns>The event, or null if not found.</returns>
Task<TimelineEvent?> GetByIdAsync(string eventId, CancellationToken cancellationToken = default);
/// <summary>
/// Counts events for a correlation ID.
/// </summary>
/// <param name="correlationId">The correlation ID.</param>
/// <param name="cancellationToken">Cancellation token.</param>
/// <returns>Number of events.</returns>
Task<long> CountByCorrelationIdAsync(string correlationId, CancellationToken cancellationToken = default);
}

View File

@@ -0,0 +1,126 @@
// Copyright (c) StellaOps. Licensed under the AGPL-3.0-or-later.
using System.Collections.Concurrent;
using StellaOps.Eventing.Models;
using StellaOps.HybridLogicalClock;
namespace StellaOps.Eventing.Storage;
/// <summary>
/// In-memory implementation of <see cref="ITimelineEventStore"/> for testing.
/// </summary>
public sealed class InMemoryTimelineEventStore : ITimelineEventStore
{
private readonly ConcurrentDictionary<string, TimelineEvent> _events = new();
/// <inheritdoc/>
/// <remarks>
/// NOTE(review): a duplicate <c>EventId</c> is silently ignored (TryAdd keeps the
/// first event) — presumably intended as idempotent append given deterministic IDs;
/// confirm this matches the PostgreSQL store's behaviour.
/// </remarks>
public Task AppendAsync(TimelineEvent timelineEvent, CancellationToken cancellationToken = default)
{
ArgumentNullException.ThrowIfNull(timelineEvent);
// First writer wins; no exception on duplicate IDs.
_events.TryAdd(timelineEvent.EventId, timelineEvent);
return Task.CompletedTask;
}
/// <inheritdoc/>
/// <remarks>
/// NOTE(review): events are inserted one at a time, so a concurrent reader may
/// observe a partially appended batch even though the interface describes the
/// operation as atomic — acceptable for a test double, but worth confirming.
/// Duplicate event IDs are silently skipped (first writer wins).
/// </remarks>
public Task AppendBatchAsync(IEnumerable<TimelineEvent> events, CancellationToken cancellationToken = default)
{
ArgumentNullException.ThrowIfNull(events);
foreach (var e in events)
{
_events.TryAdd(e.EventId, e);
}
return Task.CompletedTask;
}
/// <inheritdoc/>
/// <remarks>
/// Ordering uses <see cref="StringComparer.Ordinal"/>: the parameterless string
/// OrderBy is culture-sensitive, which would make result order vary by locale and
/// break the library's determinism guarantee. EventId is a deterministic tie-break
/// for events sharing the same HLC sort key (Values enumeration order is arbitrary).
/// </remarks>
public Task<IReadOnlyList<TimelineEvent>> GetByCorrelationIdAsync(
    string correlationId,
    int limit = 100,
    int offset = 0,
    CancellationToken cancellationToken = default)
{
    var result = _events.Values
        .Where(e => e.CorrelationId == correlationId)
        .OrderBy(e => e.THlc.ToSortableString(), StringComparer.Ordinal)
        .ThenBy(e => e.EventId, StringComparer.Ordinal)
        .Skip(offset)
        .Take(limit)
        .ToList();
    return Task.FromResult<IReadOnlyList<TimelineEvent>>(result);
}
/// <inheritdoc/>
public Task<IReadOnlyList<TimelineEvent>> GetByHlcRangeAsync(
string correlationId,
HlcTimestamp fromHlc,
HlcTimestamp toHlc,
CancellationToken cancellationToken = default)
{
var fromStr = fromHlc.ToSortableString();
var toStr = toHlc.ToSortableString();
var result = _events.Values
.Where(e => e.CorrelationId == correlationId)
.Where(e =>
{
var hlcStr = e.THlc.ToSortableString();
return string.Compare(hlcStr, fromStr, StringComparison.Ordinal) >= 0 &&
string.Compare(hlcStr, toStr, StringComparison.Ordinal) <= 0;
})
.OrderBy(e => e.THlc.ToSortableString())
.ToList();
return Task.FromResult<IReadOnlyList<TimelineEvent>>(result);
}
/// <inheritdoc/>
public Task<IReadOnlyList<TimelineEvent>> GetByServiceAsync(
string service,
HlcTimestamp? fromHlc = null,
int limit = 100,
CancellationToken cancellationToken = default)
{
var query = _events.Values.Where(e => e.Service == service);
if (fromHlc.HasValue)
{
var fromStr = fromHlc.Value.ToSortableString();
query = query.Where(e =>
string.Compare(e.THlc.ToSortableString(), fromStr, StringComparison.Ordinal) >= 0);
}
var result = query
.OrderBy(e => e.THlc.ToSortableString())
.Take(limit)
.ToList();
return Task.FromResult<IReadOnlyList<TimelineEvent>>(result);
}
/// <inheritdoc/>
public Task<TimelineEvent?> GetByIdAsync(string eventId, CancellationToken cancellationToken = default)
{
_events.TryGetValue(eventId, out var e);
return Task.FromResult(e);
}
/// <inheritdoc/>
public Task<long> CountByCorrelationIdAsync(string correlationId, CancellationToken cancellationToken = default)
{
var count = _events.Values.Count(e => e.CorrelationId == correlationId);
return Task.FromResult((long)count);
}
/// <summary>
/// Clears all events (for testing).
/// </summary>
public void Clear() => _events.Clear();
/// <summary>
/// Gets all events (for testing).
/// </summary>
public IReadOnlyCollection<TimelineEvent> GetAll() => _events.Values.ToList();
}

View File

@@ -0,0 +1,312 @@
// Copyright (c) StellaOps. Licensed under the AGPL-3.0-or-later.
using System.Data;
using System.Globalization;
using Microsoft.Extensions.Logging;
using Npgsql;
using StellaOps.Eventing.Models;
using StellaOps.HybridLogicalClock;
namespace StellaOps.Eventing.Storage;
/// <summary>
/// PostgreSQL implementation of <see cref="ITimelineEventStore"/> backed by the
/// <c>timeline.events</c> table. Inserts are idempotent on <c>event_id</c>
/// (<c>ON CONFLICT ... DO NOTHING</c>) and reads are ordered by the sortable
/// HLC string stored in <c>t_hlc</c>.
/// </summary>
public sealed class PostgresTimelineEventStore : ITimelineEventStore
{
    private readonly NpgsqlDataSource _dataSource;
    private readonly ILogger<PostgresTimelineEventStore> _logger;
    /// <summary>
    /// Initializes a new instance of the <see cref="PostgresTimelineEventStore"/> class.
    /// </summary>
    /// <param name="dataSource">Pooled Npgsql data source; a connection is opened per operation.</param>
    /// <param name="logger">Logger for diagnostic messages.</param>
    public PostgresTimelineEventStore(
        NpgsqlDataSource dataSource,
        ILogger<PostgresTimelineEventStore> logger)
    {
        _dataSource = dataSource ?? throw new ArgumentNullException(nameof(dataSource));
        _logger = logger ?? throw new ArgumentNullException(nameof(logger));
    }
    /// <inheritdoc/>
    public async Task AppendAsync(TimelineEvent timelineEvent, CancellationToken cancellationToken = default)
    {
        ArgumentNullException.ThrowIfNull(timelineEvent);
        // ON CONFLICT DO NOTHING makes the append idempotent: re-inserting an
        // existing event_id is silently ignored instead of raising a unique violation.
        const string sql = """
            INSERT INTO timeline.events (
                event_id, t_hlc, ts_wall, service, trace_parent,
                correlation_id, kind, payload, payload_digest,
                engine_name, engine_version, engine_digest, dsse_sig, schema_version
            ) VALUES (
                @event_id, @t_hlc, @ts_wall, @service, @trace_parent,
                @correlation_id, @kind, @payload::jsonb, @payload_digest,
                @engine_name, @engine_version, @engine_digest, @dsse_sig, @schema_version
            )
            ON CONFLICT (event_id) DO NOTHING
            """;
        await using var connection = await _dataSource.OpenConnectionAsync(cancellationToken).ConfigureAwait(false);
        await using var command = new NpgsqlCommand(sql, connection);
        AddEventParameters(command, timelineEvent);
        var rowsAffected = await command.ExecuteNonQueryAsync(cancellationToken).ConfigureAwait(false);
        // Zero rows affected means the conflict clause fired (duplicate event).
        if (rowsAffected == 0)
        {
            _logger.LogDebug("Event {EventId} already exists (idempotent insert)", timelineEvent.EventId);
        }
    }
    /// <inheritdoc/>
    public async Task AppendBatchAsync(IEnumerable<TimelineEvent> events, CancellationToken cancellationToken = default)
    {
        ArgumentNullException.ThrowIfNull(events);
        // Materialize once so the enumerable is not re-evaluated inside the transaction.
        var eventList = events.ToList();
        if (eventList.Count == 0)
        {
            return;
        }
        // All inserts commit or roll back together; individual duplicates are still
        // tolerated via ON CONFLICT DO NOTHING.
        await using var connection = await _dataSource.OpenConnectionAsync(cancellationToken).ConfigureAwait(false);
        await using var transaction = await connection.BeginTransactionAsync(IsolationLevel.ReadCommitted, cancellationToken).ConfigureAwait(false);
        try
        {
            const string sql = """
                INSERT INTO timeline.events (
                    event_id, t_hlc, ts_wall, service, trace_parent,
                    correlation_id, kind, payload, payload_digest,
                    engine_name, engine_version, engine_digest, dsse_sig, schema_version
                ) VALUES (
                    @event_id, @t_hlc, @ts_wall, @service, @trace_parent,
                    @correlation_id, @kind, @payload::jsonb, @payload_digest,
                    @engine_name, @engine_version, @engine_digest, @dsse_sig, @schema_version
                )
                ON CONFLICT (event_id) DO NOTHING
                """;
            // NOTE(review): one round-trip per event inside the transaction; if batch
            // sizes grow, consider NpgsqlBatch or a prepared command with reused parameters.
            foreach (var timelineEvent in eventList)
            {
                await using var command = new NpgsqlCommand(sql, connection, transaction);
                AddEventParameters(command, timelineEvent);
                await command.ExecuteNonQueryAsync(cancellationToken).ConfigureAwait(false);
            }
            await transaction.CommitAsync(cancellationToken).ConfigureAwait(false);
            _logger.LogDebug("Appended batch of {Count} events", eventList.Count);
        }
        catch
        {
            await transaction.RollbackAsync(cancellationToken).ConfigureAwait(false);
            throw;
        }
    }
    /// <inheritdoc/>
    public async Task<IReadOnlyList<TimelineEvent>> GetByCorrelationIdAsync(
        string correlationId,
        int limit = 100,
        int offset = 0,
        CancellationToken cancellationToken = default)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(correlationId);
        // Ordered by t_hlc text; assumes the column's collation preserves HLC
        // order for the sortable-string format — TODO confirm schema uses "C" collation.
        const string sql = """
            SELECT event_id, t_hlc, ts_wall, service, trace_parent,
                   correlation_id, kind, payload, payload_digest,
                   engine_name, engine_version, engine_digest, dsse_sig, schema_version
            FROM timeline.events
            WHERE correlation_id = @correlation_id
            ORDER BY t_hlc ASC
            LIMIT @limit OFFSET @offset
            """;
        await using var connection = await _dataSource.OpenConnectionAsync(cancellationToken).ConfigureAwait(false);
        await using var command = new NpgsqlCommand(sql, connection);
        command.Parameters.AddWithValue("@correlation_id", correlationId);
        command.Parameters.AddWithValue("@limit", limit);
        command.Parameters.AddWithValue("@offset", offset);
        return await ExecuteQueryAsync(command, cancellationToken).ConfigureAwait(false);
    }
    /// <inheritdoc/>
    public async Task<IReadOnlyList<TimelineEvent>> GetByHlcRangeAsync(
        string correlationId,
        HlcTimestamp fromHlc,
        HlcTimestamp toHlc,
        CancellationToken cancellationToken = default)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(correlationId);
        // Inclusive range on the sortable HLC strings, compared as text in SQL.
        const string sql = """
            SELECT event_id, t_hlc, ts_wall, service, trace_parent,
                   correlation_id, kind, payload, payload_digest,
                   engine_name, engine_version, engine_digest, dsse_sig, schema_version
            FROM timeline.events
            WHERE correlation_id = @correlation_id
              AND t_hlc >= @from_hlc
              AND t_hlc <= @to_hlc
            ORDER BY t_hlc ASC
            """;
        await using var connection = await _dataSource.OpenConnectionAsync(cancellationToken).ConfigureAwait(false);
        await using var command = new NpgsqlCommand(sql, connection);
        command.Parameters.AddWithValue("@correlation_id", correlationId);
        command.Parameters.AddWithValue("@from_hlc", fromHlc.ToSortableString());
        command.Parameters.AddWithValue("@to_hlc", toHlc.ToSortableString());
        return await ExecuteQueryAsync(command, cancellationToken).ConfigureAwait(false);
    }
    /// <inheritdoc/>
    public async Task<IReadOnlyList<TimelineEvent>> GetByServiceAsync(
        string service,
        HlcTimestamp? fromHlc = null,
        int limit = 100,
        CancellationToken cancellationToken = default)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(service);
        // Two SQL variants: the @from_hlc predicate is only present when a lower bound is supplied.
        var sql = fromHlc.HasValue
            ? """
              SELECT event_id, t_hlc, ts_wall, service, trace_parent,
                     correlation_id, kind, payload, payload_digest,
                     engine_name, engine_version, engine_digest, dsse_sig, schema_version
              FROM timeline.events
              WHERE service = @service AND t_hlc >= @from_hlc
              ORDER BY t_hlc ASC
              LIMIT @limit
              """
            : """
              SELECT event_id, t_hlc, ts_wall, service, trace_parent,
                     correlation_id, kind, payload, payload_digest,
                     engine_name, engine_version, engine_digest, dsse_sig, schema_version
              FROM timeline.events
              WHERE service = @service
              ORDER BY t_hlc ASC
              LIMIT @limit
              """;
        await using var connection = await _dataSource.OpenConnectionAsync(cancellationToken).ConfigureAwait(false);
        await using var command = new NpgsqlCommand(sql, connection);
        command.Parameters.AddWithValue("@service", service);
        command.Parameters.AddWithValue("@limit", limit);
        if (fromHlc.HasValue)
        {
            command.Parameters.AddWithValue("@from_hlc", fromHlc.Value.ToSortableString());
        }
        return await ExecuteQueryAsync(command, cancellationToken).ConfigureAwait(false);
    }
    /// <inheritdoc/>
    public async Task<TimelineEvent?> GetByIdAsync(string eventId, CancellationToken cancellationToken = default)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(eventId);
        const string sql = """
            SELECT event_id, t_hlc, ts_wall, service, trace_parent,
                   correlation_id, kind, payload, payload_digest,
                   engine_name, engine_version, engine_digest, dsse_sig, schema_version
            FROM timeline.events
            WHERE event_id = @event_id
            """;
        await using var connection = await _dataSource.OpenConnectionAsync(cancellationToken).ConfigureAwait(false);
        await using var command = new NpgsqlCommand(sql, connection);
        command.Parameters.AddWithValue("@event_id", eventId);
        // event_id is the primary key, so the query yields at most one row.
        var events = await ExecuteQueryAsync(command, cancellationToken).ConfigureAwait(false);
        return events.Count > 0 ? events[0] : null;
    }
    /// <inheritdoc/>
    public async Task<long> CountByCorrelationIdAsync(string correlationId, CancellationToken cancellationToken = default)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(correlationId);
        const string sql = """
            SELECT COUNT(*) FROM timeline.events WHERE correlation_id = @correlation_id
            """;
        await using var connection = await _dataSource.OpenConnectionAsync(cancellationToken).ConfigureAwait(false);
        await using var command = new NpgsqlCommand(sql, connection);
        command.Parameters.AddWithValue("@correlation_id", correlationId);
        var result = await command.ExecuteScalarAsync(cancellationToken).ConfigureAwait(false);
        return Convert.ToInt64(result, CultureInfo.InvariantCulture);
    }
    // Binds all fourteen insert parameters for a single event. Optional columns
    // (trace_parent, dsse_sig) are mapped to DBNull when absent.
    private static void AddEventParameters(NpgsqlCommand command, TimelineEvent e)
    {
        command.Parameters.AddWithValue("@event_id", e.EventId);
        command.Parameters.AddWithValue("@t_hlc", e.THlc.ToSortableString());
        command.Parameters.AddWithValue("@ts_wall", e.TsWall);
        command.Parameters.AddWithValue("@service", e.Service);
        command.Parameters.AddWithValue("@trace_parent", (object?)e.TraceParent ?? DBNull.Value);
        command.Parameters.AddWithValue("@correlation_id", e.CorrelationId);
        command.Parameters.AddWithValue("@kind", e.Kind);
        command.Parameters.AddWithValue("@payload", e.Payload);
        command.Parameters.AddWithValue("@payload_digest", e.PayloadDigest);
        command.Parameters.AddWithValue("@engine_name", e.EngineVersion.EngineName);
        command.Parameters.AddWithValue("@engine_version", e.EngineVersion.Version);
        command.Parameters.AddWithValue("@engine_digest", e.EngineVersion.SourceDigest);
        command.Parameters.AddWithValue("@dsse_sig", (object?)e.DsseSig ?? DBNull.Value);
        command.Parameters.AddWithValue("@schema_version", e.SchemaVersion);
    }
    // Runs the prepared command and materializes every row into a TimelineEvent.
    private static async Task<IReadOnlyList<TimelineEvent>> ExecuteQueryAsync(
        NpgsqlCommand command,
        CancellationToken cancellationToken)
    {
        var events = new List<TimelineEvent>();
        await using var reader = await command.ExecuteReaderAsync(cancellationToken).ConfigureAwait(false);
        while (await reader.ReadAsync(cancellationToken).ConfigureAwait(false))
        {
            events.Add(MapFromReader(reader));
        }
        return events;
    }
    // Maps one result row back to a TimelineEvent, parsing t_hlc from its
    // sortable-string form. GetOrdinal is resolved per column per row, which is
    // acceptable for the modest result sets these queries return.
    private static TimelineEvent MapFromReader(NpgsqlDataReader reader)
    {
        var hlcString = reader.GetString(reader.GetOrdinal("t_hlc"));
        return new TimelineEvent
        {
            EventId = reader.GetString(reader.GetOrdinal("event_id")),
            THlc = HlcTimestamp.Parse(hlcString),
            TsWall = reader.GetFieldValue<DateTimeOffset>(reader.GetOrdinal("ts_wall")),
            Service = reader.GetString(reader.GetOrdinal("service")),
            TraceParent = reader.IsDBNull(reader.GetOrdinal("trace_parent"))
                ? null
                : reader.GetString(reader.GetOrdinal("trace_parent")),
            CorrelationId = reader.GetString(reader.GetOrdinal("correlation_id")),
            Kind = reader.GetString(reader.GetOrdinal("kind")),
            Payload = reader.GetString(reader.GetOrdinal("payload")),
            PayloadDigest = (byte[])reader.GetValue(reader.GetOrdinal("payload_digest")),
            EngineVersion = new EngineVersionRef(
                reader.GetString(reader.GetOrdinal("engine_name")),
                reader.GetString(reader.GetOrdinal("engine_version")),
                reader.GetString(reader.GetOrdinal("engine_digest"))),
            DsseSig = reader.IsDBNull(reader.GetOrdinal("dsse_sig"))
                ? null
                : reader.GetString(reader.GetOrdinal("dsse_sig")),
            SchemaVersion = reader.GetInt32(reader.GetOrdinal("schema_version"))
        };
    }
}

View File

@@ -0,0 +1,111 @@
// Copyright (c) StellaOps. Licensed under the AGPL-3.0-or-later.
using System.Diagnostics;
using System.Diagnostics.Metrics;
namespace StellaOps.Eventing.Telemetry;
/// <summary>
/// Metrics and tracing instrumentation for the eventing library.
/// </summary>
public sealed class EventingTelemetry : IDisposable
{
    /// <summary>
    /// Activity source for tracing.
    /// </summary>
    public static readonly ActivitySource ActivitySource = new("StellaOps.Eventing", "1.0.0");

    private readonly Meter _meter;
    private readonly Counter<long> _eventsEmittedCounter;
    private readonly Counter<long> _eventsPersistFailedCounter;
    private readonly Histogram<double> _emitDurationHistogram;
    private readonly Counter<long> _batchesEmittedCounter;

    /// <summary>
    /// Initializes a new instance of the <see cref="EventingTelemetry"/> class,
    /// creating the meter and every instrument it publishes.
    /// </summary>
    public EventingTelemetry()
    {
        _meter = new Meter("StellaOps.Eventing", "1.0.0");

        _eventsEmittedCounter = _meter.CreateCounter<long>(
            "stellaops_eventing_events_emitted_total",
            description: "Total number of timeline events emitted");

        _eventsPersistFailedCounter = _meter.CreateCounter<long>(
            "stellaops_eventing_events_persist_failed_total",
            description: "Total number of events that failed to persist");

        _emitDurationHistogram = _meter.CreateHistogram<double>(
            "stellaops_eventing_emit_duration_seconds",
            unit: "s",
            description: "Duration of event emission operations");

        _batchesEmittedCounter = _meter.CreateCounter<long>(
            "stellaops_eventing_batches_emitted_total",
            description: "Total number of event batches emitted");
    }

    // Small helper so tag construction reads uniformly at every call site.
    private static KeyValuePair<string, object?> Tag(string name, object? value) => new(name, value);

    /// <summary>
    /// Records a single event emission, tagged with service and kind.
    /// </summary>
    public void RecordEventEmitted(string service, string kind)
        => _eventsEmittedCounter.Add(1, Tag("service", service), Tag("kind", kind));

    /// <summary>
    /// Records a batch emission: one batch increment plus <paramref name="count"/>
    /// emitted events tagged with kind "batch".
    /// </summary>
    public void RecordBatchEmitted(string service, int count)
    {
        _batchesEmittedCounter.Add(1, Tag("service", service));
        _eventsEmittedCounter.Add(count, Tag("service", service), Tag("kind", "batch"));
    }

    /// <summary>
    /// Records a persist failure, tagged with service and failure reason.
    /// </summary>
    public void RecordPersistFailed(string service, string reason)
        => _eventsPersistFailedCounter.Add(1, Tag("service", service), Tag("reason", reason));

    /// <summary>
    /// Records an emit duration sample, in seconds.
    /// </summary>
    public void RecordEmitDuration(double durationSeconds, string service)
        => _emitDurationHistogram.Record(durationSeconds, Tag("service", service));

    /// <summary>
    /// Starts an emit activity for tracing, tagged with the correlation id and event kind.
    /// Returns null when no listener is sampling this source.
    /// </summary>
    public Activity? StartEmitActivity(string correlationId, string kind)
    {
        KeyValuePair<string, object?>[] tags =
        [
            Tag("correlation_id", correlationId),
            Tag("event_kind", kind)
        ];

        return ActivitySource.StartActivity("eventing.emit", ActivityKind.Producer, parentContext: default, tags);
    }

    /// <inheritdoc/>
    public void Dispose() => _meter.Dispose();
}

View File

@@ -0,0 +1,153 @@
// Copyright (c) StellaOps. Licensed under the AGPL-3.0-or-later.
using System.Diagnostics;
using System.Text.Json;
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Options;
using StellaOps.Eventing.Internal;
using StellaOps.Eventing.Models;
using StellaOps.Eventing.Signing;
using StellaOps.Eventing.Storage;
using StellaOps.HybridLogicalClock;
namespace StellaOps.Eventing;
/// <summary>
/// Implementation of <see cref="ITimelineEventEmitter"/> for emitting timeline events.
/// Each event is stamped with an HLC tick, a wall-clock timestamp, a deterministic
/// event ID, and (when available) the current trace context, then persisted via the
/// configured <see cref="ITimelineEventStore"/>.
/// </summary>
public sealed class TimelineEventEmitter : ITimelineEventEmitter
{
    // Shared serializer options for payload canonicalization. Cached because
    // allocating a fresh JsonSerializerOptions per call defeats System.Text.Json's
    // internal metadata caching (analyzer CA1869).
    private static readonly JsonSerializerOptions CanonicalJsonOptions = new()
    {
        PropertyNamingPolicy = JsonNamingPolicy.SnakeCaseLower,
        WriteIndented = false
    };

    private readonly IHybridLogicalClock _hlc;
    private readonly TimeProvider _timeProvider;
    private readonly ITimelineEventStore _eventStore;
    private readonly IEventSigner? _eventSigner;
    private readonly IOptions<EventingOptions> _options;
    private readonly ILogger<TimelineEventEmitter> _logger;
    private readonly EngineVersionRef _engineVersion;

    /// <summary>
    /// Initializes a new instance of the <see cref="TimelineEventEmitter"/> class.
    /// </summary>
    /// <param name="hlc">Hybrid logical clock used to stamp events.</param>
    /// <param name="timeProvider">Wall-clock source (injectable for deterministic tests).</param>
    /// <param name="eventStore">Destination store for emitted events.</param>
    /// <param name="options">Eventing options (service name, signing flag, engine version).</param>
    /// <param name="logger">Logger for diagnostic messages.</param>
    /// <param name="eventSigner">Optional DSSE signer; when null, events are emitted unsigned.</param>
    public TimelineEventEmitter(
        IHybridLogicalClock hlc,
        TimeProvider timeProvider,
        ITimelineEventStore eventStore,
        IOptions<EventingOptions> options,
        ILogger<TimelineEventEmitter> logger,
        IEventSigner? eventSigner = null)
    {
        _hlc = hlc ?? throw new ArgumentNullException(nameof(hlc));
        _timeProvider = timeProvider ?? throw new ArgumentNullException(nameof(timeProvider));
        _eventStore = eventStore ?? throw new ArgumentNullException(nameof(eventStore));
        _options = options ?? throw new ArgumentNullException(nameof(options));
        _logger = logger ?? throw new ArgumentNullException(nameof(logger));
        _eventSigner = eventSigner;
        // Fall back to the entry assembly's identity when no explicit engine version is configured.
        _engineVersion = options.Value.EngineVersion ?? EngineVersionRef.FromEntryAssembly();
    }

    /// <inheritdoc/>
    public async Task<TimelineEvent> EmitAsync<TPayload>(
        string correlationId,
        string kind,
        TPayload payload,
        CancellationToken cancellationToken = default) where TPayload : notnull
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(correlationId);
        ArgumentException.ThrowIfNullOrWhiteSpace(kind);
        ArgumentNullException.ThrowIfNull(payload);
        var timelineEvent = CreateEvent(correlationId, kind, payload);
        await _eventStore.AppendAsync(timelineEvent, cancellationToken).ConfigureAwait(false);
        _logger.LogDebug(
            "Emitted timeline event {EventId} for {CorrelationId} [{Kind}]",
            timelineEvent.EventId,
            correlationId,
            kind);
        return timelineEvent;
    }

    /// <inheritdoc/>
    public async Task<IReadOnlyList<TimelineEvent>> EmitBatchAsync(
        IEnumerable<PendingEvent> events,
        CancellationToken cancellationToken = default)
    {
        ArgumentNullException.ThrowIfNull(events);
        // Each pending event receives its own HLC tick during materialization.
        var timelineEvents = events
            .Select(e => CreateEvent(e.CorrelationId, e.Kind, e.Payload))
            .ToList();
        if (timelineEvents.Count == 0)
        {
            return Array.Empty<TimelineEvent>();
        }
        await _eventStore.AppendBatchAsync(timelineEvents, cancellationToken).ConfigureAwait(false);
        _logger.LogDebug("Emitted batch of {Count} timeline events", timelineEvents.Count);
        return timelineEvents;
    }

    // Builds a fully-populated TimelineEvent: HLC stamp, wall clock, canonical
    // payload + digest, deterministic ID, trace context, and optional DSSE signature.
    private TimelineEvent CreateEvent<TPayload>(
        string correlationId,
        string kind,
        TPayload payload) where TPayload : notnull
    {
        var tHlc = _hlc.Tick();
        var tsWall = _timeProvider.GetUtcNow();
        var service = _options.Value.ServiceName;
        // Canonicalize payload using RFC 8785
        var canonicalPayload = CanonicalizePayload(payload);
        var payloadDigest = EventIdGenerator.ComputePayloadDigest(canonicalPayload);
        // Generate deterministic event ID
        var eventId = EventIdGenerator.Generate(correlationId, tHlc, service, kind);
        // Capture trace context if available
        var traceParent = Activity.Current?.Id;
        var timelineEvent = new TimelineEvent
        {
            EventId = eventId,
            THlc = tHlc,
            TsWall = tsWall,
            Service = service,
            TraceParent = traceParent,
            CorrelationId = correlationId,
            Kind = kind,
            Payload = canonicalPayload,
            PayloadDigest = payloadDigest,
            EngineVersion = _engineVersion,
            SchemaVersion = 1
        };
        // Sign if signer is available and signing is enabled
        if (_eventSigner is not null && _options.Value.SignEvents)
        {
            var signature = _eventSigner.Sign(timelineEvent);
            return timelineEvent with { DsseSig = signature };
        }
        return timelineEvent;
    }

    // Serializes the payload to compact snake_case JSON using the shared options.
    // For now, use standard JSON serialization with sorted keys.
    // In production, this should use StellaOps.Canonical.Json (RFC 8785).
    private static string CanonicalizePayload<TPayload>(TPayload payload)
        => JsonSerializer.Serialize(payload, CanonicalJsonOptions);
}

View File

@@ -1,3 +1,4 @@
using System.Globalization;
using StellaOps.Canonical.Json;
namespace StellaOps.Evidence.Core;
@@ -58,7 +59,7 @@ public sealed record EvidenceRecord : IEvidence
PayloadBase64: Convert.ToBase64String(payload),
GeneratorId: provenance.GeneratorId,
GeneratorVersion: provenance.GeneratorVersion,
GeneratedAt: provenance.GeneratedAt.ToUniversalTime().ToString("O"));
GeneratedAt: provenance.GeneratedAt.ToUniversalTime().ToString("O", CultureInfo.InvariantCulture));
return CanonJson.HashVersionedPrefixed(hashInput, CanonVersion.Current);
}

View File

@@ -0,0 +1,25 @@
# AGENTS - Facet Tests
## Roles
- QA / test engineer: deterministic tests for facet extraction, drift, and VEX workflows.
- Backend engineer: align tests with facet library contracts and fixtures.
## Required Reading
- docs/README.md
- docs/07_HIGH_LEVEL_ARCHITECTURE.md
- docs/modules/platform/architecture-overview.md
- src/__Libraries/AGENTS.md
- Current sprint file under docs/implplan/SPRINT_*.md
## Working Directory & Boundaries
- Primary scope: src/__Libraries/StellaOps.Facet.Tests
- Test target: src/__Libraries/StellaOps.Facet
- Avoid cross-module edits unless explicitly noted in the sprint file.
## Determinism and Safety
- Use fixed timestamps and deterministic file paths in test data.
- Avoid Guid.NewGuid in fixtures unless required for isolation.
## Testing
- Cover drift detection, Merkle roots, extraction filters, and VEX draft workflows.
- Add negative cases for invalid inputs and ensure deterministic golden vectors.

View File

@@ -0,0 +1,302 @@
// <copyright file="FacetQuotaVexWorkflowE2ETests.cs" company="StellaOps">
// Copyright (c) StellaOps. Licensed under AGPL-3.0-or-later.
// </copyright>
// Sprint: SPRINT_20260105_002_003_FACET (QTA-024)
// Description: E2E test: Quota breach -> VEX draft -> approval workflow
using System.Collections.Immutable;
using Microsoft.Extensions.Time.Testing;
using Xunit;
namespace StellaOps.Facet.Tests;
/// <summary>
/// End-to-end tests for the facet quota breach to VEX approval workflow.
/// Tests the complete pipeline: quota breach detection -> VEX draft emission -> draft approval.
/// </summary>
[Trait("Category", "E2E")]
public sealed class FacetQuotaVexWorkflowE2ETests
{
private readonly FakeTimeProvider _timeProvider;
private readonly InMemoryFacetDriftVexDraftStore _draftStore;
private readonly FacetDriftVexEmitter _vexEmitter;
private readonly FacetDriftVexWorkflow _workflow;
public FacetQuotaVexWorkflowE2ETests()
{
_timeProvider = new FakeTimeProvider(new DateTimeOffset(2026, 1, 7, 12, 0, 0, TimeSpan.Zero));
_draftStore = new InMemoryFacetDriftVexDraftStore(_timeProvider);
_vexEmitter = new FacetDriftVexEmitter(FacetDriftVexEmitterOptions.Default, _timeProvider);
_workflow = new FacetDriftVexWorkflow(_vexEmitter, _draftStore);
}
[Fact]
public async Task E2E_QuotaBreach_GeneratesVexDraft_CanBeApproved()
{
// Arrange - Create a drift report with quota breach requiring VEX
var driftReport = CreateQuotaBreachingDriftReport(
imageDigest: "sha256:e2e-test-image",
facetId: "os-packages-dpkg",
churnPercent: 35m);
// Act - Step 1: Process drift and emit VEX drafts
var workflowResult = await _workflow.ExecuteAsync(driftReport, skipExisting: true, CancellationToken.None);
// Assert - Step 1: VEX draft was created
Assert.True(workflowResult.Success);
Assert.Equal(1, workflowResult.NewDraftsCreated);
Assert.Single(workflowResult.CreatedDraftIds);
var draftId = workflowResult.CreatedDraftIds[0];
// Verify draft was stored
var storedDraft = await _draftStore.FindByIdAsync(draftId, CancellationToken.None);
Assert.NotNull(storedDraft);
Assert.True(storedDraft.RequiresReview);
// Verify draft has pending status
var allDrafts = _draftStore.GetAllForTesting();
var draftWithReview = allDrafts.First(d => d.Draft.DraftId == draftId);
Assert.Equal(FacetDriftVexReviewStatus.Pending, draftWithReview.ReviewStatus);
// Act - Step 2: Approve the draft
var approvalResult = await _workflow.ApproveAsync(
draftId,
reviewedBy: "security-team@example.com",
notes: "Reviewed file changes, approved as authorized security patch",
CancellationToken.None);
// Assert - Step 2: Draft was approved
Assert.True(approvalResult);
var approvedDraftWrapper = _draftStore.GetAllForTesting().First(d => d.Draft.DraftId == draftId);
Assert.Equal(FacetDriftVexReviewStatus.Approved, approvedDraftWrapper.ReviewStatus);
Assert.Equal("security-team@example.com", approvedDraftWrapper.ReviewedBy);
}
[Fact]
public async Task E2E_MultipleFacetBreaches_AllGenerateDrafts()
{
// Arrange - Multiple facets exceeding quotas
var facetDrifts = new[]
{
CreateFacetDrift("os-packages-dpkg", QuotaVerdict.RequiresVex, 25m),
CreateFacetDrift("lang-deps-npm", QuotaVerdict.RequiresVex, 30m),
CreateFacetDrift("binaries-usr", QuotaVerdict.Ok, 2m) // OK, should not generate draft
};
var driftReport = new FacetDriftReport
{
ImageDigest = "sha256:multi-facet-test",
BaselineSealId = "seal-multi-001",
AnalyzedAt = _timeProvider.GetUtcNow(),
FacetDrifts = [.. facetDrifts],
OverallVerdict = QuotaVerdict.RequiresVex
};
// Act
var result = await _workflow.ExecuteAsync(driftReport, skipExisting: true, CancellationToken.None);
// Assert
Assert.True(result.Success);
Assert.Equal(2, result.NewDraftsCreated);
Assert.Equal(2, result.CreatedDraftIds.Length);
// Verify correct facets generated drafts
var allDrafts = _draftStore.GetAllForTesting();
Assert.Contains(allDrafts, d => d.Draft.FacetId == "os-packages-dpkg");
Assert.Contains(allDrafts, d => d.Draft.FacetId == "lang-deps-npm");
Assert.DoesNotContain(allDrafts, d => d.Draft.FacetId == "binaries-usr");
}
[Fact]
public async Task E2E_DraftRejection_MarksAsRejected()
{
// Arrange
var driftReport = CreateQuotaBreachingDriftReport(
imageDigest: "sha256:rejection-test",
facetId: "suspicious-facet",
churnPercent: 80m);
var workflowResult = await _workflow.ExecuteAsync(driftReport, skipExisting: true, CancellationToken.None);
var draftId = workflowResult.CreatedDraftIds[0];
// Act - Reject the draft
var result = await _workflow.RejectAsync(
draftId,
reviewedBy: "security-team@example.com",
reason: "Unauthorized change detected - requires investigation",
CancellationToken.None);
// Assert
Assert.True(result);
var rejectedDraft = _draftStore.GetAllForTesting().First(d => d.Draft.DraftId == draftId);
Assert.Equal(FacetDriftVexReviewStatus.Rejected, rejectedDraft.ReviewStatus);
Assert.Equal("security-team@example.com", rejectedDraft.ReviewedBy);
Assert.Contains("Unauthorized", rejectedDraft.ReviewNotes);
}
[Fact]
public async Task E2E_DraftExpiration_AfterTtl()
{
// Arrange - Use shorter TTL for testing
var shortTtlOptions = new FacetDriftVexEmitterOptions { DraftTtl = TimeSpan.FromDays(3) };
var shortTtlEmitter = new FacetDriftVexEmitter(shortTtlOptions, _timeProvider);
var shortTtlWorkflow = new FacetDriftVexWorkflow(shortTtlEmitter, _draftStore);
var driftReport = CreateQuotaBreachingDriftReport(
imageDigest: "sha256:expiry-test-short-ttl",
facetId: "test-facet",
churnPercent: 20m);
var workflowResult = await shortTtlWorkflow.ExecuteAsync(driftReport, skipExisting: true, CancellationToken.None);
var draftId = workflowResult.CreatedDraftIds[0];
// Get the draft's expiration time
var draft = await _draftStore.FindByIdAsync(draftId, CancellationToken.None);
Assert.NotNull(draft);
var expiresAt = draft.ExpiresAt;
// Act - Advance time past TTL (3 days + 1 day buffer)
_timeProvider.Advance(TimeSpan.FromDays(4));
// Assert - Draft's ExpiresAt should be before current time
var now = _timeProvider.GetUtcNow();
Assert.True(expiresAt < now, $"Draft should be expired: ExpiresAt={expiresAt}, Now={now}");
}
[Fact]
public async Task E2E_OverdueDrafts_CanBeQueried()
{
// Arrange - Use short review SLA for testing
var shortSlaOptions = new FacetDriftVexEmitterOptions { ReviewSlaDays = 2 };
var shortSlaEmitter = new FacetDriftVexEmitter(shortSlaOptions, _timeProvider);
var shortSlaWorkflow = new FacetDriftVexWorkflow(shortSlaEmitter, _draftStore);
var driftReport = CreateQuotaBreachingDriftReport(
imageDigest: "sha256:overdue-test-short-sla",
facetId: "overdue-facet",
churnPercent: 25m);
await shortSlaWorkflow.ExecuteAsync(driftReport, skipExisting: true, CancellationToken.None);
// Act - Advance time past review deadline (2 days + 1 day buffer)
_timeProvider.Advance(TimeSpan.FromDays(3));
// Query using the store directly with advanced time
var asOf = _timeProvider.GetUtcNow();
var overdueDrafts = await _draftStore.GetOverdueAsync(asOf, CancellationToken.None);
// Assert
Assert.Single(overdueDrafts);
Assert.Equal("sha256:overdue-test-short-sla", overdueDrafts[0].ImageDigest);
}
[Fact]
public async Task E2E_DraftContainsAuditTrail()
{
// Arrange
var driftReport = CreateQuotaBreachingDriftReport(
imageDigest: "sha256:audit-test",
facetId: "audited-facet",
churnPercent: 25m);
// Act
var workflowResult = await _workflow.ExecuteAsync(driftReport, skipExisting: true, CancellationToken.None);
var draftId = workflowResult.CreatedDraftIds[0];
var draft = await _draftStore.FindByIdAsync(draftId, CancellationToken.None);
// Assert
Assert.NotNull(draft);
Assert.Equal("sha256:audit-test", draft.ImageDigest);
Assert.NotNull(draft.DriftSummary);
Assert.Equal(25m, draft.DriftSummary.ChurnPercent);
Assert.NotEmpty(draft.EvidenceLinks);
Assert.Contains(draft.EvidenceLinks, l => l.Type == "facet_drift_analysis");
}
[Fact]
public async Task E2E_PendingDraftsCanBeQueried()
{
// Arrange - Create multiple drafts
var driftReport1 = CreateQuotaBreachingDriftReport("sha256:pending-test-image-001", "facet-1", 25m);
var driftReport2 = CreateQuotaBreachingDriftReport("sha256:pending-test-image-002", "facet-2", 30m);
await _workflow.ExecuteAsync(driftReport1, skipExisting: true, CancellationToken.None);
await _workflow.ExecuteAsync(driftReport2, skipExisting: true, CancellationToken.None);
// Act
var pendingDrafts = await _workflow.GetPendingDraftsAsync(ct: CancellationToken.None);
// Assert
Assert.Equal(2, pendingDrafts.Length);
}
[Fact]
public async Task E2E_SkipExistingDrafts_PreventsDuplicates()
{
// Arrange
var driftReport = CreateQuotaBreachingDriftReport(
imageDigest: "sha256:duplicate-test",
facetId: "test-facet",
churnPercent: 20m);
// Act - Execute workflow twice
var result1 = await _workflow.ExecuteAsync(driftReport, skipExisting: true, CancellationToken.None);
var result2 = await _workflow.ExecuteAsync(driftReport, skipExisting: true, CancellationToken.None);
// Assert
Assert.Equal(1, result1.NewDraftsCreated);
Assert.Equal(0, result2.NewDraftsCreated);
Assert.Equal(1, result2.ExistingDraftsSkipped);
}
#region Helper Methods
private FacetDriftReport CreateQuotaBreachingDriftReport(string imageDigest, string facetId, decimal churnPercent)
{
var addedCount = (int)churnPercent;
var addedFiles = Enumerable.Range(0, addedCount)
.Select(i => new FacetFileEntry($"/pkg/added{i}.deb", $"sha256:added{i}", 1024, null))
.ToImmutableArray();
var facetDrift = new FacetDrift
{
FacetId = facetId,
Added = addedFiles,
Removed = [],
Modified = [],
DriftScore = churnPercent,
QuotaVerdict = QuotaVerdict.RequiresVex,
BaselineFileCount = 100
};
return new FacetDriftReport
{
ImageDigest = imageDigest,
BaselineSealId = $"seal-{imageDigest.Substring(7, 8)}",
AnalyzedAt = _timeProvider.GetUtcNow(),
FacetDrifts = [facetDrift],
OverallVerdict = QuotaVerdict.RequiresVex
};
}
/// <summary>
/// Builds a single facet drift with a given verdict; the number of synthetic
/// added files equals the whole churn percentage.
/// </summary>
private static FacetDrift CreateFacetDrift(string facetId, QuotaVerdict verdict, decimal churnPercent)
{
    var fileCount = (int)churnPercent;
    var builder = ImmutableArray.CreateBuilder<FacetFileEntry>(fileCount);
    for (var i = 0; i < fileCount; i++)
    {
        builder.Add(new FacetFileEntry($"/{facetId}/file{i}", $"sha256:{facetId}{i}", 100, null));
    }

    return new FacetDrift
    {
        FacetId = facetId,
        Added = builder.MoveToImmutable(),
        Removed = [],
        Modified = [],
        DriftScore = churnPercent,
        QuotaVerdict = verdict,
        BaselineFileCount = 100
    };
}
#endregion
}

View File

@@ -13,8 +13,6 @@
<ItemGroup>
<PackageReference Include="FluentAssertions" />
<PackageReference Include="Microsoft.Extensions.TimeProvider.Testing" />
<PackageReference Include="Microsoft.NET.Test.Sdk" />
<PackageReference Include="xunit.v3" />
</ItemGroup>
<ItemGroup>

View File

@@ -0,0 +1,26 @@
# AGENTS - Facet Library
## Roles
- Backend engineer: facet extraction, sealing, drift detection, and VEX draft workflow.
- QA / test engineer: deterministic fixtures and coverage for extraction and drift workflows.
## Required Reading
- docs/README.md
- docs/07_HIGH_LEVEL_ARCHITECTURE.md
- docs/modules/platform/architecture-overview.md
- src/__Libraries/AGENTS.md
- Current sprint file under docs/implplan/SPRINT_*.md
## Working Directory & Boundaries
- Primary scope: src/__Libraries/StellaOps.Facet
- Test scope: src/__Libraries/StellaOps.Facet.Tests
- Avoid cross-module edits unless explicitly noted in the sprint file.
## Determinism and Safety
- Use TimeProvider and deterministic ordering for hashes, roots, and reports.
- Validate hash algorithms, limits, and glob patterns before processing.
- Keep outputs ASCII and stable for hashing/signing workflows.
## Testing
- Cover extraction ordering, drift calculations, and VEX draft workflow paths.
- Add negative cases for invalid inputs and boundary conditions.

View File

@@ -0,0 +1,24 @@
# AGENTS - HybridLogicalClock Benchmarks
## Roles
- Backend engineer: performance harness and benchmark configuration.
- QA / bench engineer: deterministic inputs and repeatable benchmark fixtures.
## Required Reading
- docs/README.md
- docs/07_HIGH_LEVEL_ARCHITECTURE.md
- docs/modules/platform/architecture-overview.md
- src/__Libraries/AGENTS.md
- Current sprint file under docs/implplan/SPRINT_*.md
## Working Directory & Boundaries
- Primary scope: src/__Libraries/StellaOps.HybridLogicalClock.Benchmarks
- Benchmark target: src/__Libraries/StellaOps.HybridLogicalClock
- Avoid cross-module edits unless explicitly noted in the sprint file.
## Determinism and Safety
- Use fixed timestamps and seeded randomness for repeatable benchmark setup.
- Keep benchmark inputs bounded and offline.
## Testing
- Add minimal smoke tests if benchmark helpers are reused in production paths.

View File

@@ -0,0 +1,24 @@
# AGENTS - HybridLogicalClock Tests
## Roles
- QA / test engineer: deterministic unit tests for HLC parsing, tick, and state store behavior.
- Backend engineer: align tests with HLC library contracts.
## Required Reading
- docs/README.md
- docs/07_HIGH_LEVEL_ARCHITECTURE.md
- docs/modules/platform/architecture-overview.md
- src/__Libraries/AGENTS.md
- Current sprint file under docs/implplan/SPRINT_*.md
## Working Directory & Boundaries
- Primary scope: src/__Libraries/StellaOps.HybridLogicalClock.Tests
- Test target: src/__Libraries/StellaOps.HybridLogicalClock
- Avoid cross-module edits unless explicitly noted in the sprint file.
## Determinism and Safety
- Prefer fixed timestamps via FakeTimeProvider.
- Avoid DateTimeOffset.UtcNow and Guid.NewGuid in test fixtures.
## Testing
- Cover parse/compare, tick/receive, and state store behavior with edge cases.

View File

@@ -13,13 +13,7 @@
<ItemGroup>
<PackageReference Include="FluentAssertions" />
<PackageReference Include="Microsoft.Extensions.TimeProvider.Testing" />
<PackageReference Include="Microsoft.NET.Test.Sdk" />
<PackageReference Include="Moq" />
<PackageReference Include="xunit.v3" />
<PackageReference Include="xunit.runner.visualstudio">
<IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
<PrivateAssets>all</PrivateAssets>
</PackageReference>
</ItemGroup>
<ItemGroup>

View File

@@ -1,3 +1,4 @@
using System.Globalization;
using System.Text.Json;
using Microsoft.EntityFrameworkCore;
using Microsoft.Extensions.Logging;
@@ -190,7 +191,7 @@ public sealed class PostgresProvcacheRepository : IProvcacheRepository
if (deleted > 0)
{
await LogRevocationAsync("expired", asOf.ToString("O"), "ttl-expiry", deleted, cancellationToken)
await LogRevocationAsync("expired", asOf.ToString("O", CultureInfo.InvariantCulture), "ttl-expiry", deleted, cancellationToken)
.ConfigureAwait(false);
}

View File

@@ -3,6 +3,7 @@
// SPDX-License-Identifier: AGPL-3.0-or-later
// ----------------------------------------------------------------------------
using System.Globalization;
using System.Text;
using System.Text.Json;
using System.Text.Json.Serialization;
@@ -182,10 +183,10 @@ public sealed class ProvcacheOciAttestationBuilder : IProvcacheOciAttestationBui
{
FeedIds = digest.ReplaySeed.FeedIds,
RuleIds = digest.ReplaySeed.RuleIds,
FrozenEpoch = digest.ReplaySeed.FrozenEpoch?.ToString("O")
FrozenEpoch = digest.ReplaySeed.FrozenEpoch?.ToString("O", CultureInfo.InvariantCulture)
},
CreatedAt = digest.CreatedAt.ToString("O"),
ExpiresAt = digest.ExpiresAt.ToString("O"),
CreatedAt = digest.CreatedAt.ToString("O", CultureInfo.InvariantCulture),
ExpiresAt = digest.ExpiresAt.ToString("O", CultureInfo.InvariantCulture),
VerdictSummary = request.VerdictSummary
};
}

View File

@@ -9,10 +9,6 @@
</PropertyGroup>
<ItemGroup>
<PackageReference Include="FluentAssertions" />
<PackageReference Include="xunit.runner.visualstudio" >
<IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
<PrivateAssets>all</PrivateAssets>
</PackageReference>
</ItemGroup>
<ItemGroup>
<ProjectReference Include="../StellaOps.Replay.Core/StellaOps.Replay.Core.csproj" />

View File

@@ -1,3 +1,4 @@
using System.Globalization;
using System.Linq;
using StellaOps.Replay.Core.Models;
using YamlDotNet.Serialization;
@@ -21,7 +22,7 @@ public sealed class ReplayManifestWriter
snapshot = new
{
id = snapshot.SnapshotId,
createdAt = snapshot.CreatedAt.ToString("O"),
createdAt = snapshot.CreatedAt.ToString("O", CultureInfo.InvariantCulture),
artifact = snapshot.ArtifactDigest,
previousId = snapshot.PreviousSnapshotId
},
@@ -68,7 +69,7 @@ public sealed class ReplayManifestWriter
name = f.Name,
version = f.Version,
digest = f.Digest,
fetchedAt = f.FetchedAt.ToString("O")
fetchedAt = f.FetchedAt.ToString("O", CultureInfo.InvariantCulture)
}),
lattice = new
{

View File

@@ -0,0 +1,25 @@
# SPDX3 Library Charter
## Mission
- Provide SPDX 3.0.1 parsing, validation, and profile support.
## Responsibilities
- Parse SPDX JSON-LD and surface deterministic models.
- Validate profile conformance and identifiers.
- Resolve contexts with offline-friendly defaults.
## Required Reading
- docs/README.md
- docs/07_HIGH_LEVEL_ARCHITECTURE.md
- docs/modules/platform/architecture-overview.md
- docs/modules/sbom-service/architecture.md
- docs/modules/sbom-service/spdx3-profile-support.md
## Working Agreement
- Deterministic parsing and invariant formatting.
- Use TimeProvider and IGuidGenerator where timestamps or IDs are created.
- Avoid network dependencies unless explicitly enabled.
## Testing Strategy
- Unit tests for parser/validator behavior and error paths.
- Determinism tests for stable ordering and output.

View File

@@ -0,0 +1,142 @@
// <copyright file="ISpdx3Parser.cs" company="StellaOps">
// Copyright (c) StellaOps. Licensed under the AGPL-3.0-or-later.
// </copyright>
using StellaOps.Spdx3.Model;
namespace StellaOps.Spdx3;
/// <summary>
/// Interface for SPDX 3.0.1 document parsing.
/// Implementations return a <see cref="Spdx3ParseResult"/> describing success or
/// failure. NOTE(review): the result type suggests malformed input is reported via
/// errors rather than exceptions — confirm implementations follow that contract.
/// </summary>
public interface ISpdx3Parser
{
/// <summary>
/// Parses an SPDX 3.0.1 document from a stream.
/// </summary>
/// <param name="stream">The input stream containing JSON-LD.</param>
/// <param name="cancellationToken">Cancellation token.</param>
/// <returns>The parse result.</returns>
Task<Spdx3ParseResult> ParseAsync(
Stream stream,
CancellationToken cancellationToken = default);
/// <summary>
/// Parses an SPDX 3.0.1 document from a file path.
/// </summary>
/// <param name="filePath">The path to the JSON-LD file.</param>
/// <param name="cancellationToken">Cancellation token.</param>
/// <returns>The parse result.</returns>
Task<Spdx3ParseResult> ParseAsync(
string filePath,
CancellationToken cancellationToken = default);
/// <summary>
/// Parses an SPDX 3.0.1 document from JSON text already held in memory.
/// </summary>
/// <param name="json">The JSON-LD text.</param>
/// <param name="cancellationToken">Cancellation token.</param>
/// <returns>The parse result.</returns>
Task<Spdx3ParseResult> ParseFromJsonAsync(
string json,
CancellationToken cancellationToken = default);
}
/// <summary>
/// Result of parsing an SPDX 3.0.1 document: either a parsed document plus
/// optional warnings, or a list of errors explaining the failure.
/// </summary>
public sealed record Spdx3ParseResult
{
    /// <summary>
    /// Gets whether parsing was successful.
    /// </summary>
    public required bool Success { get; init; }

    /// <summary>
    /// Gets the parsed document (if successful).
    /// </summary>
    public Spdx3Document? Document { get; init; }

    /// <summary>
    /// Gets any parsing errors.
    /// </summary>
    public IReadOnlyList<Spdx3ParseError> Errors { get; init; } = [];

    /// <summary>
    /// Gets any parsing warnings.
    /// </summary>
    public IReadOnlyList<Spdx3ParseWarning> Warnings { get; init; } = [];

    /// <summary>
    /// Creates a successful parse result.
    /// </summary>
    /// <param name="document">The parsed document.</param>
    /// <param name="warnings">Any warnings.</param>
    /// <returns>The result.</returns>
    public static Spdx3ParseResult Succeeded(
        Spdx3Document document,
        IReadOnlyList<Spdx3ParseWarning>? warnings = null) => new()
        {
            Success = true,
            Document = document,
            Warnings = warnings ?? []
        };

    /// <summary>
    /// Creates a failed parse result.
    /// </summary>
    /// <param name="errors">The errors.</param>
    /// <param name="warnings">Any warnings.</param>
    /// <returns>The result.</returns>
    public static Spdx3ParseResult Failed(
        IReadOnlyList<Spdx3ParseError> errors,
        IReadOnlyList<Spdx3ParseWarning>? warnings = null) => new()
        {
            Success = false,
            Errors = errors,
            Warnings = warnings ?? []
        };

    /// <summary>
    /// Creates a failed parse result from a single error.
    /// </summary>
    /// <param name="code">The error code.</param>
    /// <param name="message">The error message.</param>
    /// <param name="path">The JSON path where the error occurred.</param>
    /// <returns>The result.</returns>
    public static Spdx3ParseResult Failed(
        string code,
        string message,
        string? path = null) =>
        Failed([new Spdx3ParseError(code, message, path)]);
}
/// <summary>
/// Represents a parsing error (a condition that prevented a usable document).
/// </summary>
/// <param name="Code">Machine-readable error code.</param>
/// <param name="Message">Human-readable error message.</param>
/// <param name="Path">JSON path where the error occurred, when known.</param>
public sealed record Spdx3ParseError(
string Code,
string Message,
string? Path = null);
/// <summary>
/// Represents a parsing warning (a non-fatal issue; parsing still produced a document).
/// </summary>
/// <param name="Code">Machine-readable warning code.</param>
/// <param name="Message">Human-readable warning message.</param>
/// <param name="Path">JSON path where the warning occurred, when known.</param>
public sealed record Spdx3ParseWarning(
string Code,
string Message,
string? Path = null);

View File

@@ -0,0 +1,247 @@
// <copyright file="Spdx3ContextResolver.cs" company="StellaOps">
// Copyright (c) StellaOps. Licensed under the AGPL-3.0-or-later.
// </copyright>
using System.Text.Json;
using Microsoft.Extensions.Caching.Memory;
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Options;
namespace StellaOps.Spdx3.JsonLd;
/// <summary>
/// Resolves JSON-LD contexts for SPDX 3.0.1 documents.
/// </summary>
public interface ISpdx3ContextResolver
{
/// <summary>
/// Resolves a context from a URL or embedded reference.
/// </summary>
/// <param name="contextRef">The context reference (URL or inline).</param>
/// <param name="cancellationToken">Cancellation token.</param>
/// <returns>The resolved context, or null when the reference cannot be resolved.</returns>
Task<Spdx3Context?> ResolveAsync(
string contextRef,
CancellationToken cancellationToken = default);
}
/// <summary>
/// Represents a resolved JSON-LD context.
/// NOTE(review): <see cref="Document"/> is an IDisposable <see cref="JsonDocument"/>
/// and instances may be cached/shared by the resolver — callers should not dispose
/// it; confirm intended ownership.
/// </summary>
public sealed record Spdx3Context
{
/// <summary>
/// Gets the context URI.
/// </summary>
public required string Uri { get; init; }
/// <summary>
/// Gets the context document.
/// </summary>
public required JsonDocument Document { get; init; }
/// <summary>
/// Gets when the context was resolved.
/// </summary>
public required DateTimeOffset ResolvedAt { get; init; }
}
/// <summary>
/// Options for the SPDX 3 context resolver.
/// </summary>
public sealed class Spdx3ContextResolverOptions
{
/// <summary>
/// Gets or sets the cache TTL for resolved contexts.
/// </summary>
public TimeSpan CacheTtl { get; set; } = TimeSpan.FromHours(24);
/// <summary>
/// Gets or sets the maximum cache size.
/// NOTE(review): not visibly enforced by the resolver itself — confirm the
/// backing IMemoryCache is configured with a corresponding SizeLimit.
/// </summary>
public int MaxCacheSize { get; set; } = 100;
/// <summary>
/// Gets or sets whether to allow remote context resolution.
/// Set to false for air-gapped environments.
/// </summary>
public bool AllowRemoteContexts { get; set; } = true;
/// <summary>
/// Gets or sets the base path for local context files.
/// When set, local files are consulted before any remote fetch.
/// </summary>
public string? LocalContextPath { get; set; }
/// <summary>
/// Gets or sets the HTTP timeout for remote contexts.
/// </summary>
public TimeSpan HttpTimeout { get; set; } = TimeSpan.FromSeconds(30);
}
/// <summary>
/// Implementation of context resolver with caching.
/// Resolution order: in-memory cache, embedded contexts (air-gap friendly),
/// optional local files, then remote HTTP fetch when
/// <see cref="Spdx3ContextResolverOptions.AllowRemoteContexts"/> is enabled.
/// </summary>
public sealed class Spdx3ContextResolver : ISpdx3ContextResolver, IDisposable
{
    private readonly IHttpClientFactory _httpClientFactory;
    private readonly IMemoryCache _cache;
    private readonly ILogger<Spdx3ContextResolver> _logger;
    private readonly Spdx3ContextResolverOptions _options;
    private readonly TimeProvider _timeProvider;

    // Well-known SPDX 3.0.1 context URIs mapped to embedded content so documents
    // can be parsed without any network access.
    private static readonly Dictionary<string, string> EmbeddedContexts = new(StringComparer.OrdinalIgnoreCase)
    {
        ["https://spdx.org/rdf/3.0.1/spdx-context.jsonld"] = GetEmbeddedContext("spdx-context.jsonld"),
        ["https://spdx.org/rdf/3.0.1/terms/Core"] = GetEmbeddedContext("core-profile.jsonld"),
        ["https://spdx.org/rdf/3.0.1/terms/Software"] = GetEmbeddedContext("software-profile.jsonld"),
        ["https://spdx.org/rdf/3.0.1/terms/Security"] = GetEmbeddedContext("security-profile.jsonld"),
        ["https://spdx.org/rdf/3.0.1/terms/Build"] = GetEmbeddedContext("build-profile.jsonld"),
        ["https://spdx.org/rdf/3.0.1/terms/Lite"] = GetEmbeddedContext("lite-profile.jsonld")
    };

    /// <summary>
    /// Initializes a new instance of the <see cref="Spdx3ContextResolver"/> class.
    /// </summary>
    /// <param name="httpClientFactory">Factory used to create clients for remote fetches.</param>
    /// <param name="cache">Memory cache that stores resolved contexts.</param>
    /// <param name="logger">Logger.</param>
    /// <param name="options">Resolver configuration.</param>
    /// <param name="timeProvider">Time source used to stamp resolution times.</param>
    public Spdx3ContextResolver(
        IHttpClientFactory httpClientFactory,
        IMemoryCache cache,
        ILogger<Spdx3ContextResolver> logger,
        IOptions<Spdx3ContextResolverOptions> options,
        TimeProvider timeProvider)
    {
        _httpClientFactory = httpClientFactory;
        _cache = cache;
        _logger = logger;
        _options = options.Value;
        _timeProvider = timeProvider;
    }

    /// <inheritdoc />
    public async Task<Spdx3Context?> ResolveAsync(
        string contextRef,
        CancellationToken cancellationToken = default)
    {
        if (string.IsNullOrWhiteSpace(contextRef))
        {
            return null;
        }

        // Check cache first.
        var cacheKey = $"spdx3-context:{contextRef}";
        if (_cache.TryGetValue(cacheKey, out Spdx3Context? cached))
        {
            _logger.LogDebug("Context cache hit for {ContextRef}", contextRef);
            return cached;
        }

        // Try embedded contexts next (for air-gap support).
        if (EmbeddedContexts.TryGetValue(contextRef, out var embedded))
        {
            var context = CreateContext(contextRef, embedded);
            CacheContext(cacheKey, context);
            return context;
        }

        // Try a local file if a local context directory is configured.
        if (!string.IsNullOrEmpty(_options.LocalContextPath))
        {
            var localPath = Path.Combine(_options.LocalContextPath, GetContextFileName(contextRef));
            if (File.Exists(localPath))
            {
                var content = await File.ReadAllTextAsync(localPath, cancellationToken)
                    .ConfigureAwait(false);
                var context = CreateContext(contextRef, content);
                CacheContext(cacheKey, context);
                return context;
            }
        }

        // Finally, fetch over HTTP when permitted.
        if (!_options.AllowRemoteContexts)
        {
            _logger.LogWarning("Remote context resolution disabled, cannot resolve {ContextRef}", contextRef);
            return null;
        }

        return await FetchRemoteContextAsync(contextRef, cacheKey, cancellationToken)
            .ConfigureAwait(false);
    }

    // Fetches and caches a context over HTTP; returns null on fetch/parse failure.
    private async Task<Spdx3Context?> FetchRemoteContextAsync(
        string contextRef,
        string cacheKey,
        CancellationToken cancellationToken)
    {
        try
        {
            var client = _httpClientFactory.CreateClient("Spdx3Context");
            client.Timeout = _options.HttpTimeout;
            _logger.LogInformation("Fetching remote context {ContextRef}", contextRef);
            var content = await client.GetStringAsync(contextRef, cancellationToken)
                .ConfigureAwait(false);
            var context = CreateContext(contextRef, content);
            CacheContext(cacheKey, context);
            return context;
        }
        catch (OperationCanceledException) when (cancellationToken.IsCancellationRequested)
        {
            // Fix: caller-requested cancellation must propagate instead of being
            // logged as an error and silently converted into a null result.
            throw;
        }
        catch (Exception ex)
        {
            // Best-effort: network errors, HTTP timeouts, and invalid JSON are
            // logged and surfaced to the caller as an unresolved context.
            _logger.LogError(ex, "Failed to fetch remote context {ContextRef}", contextRef);
            return null;
        }
    }

    // Wraps raw context content with its URI and a deterministic resolution timestamp.
    private Spdx3Context CreateContext(string uri, string content)
    {
        return new Spdx3Context
        {
            Uri = uri,
            Document = JsonDocument.Parse(content),
            ResolvedAt = _timeProvider.GetUtcNow()
        };
    }

    // Stores the context with sliding expiration; Size = 1 supports caches
    // configured with a SizeLimit.
    private void CacheContext(string cacheKey, Spdx3Context context)
    {
        var cacheOptions = new MemoryCacheEntryOptions
        {
            Size = 1,
            SlidingExpiration = _options.CacheTtl
        };
        _cache.Set(cacheKey, context, cacheOptions);
    }

    // Maps a context URI to a file name by taking its final path segment.
    private static string GetContextFileName(string uri)
    {
        var lastSlash = uri.LastIndexOf('/');
        return lastSlash >= 0 ? uri[(lastSlash + 1)..] : uri;
    }

    private static string GetEmbeddedContext(string name)
    {
        // In a real implementation, this would load from embedded resources.
        // For now, return a minimal stub that allows parsing.
        return name switch
        {
            "spdx-context.jsonld" => """
            {
            "@context": {
            "spdx": "https://spdx.org/rdf/3.0.1/terms/",
            "Core": "spdx:Core/",
            "Software": "spdx:Software/",
            "spdxId": "@id",
            "@type": "@type"
            }
            }
            """,
            _ => "{}"
        };
    }

    /// <inheritdoc />
    public void Dispose()
    {
        // Intentionally empty: the IMemoryCache is owned and disposed by DI.
    }
}

View File

@@ -0,0 +1,319 @@
// <copyright file="Spdx3Package.cs" company="StellaOps">
// Copyright (c) StellaOps. Licensed under the AGPL-3.0-or-later.
// </copyright>
using System.Collections.Immutable;
using System.Text.Json.Serialization;
namespace StellaOps.Spdx3.Model.Software;
/// <summary>
/// Represents an SPDX 3.0.1 Package (Software Profile).
/// Property names mirror the SPDX 3.0.1 JSON-LD vocabulary via JsonPropertyName.
/// </summary>
public sealed record Spdx3Package : Spdx3Element
{
/// <summary>
/// Gets the package version.
/// </summary>
[JsonPropertyName("packageVersion")]
public string? PackageVersion { get; init; }
/// <summary>
/// Gets the download location URL.
/// </summary>
[JsonPropertyName("downloadLocation")]
public string? DownloadLocation { get; init; }
/// <summary>
/// Gets the Package URL (PURL).
/// NOTE(review): expected to follow the purl spec (e.g. "pkg:npm/name@1.0.0");
/// not validated by this type — confirm validation happens elsewhere.
/// </summary>
[JsonPropertyName("packageUrl")]
public string? PackageUrl { get; init; }
/// <summary>
/// Gets the home page URL.
/// </summary>
[JsonPropertyName("homePage")]
public string? HomePage { get; init; }
/// <summary>
/// Gets source information.
/// </summary>
[JsonPropertyName("sourceInfo")]
public string? SourceInfo { get; init; }
/// <summary>
/// Gets the primary purpose of the package.
/// </summary>
[JsonPropertyName("primaryPurpose")]
public Spdx3SoftwarePurpose? PrimaryPurpose { get; init; }
/// <summary>
/// Gets additional purposes beyond the primary one.
/// </summary>
[JsonPropertyName("additionalPurpose")]
public ImmutableArray<Spdx3SoftwarePurpose> AdditionalPurpose { get; init; } = [];
/// <summary>
/// Gets the copyright text.
/// </summary>
[JsonPropertyName("copyrightText")]
public string? CopyrightText { get; init; }
/// <summary>
/// Gets attribution text.
/// </summary>
[JsonPropertyName("attributionText")]
public ImmutableArray<string> AttributionText { get; init; } = [];
/// <summary>
/// Gets the originator of the package.
/// </summary>
[JsonPropertyName("originatedBy")]
public ImmutableArray<string> OriginatedBy { get; init; } = [];
/// <summary>
/// Gets the supplier of the package.
/// </summary>
[JsonPropertyName("suppliedBy")]
public string? SuppliedBy { get; init; }
/// <summary>
/// Gets the release time of the package.
/// </summary>
[JsonPropertyName("releaseTime")]
public DateTimeOffset? ReleaseTime { get; init; }
/// <summary>
/// Gets the build time of the package.
/// </summary>
[JsonPropertyName("buildTime")]
public DateTimeOffset? BuildTime { get; init; }
/// <summary>
/// Gets the valid until time.
/// </summary>
[JsonPropertyName("validUntilTime")]
public DateTimeOffset? ValidUntilTime { get; init; }
}
/// <summary>
/// Represents an SPDX 3.0.1 File (Software Profile).
/// </summary>
public sealed record Spdx3File : Spdx3Element
{
/// <summary>
/// Gets the file name.
/// 'new' re-declares Name from the base element so it can be marked required for files.
/// </summary>
[JsonPropertyName("name")]
public new required string Name { get; init; }
/// <summary>
/// Gets the primary purpose.
/// </summary>
[JsonPropertyName("primaryPurpose")]
public Spdx3SoftwarePurpose? PrimaryPurpose { get; init; }
/// <summary>
/// Gets the content type (MIME type).
/// </summary>
[JsonPropertyName("contentType")]
public string? ContentType { get; init; }
/// <summary>
/// Gets the copyright text.
/// </summary>
[JsonPropertyName("copyrightText")]
public string? CopyrightText { get; init; }
}
/// <summary>
/// Represents an SPDX 3.0.1 Snippet (Software Profile): a region of a file.
/// </summary>
public sealed record Spdx3Snippet : Spdx3Element
{
/// <summary>
/// Gets the file containing this snippet (by SPDX ID reference).
/// </summary>
[JsonPropertyName("snippetFromFile")]
public required string SnippetFromFile { get; init; }
/// <summary>
/// Gets the byte range.
/// NOTE(review): SPDX ranges are typically 1-based and inclusive — confirm.
/// </summary>
[JsonPropertyName("byteRange")]
public Spdx3PositiveIntegerRange? ByteRange { get; init; }
/// <summary>
/// Gets the line range.
/// </summary>
[JsonPropertyName("lineRange")]
public Spdx3PositiveIntegerRange? LineRange { get; init; }
/// <summary>
/// Gets the primary purpose.
/// </summary>
[JsonPropertyName("primaryPurpose")]
public Spdx3SoftwarePurpose? PrimaryPurpose { get; init; }
/// <summary>
/// Gets the copyright text.
/// </summary>
[JsonPropertyName("copyrightText")]
public string? CopyrightText { get; init; }
}
/// <summary>
/// Represents a positive integer range.
/// NOTE(review): positivity (values >= 1) and Begin &lt;= End are not enforced by
/// this type — confirm validation is performed by the parser/validator.
/// </summary>
public sealed record Spdx3PositiveIntegerRange
{
/// <summary>
/// Gets the begin value.
/// </summary>
[JsonPropertyName("beginIntegerRange")]
public required int Begin { get; init; }
/// <summary>
/// Gets the end value.
/// </summary>
[JsonPropertyName("endIntegerRange")]
public required int End { get; init; }
}
/// <summary>
/// Software purpose types.
/// </summary>
/// <remarks>
/// Serialized via <see cref="JsonStringEnumConverter"/> using member names as-is.
/// NOTE(review): SPDX 3.0.1 vocabulary values appear in lowerCamelCase (e.g.
/// "deviceDriver") — confirm a naming policy is applied at the serializer level.
/// </remarks>
[JsonConverter(typeof(JsonStringEnumConverter))]
public enum Spdx3SoftwarePurpose
{
/// <summary>
/// Application software.
/// </summary>
Application,
/// <summary>
/// Archive (zip, tar, etc.).
/// </summary>
Archive,
/// <summary>
/// BOM (Bill of Materials).
/// </summary>
Bom,
/// <summary>
/// Configuration file.
/// </summary>
Configuration,
/// <summary>
/// Container image.
/// </summary>
Container,
/// <summary>
/// Data file.
/// </summary>
Data,
/// <summary>
/// Device driver.
/// </summary>
Device,
/// <summary>
/// Device driver (alternative).
/// </summary>
DeviceDriver,
/// <summary>
/// Documentation.
/// </summary>
Documentation,
/// <summary>
/// Evidence (compliance).
/// </summary>
Evidence,
/// <summary>
/// Executable.
/// </summary>
Executable,
/// <summary>
/// File.
/// </summary>
File,
/// <summary>
/// Firmware.
/// </summary>
Firmware,
/// <summary>
/// Framework.
/// </summary>
Framework,
/// <summary>
/// Install script.
/// </summary>
Install,
/// <summary>
/// Library.
/// </summary>
Library,
/// <summary>
/// Machine learning model.
/// </summary>
Model,
/// <summary>
/// Module.
/// </summary>
Module,
/// <summary>
/// Operating system.
/// </summary>
OperatingSystem,
/// <summary>
/// Other purpose.
/// </summary>
Other,
/// <summary>
/// Patch.
/// </summary>
Patch,
/// <summary>
/// Platform.
/// </summary>
Platform,
/// <summary>
/// Requirement.
/// </summary>
Requirement,
/// <summary>
/// Source code.
/// </summary>
Source,
/// <summary>
/// Specification.
/// </summary>
Specification,
/// <summary>
/// Test.
/// </summary>
Test
}

View File

@@ -0,0 +1,81 @@
// <copyright file="Spdx3SpdxDocument.cs" company="StellaOps">
// Copyright (c) StellaOps. Licensed under the AGPL-3.0-or-later.
// </copyright>
using System.Collections.Immutable;
using System.Text.Json.Serialization;
namespace StellaOps.Spdx3.Model.Software;
/// <summary>
/// Represents an SPDX 3.0.1 SpdxDocument (Software Profile).
/// The SpdxDocument is the root element that describes the SBOM itself.
/// </summary>
public sealed record Spdx3SpdxDocument : Spdx3Element
{
/// <summary>
/// Gets the namespace for this document.
/// </summary>
[JsonPropertyName("namespaceMap")]
public ImmutableArray<Spdx3NamespaceMap> NamespaceMap { get; init; } = [];
/// <summary>
/// Gets the elements described by this document.
/// NOTE(review): entries appear to be spdxId references to elements — confirm.
/// </summary>
[JsonPropertyName("element")]
public ImmutableArray<string> Element { get; init; } = [];
/// <summary>
/// Gets the root elements of this document.
/// </summary>
[JsonPropertyName("rootElement")]
public ImmutableArray<string> RootElement { get; init; } = [];
/// <summary>
/// Gets the imports (external document references).
/// </summary>
[JsonPropertyName("import")]
public ImmutableArray<Spdx3ExternalMap> Import { get; init; } = [];
}
/// <summary>
/// Represents a namespace mapping (prefix to namespace URI).
/// </summary>
public sealed record Spdx3NamespaceMap
{
/// <summary>
/// Gets the prefix.
/// </summary>
[JsonPropertyName("prefix")]
public required string Prefix { get; init; }
/// <summary>
/// Gets the namespace URI.
/// </summary>
[JsonPropertyName("namespace")]
public required string Namespace { get; init; }
}
/// <summary>
/// Represents an external map (import): a reference to an element defined in
/// another SPDX document.
/// </summary>
public sealed record Spdx3ExternalMap
{
/// <summary>
/// Gets the external SPDX ID.
/// </summary>
[JsonPropertyName("externalSpdxId")]
public required string ExternalSpdxId { get; init; }
/// <summary>
/// Gets the verified using integrity methods.
/// </summary>
[JsonPropertyName("verifiedUsing")]
public ImmutableArray<Spdx3IntegrityMethod> VerifiedUsing { get; init; } = [];
/// <summary>
/// Gets the location hint.
/// NOTE(review): presumably a URL where the external document can be retrieved — confirm.
/// </summary>
[JsonPropertyName("locationHint")]
public string? LocationHint { get; init; }
}

View File

@@ -0,0 +1,139 @@
// <copyright file="Spdx3CreationInfo.cs" company="StellaOps">
// Copyright (c) StellaOps. Licensed under the AGPL-3.0-or-later.
// </copyright>
using System.Collections.Immutable;
using System.ComponentModel.DataAnnotations;
using System.Text.Json.Serialization;
namespace StellaOps.Spdx3.Model;
/// <summary>
/// Represents the creation information for SPDX 3.0.1 elements.
/// This captures who created the document, when, and with what tools.
/// </summary>
public sealed record Spdx3CreationInfo
{
/// <summary>
/// Gets the unique identifier for this CreationInfo.
/// </summary>
[JsonPropertyName("@id")]
public string? Id { get; init; }
/// <summary>
/// Gets the type.
/// </summary>
[JsonPropertyName("@type")]
public string? Type { get; init; }
/// <summary>
/// Gets the SPDX specification version.
/// Must be "3.0.1" for SPDX 3.0.1 documents.
/// </summary>
[Required]
[JsonPropertyName("specVersion")]
public required string SpecVersion { get; init; }
/// <summary>
/// Gets the creation timestamp in ISO 8601 format.
/// </summary>
[Required]
[JsonPropertyName("created")]
public required DateTimeOffset Created { get; init; }
/// <summary>
/// Gets the agents (persons/organizations) who created this.
/// </summary>
[JsonPropertyName("createdBy")]
public ImmutableArray<string> CreatedBy { get; init; } = [];
/// <summary>
/// Gets the tools used to create this.
/// </summary>
[JsonPropertyName("createdUsing")]
public ImmutableArray<string> CreatedUsing { get; init; } = [];
/// <summary>
/// Gets the profiles this document conforms to.
/// </summary>
[JsonPropertyName("profile")]
public ImmutableArray<Spdx3ProfileIdentifier> Profile { get; init; } = [];
/// <summary>
/// Gets the data license.
/// Must be CC0-1.0 for SPDX documents.
/// NOTE(review): nullable and not validated by this type against
/// <see cref="Spdx301DataLicense"/> — confirm the validator enforces it.
/// </summary>
[JsonPropertyName("dataLicense")]
public string? DataLicense { get; init; }
/// <summary>
/// Gets an optional comment about the creation.
/// </summary>
[JsonPropertyName("comment")]
public string? Comment { get; init; }
/// <summary>
/// The standard SPDX 3.0.1 spec version string.
/// </summary>
public const string Spdx301Version = "3.0.1";
/// <summary>
/// The required data license for SPDX documents.
/// </summary>
public const string Spdx301DataLicense = "CC0-1.0";
/// <summary>
/// Validates that the spec version is 3.0.1 (case-sensitive ordinal comparison).
/// </summary>
/// <returns>True if valid SPDX 3.0.1 spec version.</returns>
public bool IsValidSpecVersion() =>
string.Equals(SpecVersion, Spdx301Version, StringComparison.Ordinal);
}
/// <summary>
/// Represents an Agent (person, organization, or software agent).
/// </summary>
public sealed record Spdx3Agent : Spdx3Element
{
/// <summary>
/// Gets the agent's name.
/// 'new' re-declares Name from the base element so it can be marked required for agents.
/// </summary>
[JsonPropertyName("name")]
public new required string Name { get; init; }
}
/// <summary>
/// Represents a Person agent.
/// </summary>
public sealed record Spdx3Person : Spdx3Element
{
/// <summary>
/// Gets the person's name.
/// 'new' re-declares Name from the base element so it can be marked required here.
/// </summary>
[JsonPropertyName("name")]
public new required string Name { get; init; }
}
/// <summary>
/// Represents an Organization agent.
/// </summary>
public sealed record Spdx3Organization : Spdx3Element
{
/// <summary>
/// Gets the organization's name.
/// 'new' re-declares Name from the base element so it can be marked required here.
/// </summary>
[JsonPropertyName("name")]
public new required string Name { get; init; }
}
/// <summary>
/// Represents a Tool agent.
/// </summary>
public sealed record Spdx3Tool : Spdx3Element
{
/// <summary>
/// Gets the tool's name.
/// 'new' re-declares Name from the base element so it can be marked required here.
/// </summary>
[JsonPropertyName("name")]
public new required string Name { get; init; }
}

View File

@@ -0,0 +1,218 @@
// <copyright file="Spdx3Document.cs" company="StellaOps">
// Copyright (c) StellaOps. Licensed under the AGPL-3.0-or-later.
// </copyright>
using System.Collections.Immutable;
using StellaOps.Spdx3.Model.Software;
namespace StellaOps.Spdx3.Model;
/// <summary>
/// Represents a parsed SPDX 3.0.1 document containing all elements.
/// </summary>
public sealed class Spdx3Document
{
private readonly Dictionary<string, Spdx3Element> _elementsById;
private readonly Dictionary<string, Spdx3CreationInfo> _creationInfoById;
/// <summary>
/// Initializes a new instance of the <see cref="Spdx3Document"/> class.
/// Inputs are enumerated exactly once; duplicates are preserved in
/// <see cref="AllElements"/> but collapsed (last occurrence wins) in the ID lookup.
/// </summary>
/// <param name="elements">All elements in the document.</param>
/// <param name="creationInfos">All CreationInfo objects.</param>
/// <param name="profiles">Detected profile conformance.</param>
/// <param name="spdxDocument">The root SpdxDocument element if present.</param>
public Spdx3Document(
IEnumerable<Spdx3Element> elements,
IEnumerable<Spdx3CreationInfo> creationInfos,
IEnumerable<Spdx3ProfileIdentifier> profiles,
Spdx3SpdxDocument? spdxDocument = null)
{
// Materialize once to avoid multiple enumeration of the caller's sequence.
var elementList = elements.ToList();
// Use GroupBy to handle duplicates - last element wins for lookup but keeps all for validation
_elementsById = elementList
.GroupBy(e => e.SpdxId, StringComparer.Ordinal)
.ToDictionary(g => g.Key, g => g.Last(), StringComparer.Ordinal);
// CreationInfo entries without an @id cannot be referenced, so they are dropped here.
_creationInfoById = creationInfos
.Where(c => c.Id != null)
.GroupBy(c => c.Id!, StringComparer.Ordinal)
.ToDictionary(g => g.Key, g => g.Last(), StringComparer.Ordinal);
// ImmutableHashSet de-duplicates repeated profile identifiers.
Profiles = profiles.ToImmutableHashSet();
SpdxDocument = spdxDocument;
// Categorize elements by type - use original list to preserve duplicates for counting
Packages = elementList.OfType<Spdx3Package>().ToImmutableArray();
Files = elementList.OfType<Spdx3File>().ToImmutableArray();
Snippets = elementList.OfType<Spdx3Snippet>().ToImmutableArray();
Relationships = elementList.OfType<Spdx3Relationship>().ToImmutableArray();
AllElements = elementList.ToImmutableArray();
}
/// <summary>
/// Gets all elements including duplicates (for validation).
/// </summary>
public ImmutableArray<Spdx3Element> AllElements { get; }
/// <summary>
/// Gets the root SpdxDocument element if present.
/// </summary>
public Spdx3SpdxDocument? SpdxDocument { get; }
/// <summary>
/// Gets all elements in the document.
/// This is the de-duplicated view backed by the ID lookup; compare with
/// <see cref="AllElements"/>, which preserves duplicates.
/// </summary>
public IReadOnlyCollection<Spdx3Element> Elements => _elementsById.Values;
/// <summary>
/// Gets all packages in the document.
/// </summary>
public ImmutableArray<Spdx3Package> Packages { get; }
/// <summary>
/// Gets all files in the document.
/// </summary>
public ImmutableArray<Spdx3File> Files { get; }
/// <summary>
/// Gets all snippets in the document.
/// </summary>
public ImmutableArray<Spdx3Snippet> Snippets { get; }
/// <summary>
/// Gets all relationships in the document.
/// </summary>
public ImmutableArray<Spdx3Relationship> Relationships { get; }
/// <summary>
/// Gets the detected profile conformance.
/// </summary>
public ImmutableHashSet<Spdx3ProfileIdentifier> Profiles { get; }
/// <summary>
/// Gets an element by its SPDX ID.
/// </summary>
/// <param name="spdxId">The SPDX ID.</param>
/// <returns>The element, or null if not found.</returns>
public Spdx3Element? GetById(string spdxId)
{
return _elementsById.TryGetValue(spdxId, out var element) ? element : null;
}
/// <summary>
/// Gets an element by its SPDX ID as a specific type.
/// </summary>
/// <typeparam name="T">The element type.</typeparam>
/// <param name="spdxId">The SPDX ID.</param>
/// <returns>The element, or null if not found or wrong type.</returns>
public T? GetById<T>(string spdxId) where T : Spdx3Element
{
return GetById(spdxId) as T;
}
/// <summary>
/// Gets a CreationInfo by its ID.
/// </summary>
/// <param name="id">The CreationInfo ID.</param>
/// <returns>The CreationInfo, or null if not found.</returns>
public Spdx3CreationInfo? GetCreationInfo(string id)
{
return _creationInfoById.TryGetValue(id, out var info) ? info : null;
}
/// <summary>
/// Gets relationships where the given element is the source.
/// </summary>
/// <param name="spdxId">The source element ID.</param>
/// <returns>Matching relationships.</returns>
public IEnumerable<Spdx3Relationship> GetRelationshipsFrom(string spdxId)
{
return Relationships.Where(r => r.From == spdxId);
}
/// <summary>
/// Gets relationships where the given element is a target.
/// </summary>
/// <param name="spdxId">The target element ID.</param>
/// <returns>Matching relationships.</returns>
public IEnumerable<Spdx3Relationship> GetRelationshipsTo(string spdxId)
{
return Relationships.Where(r => r.To.Contains(spdxId));
}
/// <summary>
/// Gets direct dependencies of a package.
/// </summary>
/// <param name="packageId">The package SPDX ID.</param>
/// <returns>Dependent packages.</returns>
public IEnumerable<Spdx3Package> GetDependencies(string packageId)
{
return GetRelationshipsFrom(packageId)
.Where(r => r.RelationshipType == Spdx3RelationshipType.DependsOn)
.SelectMany(r => r.To)
.Select(GetById<Spdx3Package>)
.Where(p => p != null)
.Cast<Spdx3Package>();
}
/// <summary>
/// Gets all files contained in a package.
/// </summary>
/// <param name="packageId">The package SPDX ID.</param>
/// <returns>Contained files.</returns>
public IEnumerable<Spdx3File> GetContainedFiles(string packageId)
{
return GetRelationshipsFrom(packageId)
.Where(r => r.RelationshipType == Spdx3RelationshipType.Contains)
.SelectMany(r => r.To)
.Select(GetById<Spdx3File>)
.Where(f => f != null)
.Cast<Spdx3File>();
}
/// <summary>
/// Checks if the document conforms to a specific profile.
/// </summary>
/// <param name="profile">The profile to check.</param>
/// <returns>True if the document conforms.</returns>
public bool ConformsTo(Spdx3ProfileIdentifier profile)
{
return Profiles.Contains(profile);
}
/// <summary>
/// Gets all PURLs from packages in the document.
/// </summary>
/// <returns>Package URLs.</returns>
public IEnumerable<string> GetAllPurls()
{
return Packages
.SelectMany(p => p.ExternalIdentifier)
.Where(i => i.ExternalIdentifierType == Spdx3ExternalIdentifierType.PackageUrl)
.Select(i => i.Identifier)
.Distinct(StringComparer.Ordinal);
}
/// <summary>
/// Gets the root package (if any).
/// </summary>
/// <returns>The root package, or null.</returns>
public Spdx3Package? GetRootPackage()
{
if (SpdxDocument?.RootElement.Length > 0)
{
var rootId = SpdxDocument.RootElement[0];
return GetById<Spdx3Package>(rootId);
}
// Fallback: find package with no incoming Contains relationships
var containedIds = Relationships
.Where(r => r.RelationshipType == Spdx3RelationshipType.Contains)
.SelectMany(r => r.To)
.ToHashSet(StringComparer.Ordinal);
return Packages.FirstOrDefault(p => !containedIds.Contains(p.SpdxId));
}
}

View File

@@ -0,0 +1,113 @@
// <copyright file="Spdx3Element.cs" company="StellaOps">
// Copyright (c) StellaOps. Licensed under the AGPL-3.0-or-later.
// </copyright>
using System.Collections.Immutable;
using System.ComponentModel.DataAnnotations;
using System.Text.Json.Serialization;
namespace StellaOps.Spdx3.Model;
/// <summary>
/// Base class for all SPDX 3.0.1 elements.
/// Every element in SPDX 3.0 derives from this abstract type.
/// </summary>
public abstract record Spdx3Element
{
    /// <summary>
    /// Gets the unique IRI identifier for this element.
    /// </summary>
    /// <remarks>
    /// This must be globally unique and serves as the element's identity.
    /// Format: URN, URL, or document-scoped ID with #fragment.
    /// </remarks>
    [Required]
    [JsonPropertyName("spdxId")]
    public required string SpdxId { get; init; }

    /// <summary>
    /// Gets the element type from JSON-LD (the "@type" keyword, e.g. "software_Package").
    /// </summary>
    [JsonPropertyName("@type")]
    public string? Type { get; init; }

    /// <summary>
    /// Gets the reference to creation information.
    /// Used when CreationInfo is shared across elements; holds the ID of the shared
    /// CreationInfo object rather than the object itself.
    /// </summary>
    [JsonPropertyName("creationInfo")]
    public string? CreationInfoRef { get; init; }

    /// <summary>
    /// Gets the inline creation information.
    /// Used when CreationInfo is specific to this element.
    /// Not serialized directly; populated by the parser — TODO confirm against Spdx3Parser.
    /// </summary>
    [JsonIgnore]
    public Spdx3CreationInfo? CreationInfo { get; init; }

    /// <summary>
    /// Gets the human-readable name.
    /// </summary>
    [JsonPropertyName("name")]
    public string? Name { get; init; }

    /// <summary>
    /// Gets a brief summary (short description).
    /// </summary>
    [JsonPropertyName("summary")]
    public string? Summary { get; init; }

    /// <summary>
    /// Gets a detailed description.
    /// </summary>
    [JsonPropertyName("description")]
    public string? Description { get; init; }

    /// <summary>
    /// Gets an optional comment about this element.
    /// </summary>
    [JsonPropertyName("comment")]
    public string? Comment { get; init; }

    /// <summary>
    /// Gets integrity verification methods (hashes, signatures). Defaults to empty.
    /// </summary>
    [JsonPropertyName("verifiedUsing")]
    public ImmutableArray<Spdx3IntegrityMethod> VerifiedUsing { get; init; } = [];

    /// <summary>
    /// Gets external references (security advisories, etc.). Defaults to empty.
    /// </summary>
    [JsonPropertyName("externalRef")]
    public ImmutableArray<Spdx3ExternalRef> ExternalRef { get; init; } = [];

    /// <summary>
    /// Gets external identifiers (PURL, CPE, SWID, etc.). Defaults to empty.
    /// </summary>
    [JsonPropertyName("externalIdentifier")]
    public ImmutableArray<Spdx3ExternalIdentifier> ExternalIdentifier { get; init; } = [];

    /// <summary>
    /// Gets profile-specific extensions. Defaults to empty.
    /// </summary>
    [JsonPropertyName("extension")]
    public ImmutableArray<Spdx3Extension> Extension { get; init; } = [];
}
/// <summary>
/// Represents a profile-specific extension.
/// </summary>
public sealed record Spdx3Extension
{
    /// <summary>
    /// Gets the extension type (the JSON-LD "@type" keyword).
    /// </summary>
    [JsonPropertyName("@type")]
    public string? Type { get; init; }

    /// <summary>
    /// Gets the extension data as raw JSON.
    /// Captures all properties not mapped elsewhere on this record;
    /// NOTE(review): with System.Text.Json the values are typically JsonElement
    /// instances after deserialization — confirm before casting.
    /// </summary>
    [JsonExtensionData]
    public Dictionary<string, object?>? ExtensionData { get; init; }
}

View File

@@ -0,0 +1,148 @@
// <copyright file="Spdx3ExternalIdentifier.cs" company="StellaOps">
// Copyright (c) StellaOps. Licensed under the AGPL-3.0-or-later.
// </copyright>
using System.Text.Json.Serialization;
namespace StellaOps.Spdx3.Model;
/// <summary>
/// Represents an external identifier in SPDX 3.0.1.
/// Used for PURL, CPE, SWID, and other identifiers.
/// </summary>
public sealed record Spdx3ExternalIdentifier
{
    /// <summary>
    /// Gets the type (the JSON-LD "@type" keyword).
    /// </summary>
    [JsonPropertyName("@type")]
    public string? Type { get; init; }

    /// <summary>
    /// Gets the identifier type (PURL, CPE, SWHID, ...).
    /// </summary>
    [JsonPropertyName("externalIdentifierType")]
    public Spdx3ExternalIdentifierType? ExternalIdentifierType { get; init; }

    /// <summary>
    /// Gets the identifier value (e.g. a "pkg:..." PURL or "cpe:2.3:..." string).
    /// </summary>
    [JsonPropertyName("identifier")]
    public required string Identifier { get; init; }

    /// <summary>
    /// Gets an optional comment.
    /// </summary>
    [JsonPropertyName("comment")]
    public string? Comment { get; init; }

    /// <summary>
    /// Gets the identifying organization that issued this identifier.
    /// </summary>
    [JsonPropertyName("issuingAuthority")]
    public string? IssuingAuthority { get; init; }
}
/// <summary>
/// External identifier types in SPDX 3.0.1.
/// </summary>
/// <remarks>
/// Serialized as strings via <see cref="JsonStringEnumConverter"/>.
/// NOTE(review): the converter parses case-insensitively but writes PascalCase member
/// names; SPDX JSON vocabularies appear to use camelCase values — confirm a camelCase
/// naming policy is applied if these enums are ever re-serialized.
/// </remarks>
[JsonConverter(typeof(JsonStringEnumConverter))]
public enum Spdx3ExternalIdentifierType
{
    /// <summary>
    /// CPE 2.2 identifier.
    /// </summary>
    Cpe22,

    /// <summary>
    /// CPE 2.3 identifier.
    /// </summary>
    Cpe23,

    /// <summary>
    /// CVE identifier.
    /// </summary>
    Cve,

    /// <summary>
    /// Email identifier.
    /// </summary>
    Email,

    /// <summary>
    /// Git object identifier (SHA).
    /// </summary>
    GitOid,

    /// <summary>
    /// Other identifier type.
    /// </summary>
    Other,

    /// <summary>
    /// Package URL (PURL).
    /// </summary>
    PackageUrl,

    /// <summary>
    /// Security advisory identifier.
    /// </summary>
    SecurityOther,

    /// <summary>
    /// SWHID (Software Heritage ID).
    /// </summary>
    Swhid,

    /// <summary>
    /// SWID tag identifier.
    /// </summary>
    Swid,

    /// <summary>
    /// URL identifier.
    /// </summary>
    UrlScheme
}
/// <summary>
/// Extension methods for extracting well-known identifiers
/// from collections of <see cref="Spdx3ExternalIdentifier"/>.
/// </summary>
public static class Spdx3ExternalIdentifierExtensions
{
    /// <summary>
    /// Extracts PURL from external identifiers.
    /// </summary>
    /// <param name="identifiers">The identifiers to search.</param>
    /// <returns>The PURL if found, otherwise null.</returns>
    public static string? GetPurl(this IEnumerable<Spdx3ExternalIdentifier> identifiers)
        => FindFirst(identifiers, Spdx3ExternalIdentifierType.PackageUrl);

    /// <summary>
    /// Extracts CPE 2.3 from external identifiers.
    /// </summary>
    /// <param name="identifiers">The identifiers to search.</param>
    /// <returns>The CPE 2.3 if found, otherwise null.</returns>
    public static string? GetCpe23(this IEnumerable<Spdx3ExternalIdentifier> identifiers)
        => FindFirst(identifiers, Spdx3ExternalIdentifierType.Cpe23);

    /// <summary>
    /// Extracts SWHID from external identifiers.
    /// </summary>
    /// <param name="identifiers">The identifiers to search.</param>
    /// <returns>The SWHID if found, otherwise null.</returns>
    public static string? GetSwhid(this IEnumerable<Spdx3ExternalIdentifier> identifiers)
        => FindFirst(identifiers, Spdx3ExternalIdentifierType.Swhid);

    // Returns the value of the first identifier of the requested type, or null if none match.
    private static string? FindFirst(
        IEnumerable<Spdx3ExternalIdentifier> identifiers,
        Spdx3ExternalIdentifierType type)
    {
        foreach (var candidate in identifiers)
        {
            if (candidate.ExternalIdentifierType == type)
            {
                return candidate.Identifier;
            }
        }

        return null;
    }
}

View File

@@ -0,0 +1,291 @@
// <copyright file="Spdx3ExternalRef.cs" company="StellaOps">
// Copyright (c) StellaOps. Licensed under the AGPL-3.0-or-later.
// </copyright>
using System.Collections.Immutable;
using System.Text.Json.Serialization;
namespace StellaOps.Spdx3.Model;
/// <summary>
/// Represents an external reference in SPDX 3.0.1.
/// </summary>
public sealed record Spdx3ExternalRef
{
    /// <summary>
    /// Gets the type (the JSON-LD "@type" keyword).
    /// </summary>
    [JsonPropertyName("@type")]
    public string? Type { get; init; }

    /// <summary>
    /// Gets the external reference type.
    /// </summary>
    [JsonPropertyName("externalRefType")]
    public Spdx3ExternalRefType? ExternalRefType { get; init; }

    /// <summary>
    /// Gets the locator (URI or identifier).
    /// An array in SPDX 3.0.1 — one reference may carry multiple locators. Defaults to empty.
    /// </summary>
    [JsonPropertyName("locator")]
    public ImmutableArray<string> Locator { get; init; } = [];

    /// <summary>
    /// Gets the content type (MIME type) of the referenced resource.
    /// </summary>
    [JsonPropertyName("contentType")]
    public string? ContentType { get; init; }

    /// <summary>
    /// Gets an optional comment.
    /// </summary>
    [JsonPropertyName("comment")]
    public string? Comment { get; init; }
}
/// <summary>
/// External reference types in SPDX 3.0.1.
/// </summary>
/// <remarks>
/// Serialized as strings via <see cref="JsonStringEnumConverter"/> (case-insensitive on read).
/// </remarks>
[JsonConverter(typeof(JsonStringEnumConverter))]
public enum Spdx3ExternalRefType
{
    /// <summary>
    /// Alternate web page.
    /// </summary>
    AltWebPage,

    /// <summary>
    /// Alternate download location.
    /// </summary>
    AltDownloadLocation,

    /// <summary>
    /// Binary artifact.
    /// </summary>
    BinaryArtifact,

    /// <summary>
    /// Bower package reference.
    /// </summary>
    Bower,

    /// <summary>
    /// Build metadata.
    /// </summary>
    BuildMeta,

    /// <summary>
    /// Build system reference.
    /// </summary>
    BuildSystem,

    /// <summary>
    /// Certification reference.
    /// </summary>
    Certification,

    /// <summary>
    /// Chat reference.
    /// </summary>
    Chat,

    /// <summary>
    /// Component analysis report.
    /// </summary>
    ComponentAnalysisReport,

    /// <summary>
    /// CPE 2.2 identifier.
    /// </summary>
    Cpe22Type,

    /// <summary>
    /// CPE 2.3 identifier.
    /// </summary>
    Cpe23Type,

    /// <summary>
    /// CWE reference.
    /// </summary>
    Cwe,

    /// <summary>
    /// Documentation reference.
    /// </summary>
    Documentation,

    /// <summary>
    /// Dynamic analysis report.
    /// </summary>
    DynamicAnalysisReport,

    /// <summary>
    /// End of life information.
    /// </summary>
    EolNotice,

    /// <summary>
    /// Export control classification.
    /// </summary>
    ExportControlClassification,

    /// <summary>
    /// Funding reference.
    /// </summary>
    Funding,

    /// <summary>
    /// Issue tracker.
    /// </summary>
    IssueTracker,

    /// <summary>
    /// License reference.
    /// </summary>
    License,

    /// <summary>
    /// Mailing list reference.
    /// </summary>
    MailingList,

    /// <summary>
    /// Maven Central reference.
    /// </summary>
    MavenCentral,

    /// <summary>
    /// Metrics reference.
    /// </summary>
    Metrics,

    /// <summary>
    /// NPM package reference.
    /// </summary>
    Npm,

    /// <summary>
    /// NuGet package reference.
    /// </summary>
    Nuget,

    /// <summary>
    /// Other reference type.
    /// </summary>
    Other,

    /// <summary>
    /// Privacy assessment reference.
    /// </summary>
    PrivacyAssessment,

    /// <summary>
    /// Product metadata reference.
    /// </summary>
    ProductMetadata,

    /// <summary>
    /// Purchase order reference.
    /// </summary>
    PurchaseOrder,

    /// <summary>
    /// Quality assessment report.
    /// </summary>
    QualityAssessmentReport,

    /// <summary>
    /// Release history reference.
    /// </summary>
    ReleaseHistory,

    /// <summary>
    /// Release notes reference.
    /// </summary>
    ReleaseNotes,

    /// <summary>
    /// Risk assessment reference.
    /// </summary>
    RiskAssessment,

    /// <summary>
    /// Runtime analysis report.
    /// </summary>
    RuntimeAnalysisReport,

    /// <summary>
    /// Secure software development attestation.
    /// </summary>
    SecureSoftwareAttestation,

    /// <summary>
    /// Security adversary model.
    /// </summary>
    SecurityAdversaryModel,

    /// <summary>
    /// Security advisory.
    /// </summary>
    SecurityAdvisory,

    /// <summary>
    /// Security fix reference.
    /// </summary>
    SecurityFix,

    /// <summary>
    /// Security other reference.
    /// </summary>
    SecurityOther,

    /// <summary>
    /// Security penetration test report.
    /// </summary>
    SecurityPenTestReport,

    /// <summary>
    /// Security policy.
    /// </summary>
    SecurityPolicy,

    /// <summary>
    /// Security threat model.
    /// </summary>
    SecurityThreatModel,

    /// <summary>
    /// Social media reference.
    /// </summary>
    SocialMedia,

    /// <summary>
    /// Source artifact.
    /// </summary>
    SourceArtifact,

    /// <summary>
    /// Static analysis report.
    /// </summary>
    StaticAnalysisReport,

    /// <summary>
    /// Support reference.
    /// </summary>
    Support,

    /// <summary>
    /// VCS (version control system) reference.
    /// </summary>
    Vcs,

    /// <summary>
    /// Vulnerability disclosure report.
    /// </summary>
    VulnerabilityDisclosureReport,

    /// <summary>
    /// Vulnerability exploitability assessment.
    /// </summary>
    VulnerabilityExploitabilityAssessment
}

View File

@@ -0,0 +1,225 @@
// <copyright file="Spdx3IntegrityMethod.cs" company="StellaOps">
// Copyright (c) StellaOps. Licensed under the AGPL-3.0-or-later.
// </copyright>
using System.Globalization;
using System.Text.Json.Serialization;
namespace StellaOps.Spdx3.Model;
/// <summary>
/// Base class for integrity verification methods in SPDX 3.0.1
/// (e.g. <see cref="Spdx3Hash"/>).
/// </summary>
public abstract record Spdx3IntegrityMethod
{
    /// <summary>
    /// Gets the type (the JSON-LD "@type" keyword, used to pick the concrete subtype).
    /// </summary>
    [JsonPropertyName("@type")]
    public string? Type { get; init; }

    /// <summary>
    /// Gets an optional comment.
    /// </summary>
    [JsonPropertyName("comment")]
    public string? Comment { get; init; }
}
/// <summary>
/// Represents a hash integrity method.
/// </summary>
public sealed record Spdx3Hash : Spdx3IntegrityMethod
{
    /// <summary>
    /// Gets the hash algorithm.
    /// </summary>
    [JsonPropertyName("algorithm")]
    public required Spdx3HashAlgorithm Algorithm { get; init; }

    /// <summary>
    /// Gets the hash value (lowercase hex).
    /// </summary>
    [JsonPropertyName("hashValue")]
    public required string HashValue { get; init; }

    /// <summary>
    /// Gets the normalized hash value (lowercase).
    /// </summary>
    [JsonIgnore]
    public string NormalizedHashValue => HashValue.ToLowerInvariant();

    /// <summary>
    /// Validates that the hash value is non-empty, valid hex.
    /// </summary>
    /// <returns>True if the value is a non-empty hex string.</returns>
    public bool IsValidHex()
    {
        // An empty value cannot be a real digest; previously "" vacuously passed All().
        return HashValue.Length > 0 && HashValue.All(char.IsAsciiHexDigit);
    }

    /// <summary>
    /// Gets the expected hash length for the algorithm.
    /// </summary>
    /// <returns>Expected length in hex characters, or -1 when unknown for this algorithm.</returns>
    public int GetExpectedLength() => Algorithm switch
    {
        Spdx3HashAlgorithm.Sha256 => 64,
        Spdx3HashAlgorithm.Sha384 => 96,
        Spdx3HashAlgorithm.Sha512 => 128,
        Spdx3HashAlgorithm.Sha3_256 => 64,
        Spdx3HashAlgorithm.Sha3_384 => 96,
        Spdx3HashAlgorithm.Sha3_512 => 128,
        Spdx3HashAlgorithm.Blake2b256 => 64,
        Spdx3HashAlgorithm.Blake2b384 => 96,
        Spdx3HashAlgorithm.Blake2b512 => 128,
        Spdx3HashAlgorithm.Blake3 => 64,
        Spdx3HashAlgorithm.Md5 => 32,
        Spdx3HashAlgorithm.Sha1 => 40,
        Spdx3HashAlgorithm.Adler32 => 8,
        _ => -1
    };

    /// <summary>
    /// Validates the hash length matches the algorithm.
    /// </summary>
    /// <returns>True if valid length (or the expected length is unknown).</returns>
    public bool IsValidLength()
    {
        var expected = GetExpectedLength();
        return expected == -1 || HashValue.Length == expected;
    }
}
/// <summary>
/// Hash algorithms supported by SPDX 3.0.1.
/// </summary>
/// <remarks>
/// Serialized as strings via <see cref="JsonStringEnumConverter"/> (case-insensitive on read).
/// See <see cref="Spdx3HashAlgorithmExtensions"/> for recommended/deprecated classification.
/// </remarks>
[JsonConverter(typeof(JsonStringEnumConverter))]
public enum Spdx3HashAlgorithm
{
    /// <summary>
    /// Adler-32 checksum.
    /// </summary>
    Adler32,

    /// <summary>
    /// BLAKE2b-256.
    /// </summary>
    Blake2b256,

    /// <summary>
    /// BLAKE2b-384.
    /// </summary>
    Blake2b384,

    /// <summary>
    /// BLAKE2b-512.
    /// </summary>
    Blake2b512,

    /// <summary>
    /// BLAKE3.
    /// </summary>
    Blake3,

    /// <summary>
    /// MD2 (deprecated).
    /// </summary>
    Md2,

    /// <summary>
    /// MD4 (deprecated).
    /// </summary>
    Md4,

    /// <summary>
    /// MD5 (deprecated).
    /// </summary>
    Md5,

    /// <summary>
    /// MD6.
    /// </summary>
    Md6,

    /// <summary>
    /// SHA-1 (deprecated).
    /// </summary>
    Sha1,

    /// <summary>
    /// SHA-224.
    /// </summary>
    Sha224,

    /// <summary>
    /// SHA-256 (recommended).
    /// </summary>
    Sha256,

    /// <summary>
    /// SHA-384.
    /// </summary>
    Sha384,

    /// <summary>
    /// SHA-512 (recommended).
    /// </summary>
    Sha512,

    /// <summary>
    /// SHA3-224.
    /// </summary>
    Sha3_224,

    /// <summary>
    /// SHA3-256 (recommended).
    /// </summary>
    Sha3_256,

    /// <summary>
    /// SHA3-384.
    /// </summary>
    Sha3_384,

    /// <summary>
    /// SHA3-512 (recommended).
    /// </summary>
    Sha3_512
}
/// <summary>
/// Classification helpers for <see cref="Spdx3HashAlgorithm"/> values.
/// </summary>
public static class Spdx3HashAlgorithmExtensions
{
    /// <summary>
    /// Gets whether this algorithm is recommended for use.
    /// </summary>
    /// <param name="algorithm">The algorithm.</param>
    /// <returns>True if recommended.</returns>
    public static bool IsRecommended(this Spdx3HashAlgorithm algorithm)
        => algorithm
            is Spdx3HashAlgorithm.Sha256
            or Spdx3HashAlgorithm.Sha512
            or Spdx3HashAlgorithm.Sha3_256
            or Spdx3HashAlgorithm.Sha3_512
            or Spdx3HashAlgorithm.Blake2b256
            or Spdx3HashAlgorithm.Blake2b512
            or Spdx3HashAlgorithm.Blake3;

    /// <summary>
    /// Gets whether this algorithm is deprecated.
    /// </summary>
    /// <param name="algorithm">The algorithm.</param>
    /// <returns>True if deprecated.</returns>
    public static bool IsDeprecated(this Spdx3HashAlgorithm algorithm)
        => algorithm
            is Spdx3HashAlgorithm.Md2
            or Spdx3HashAlgorithm.Md4
            or Spdx3HashAlgorithm.Md5
            or Spdx3HashAlgorithm.Sha1;
}

View File

@@ -0,0 +1,179 @@
// <copyright file="Spdx3ProfileIdentifier.cs" company="StellaOps">
// Copyright (c) StellaOps. Licensed under the AGPL-3.0-or-later.
// </copyright>
using System.Text.Json.Serialization;
namespace StellaOps.Spdx3.Model;
/// <summary>
/// SPDX 3.0.1 profile identifiers.
/// </summary>
/// <remarks>
/// Serialized as strings via <see cref="JsonStringEnumConverter"/>;
/// see <see cref="Spdx3ProfileUris"/> for the corresponding profile URIs.
/// </remarks>
[JsonConverter(typeof(JsonStringEnumConverter))]
public enum Spdx3ProfileIdentifier
{
    /// <summary>
    /// Core profile (required for all documents).
    /// </summary>
    Core,

    /// <summary>
    /// Software profile (packages, files, snippets).
    /// </summary>
    Software,

    /// <summary>
    /// Security profile (vulnerabilities, VEX).
    /// </summary>
    Security,

    /// <summary>
    /// Licensing profile (license expressions).
    /// </summary>
    Licensing,

    /// <summary>
    /// Build profile (build metadata, attestation).
    /// </summary>
    Build,

    /// <summary>
    /// AI profile (ML models, datasets).
    /// </summary>
    AI,

    /// <summary>
    /// Dataset profile (data catalogs).
    /// </summary>
    Dataset,

    /// <summary>
    /// Lite profile (minimal SBOM).
    /// </summary>
    Lite,

    /// <summary>
    /// Extension profile (custom extensions).
    /// </summary>
    Extension
}
/// <summary>
/// Profile URI constants for SPDX 3.0.1.
/// </summary>
public static class Spdx3ProfileUris
{
    /// <summary>
    /// Base URI for SPDX 3.0.1 profiles.
    /// </summary>
    public const string BaseUri = "https://spdx.org/rdf/3.0.1/terms/";

    // Per-profile constants derive from BaseUri (compile-time const concatenation),
    // so a spec-version bump is a one-line change. Values are unchanged.

    /// <summary>
    /// Core profile URI.
    /// </summary>
    public const string Core = BaseUri + "Core";

    /// <summary>
    /// Software profile URI.
    /// </summary>
    public const string Software = BaseUri + "Software";

    /// <summary>
    /// Security profile URI.
    /// </summary>
    public const string Security = BaseUri + "Security";

    /// <summary>
    /// Licensing profile URI.
    /// </summary>
    public const string Licensing = BaseUri + "Licensing";

    /// <summary>
    /// Build profile URI.
    /// </summary>
    public const string Build = BaseUri + "Build";

    /// <summary>
    /// AI profile URI.
    /// </summary>
    public const string AI = BaseUri + "AI";

    /// <summary>
    /// Dataset profile URI.
    /// </summary>
    public const string Dataset = BaseUri + "Dataset";

    /// <summary>
    /// Lite profile URI.
    /// </summary>
    public const string Lite = BaseUri + "Lite";

    /// <summary>
    /// Extension profile URI.
    /// </summary>
    public const string Extension = BaseUri + "Extension";

    /// <summary>
    /// Gets the URI for a profile identifier.
    /// </summary>
    /// <param name="profile">The profile identifier.</param>
    /// <returns>The profile URI.</returns>
    /// <exception cref="ArgumentOutOfRangeException">Thrown for unknown profile values.</exception>
    public static string GetUri(Spdx3ProfileIdentifier profile) => profile switch
    {
        Spdx3ProfileIdentifier.Core => Core,
        Spdx3ProfileIdentifier.Software => Software,
        Spdx3ProfileIdentifier.Security => Security,
        Spdx3ProfileIdentifier.Licensing => Licensing,
        Spdx3ProfileIdentifier.Build => Build,
        Spdx3ProfileIdentifier.AI => AI,
        Spdx3ProfileIdentifier.Dataset => Dataset,
        Spdx3ProfileIdentifier.Lite => Lite,
        Spdx3ProfileIdentifier.Extension => Extension,
        _ => throw new ArgumentOutOfRangeException(nameof(profile), profile, null)
    };

    /// <summary>
    /// Parses a profile URI to a profile identifier.
    /// </summary>
    /// <param name="uri">The URI to parse.</param>
    /// <returns>The profile identifier, or null if not recognized.</returns>
    public static Spdx3ProfileIdentifier? ParseUri(string uri)
    {
        return uri switch
        {
            Core => Spdx3ProfileIdentifier.Core,
            Software => Spdx3ProfileIdentifier.Software,
            Security => Spdx3ProfileIdentifier.Security,
            Licensing => Spdx3ProfileIdentifier.Licensing,
            Build => Spdx3ProfileIdentifier.Build,
            AI => Spdx3ProfileIdentifier.AI,
            Dataset => Spdx3ProfileIdentifier.Dataset,
            Lite => Spdx3ProfileIdentifier.Lite,
            Extension => Spdx3ProfileIdentifier.Extension,
            _ => null
        };
    }

    /// <summary>
    /// Parses a profile string (name or URI) to a profile identifier.
    /// </summary>
    /// <param name="value">The value to parse.</param>
    /// <returns>The profile identifier, or null if not recognized.</returns>
    public static Spdx3ProfileIdentifier? Parse(string value)
    {
        // Try as URI first
        var fromUri = ParseUri(value);
        if (fromUri.HasValue)
        {
            return fromUri;
        }

        // Try as enum name (case-insensitive). Enum.TryParse also accepts arbitrary
        // numeric strings (e.g. "42"), so reject values outside the defined set.
        if (Enum.TryParse<Spdx3ProfileIdentifier>(value, ignoreCase: true, out var result)
            && Enum.IsDefined(result))
        {
            return result;
        }

        return null;
    }
}

View File

@@ -0,0 +1,263 @@
// <copyright file="Spdx3Relationship.cs" company="StellaOps">
// Copyright (c) StellaOps. Licensed under the AGPL-3.0-or-later.
// </copyright>
using System.Collections.Immutable;
using System.ComponentModel.DataAnnotations;
using System.Text.Json.Serialization;
namespace StellaOps.Spdx3.Model;
/// <summary>
/// Represents a relationship between SPDX 3.0.1 elements.
/// Directed edge: <see cref="From"/> --(<see cref="RelationshipType"/>)--> <see cref="To"/>.
/// </summary>
public sealed record Spdx3Relationship : Spdx3Element
{
    /// <summary>
    /// Gets the source element of the relationship (SPDX ID).
    /// </summary>
    [Required]
    [JsonPropertyName("from")]
    public required string From { get; init; }

    /// <summary>
    /// Gets the target element(s) of the relationship (SPDX IDs);
    /// a single relationship may point at multiple targets.
    /// </summary>
    [Required]
    [JsonPropertyName("to")]
    public required ImmutableArray<string> To { get; init; }

    /// <summary>
    /// Gets the type of relationship.
    /// </summary>
    [Required]
    [JsonPropertyName("relationshipType")]
    public required Spdx3RelationshipType RelationshipType { get; init; }

    /// <summary>
    /// Gets the completeness of the relationship (whether <see cref="To"/> lists all targets).
    /// </summary>
    [JsonPropertyName("completeness")]
    public Spdx3RelationshipCompleteness? Completeness { get; init; }

    /// <summary>
    /// Gets the start time of the relationship (temporal scope).
    /// </summary>
    [JsonPropertyName("startTime")]
    public DateTimeOffset? StartTime { get; init; }

    /// <summary>
    /// Gets the end time of the relationship (temporal scope).
    /// </summary>
    [JsonPropertyName("endTime")]
    public DateTimeOffset? EndTime { get; init; }
}
/// <summary>
/// SPDX 3.0.1 relationship types.
/// Member summaries read as "Element A (from) ... Element B (to)".
/// </summary>
/// <remarks>
/// Serialized as strings via <see cref="JsonStringEnumConverter"/> (case-insensitive on read).
/// </remarks>
[JsonConverter(typeof(JsonStringEnumConverter))]
public enum Spdx3RelationshipType
{
    /// <summary>
    /// Element A contains Element B.
    /// </summary>
    Contains,

    /// <summary>
    /// Element A is contained by Element B.
    /// </summary>
    ContainedBy,

    /// <summary>
    /// Element A depends on Element B.
    /// </summary>
    DependsOn,

    /// <summary>
    /// Element A is a dependency of Element B.
    /// </summary>
    DependencyOf,

    /// <summary>
    /// Element A is a build tool of Element B.
    /// </summary>
    BuildToolOf,

    /// <summary>
    /// Element A is a dev tool of Element B.
    /// </summary>
    DevToolOf,

    /// <summary>
    /// Element A is a test tool of Element B.
    /// </summary>
    TestToolOf,

    /// <summary>
    /// Element A is documentation of Element B.
    /// </summary>
    DocumentationOf,

    /// <summary>
    /// Element A is an optional component of Element B.
    /// </summary>
    OptionalComponentOf,

    /// <summary>
    /// Element A is a provided dependency of Element B.
    /// </summary>
    ProvidedDependencyOf,

    /// <summary>
    /// Element A is a test of Element B.
    /// </summary>
    TestOf,

    /// <summary>
    /// Element A is a test case of Element B.
    /// </summary>
    TestCaseOf,

    /// <summary>
    /// Element A is a copy of Element B.
    /// </summary>
    CopyOf,

    /// <summary>
    /// Element A is a file added to Element B.
    /// </summary>
    FileAddedTo,

    /// <summary>
    /// Element A is a file deleted from Element B.
    /// </summary>
    FileDeletedFrom,

    /// <summary>
    /// Element A is a file modified in Element B.
    /// </summary>
    FileModified,

    /// <summary>
    /// Element A was expanded from archive Element B.
    /// </summary>
    ExpandedFromArchive,

    /// <summary>
    /// Element A dynamically links to Element B.
    /// </summary>
    DynamicLink,

    /// <summary>
    /// Element A statically links to Element B.
    /// </summary>
    StaticLink,

    /// <summary>
    /// Element A is a data file of Element B.
    /// </summary>
    DataFileOf,

    /// <summary>
    /// Element A was generated from Element B.
    /// </summary>
    GeneratedFrom,

    /// <summary>
    /// Element A generates Element B.
    /// </summary>
    Generates,

    /// <summary>
    /// Element A is an ancestor of Element B.
    /// </summary>
    AncestorOf,

    /// <summary>
    /// Element A is a descendant of Element B.
    /// </summary>
    DescendantOf,

    /// <summary>
    /// Element A is a variant of Element B.
    /// </summary>
    VariantOf,

    /// <summary>
    /// Element A is a distribution artifact of Element B.
    /// </summary>
    DistributionArtifact,

    /// <summary>
    /// Element A is a patch for Element B.
    /// </summary>
    PatchFor,

    /// <summary>
    /// Element A is a requirement for Element B.
    /// </summary>
    RequirementFor,

    /// <summary>
    /// Element A is a specification for Element B.
    /// </summary>
    SpecificationFor,

    /// <summary>
    /// Element A is amended by Element B.
    /// </summary>
    AmendedBy,

    /// <summary>
    /// Element A describes Element B.
    /// </summary>
    Describes,

    /// <summary>
    /// Element A is described by Element B.
    /// </summary>
    DescribedBy,

    /// <summary>
    /// Element A has a prerequisite Element B.
    /// </summary>
    HasPrerequisite,

    /// <summary>
    /// Element A is a prerequisite of Element B.
    /// </summary>
    PrerequisiteFor,

    /// <summary>
    /// Element A has evidence of Element B.
    /// </summary>
    HasEvidence,

    /// <summary>
    /// Other relationship type (requires comment).
    /// </summary>
    Other
}
/// <summary>
/// Completeness of a relationship: whether the "to" side
/// enumerates every target of the stated relationship.
/// </summary>
[JsonConverter(typeof(JsonStringEnumConverter))]
public enum Spdx3RelationshipCompleteness
{
    /// <summary>
    /// The relationship is complete (all targets are listed).
    /// </summary>
    Complete,

    /// <summary>
    /// The relationship is incomplete (known targets are missing).
    /// </summary>
    Incomplete,

    /// <summary>
    /// No assertion about completeness.
    /// </summary>
    NoAssertion
}

View File

@@ -0,0 +1,587 @@
// <copyright file="Spdx3Parser.cs" company="StellaOps">
// Copyright (c) StellaOps. Licensed under the AGPL-3.0-or-later.
// </copyright>
using System.Collections.Immutable;
using System.Text.Json;
using Microsoft.Extensions.Logging;
using StellaOps.Spdx3.JsonLd;
using StellaOps.Spdx3.Model;
using StellaOps.Spdx3.Model.Software;
namespace StellaOps.Spdx3;
/// <summary>
/// SPDX 3.0.1 JSON-LD parser implementation.
/// Reads a JSON-LD document (either an @graph array of elements or a single
/// top-level element), materializes the known element types, and collects
/// CreationInfo / profile metadata along the way.
/// </summary>
public sealed class Spdx3Parser : ISpdx3Parser
{
    private readonly ISpdx3ContextResolver _contextResolver;
    private readonly ILogger<Spdx3Parser> _logger;

    /// <summary>
    /// Initializes a new instance of the <see cref="Spdx3Parser"/> class.
    /// </summary>
    /// <param name="contextResolver">Resolver used to fetch/cache the JSON-LD @context document.</param>
    /// <param name="logger">Logger for parse diagnostics.</param>
    public Spdx3Parser(
        ISpdx3ContextResolver contextResolver,
        ILogger<Spdx3Parser> logger)
    {
        _contextResolver = contextResolver;
        _logger = logger;
    }

    /// <inheritdoc />
    public async Task<Spdx3ParseResult> ParseAsync(
        Stream stream,
        CancellationToken cancellationToken = default)
    {
        try
        {
            using var document = await JsonDocument.ParseAsync(stream, cancellationToken: cancellationToken)
                .ConfigureAwait(false);
            return await ParseDocumentAsync(document, cancellationToken)
                .ConfigureAwait(false);
        }
        catch (JsonException ex)
        {
            _logger.LogError(ex, "Failed to parse SPDX 3.0.1 JSON");
            return Spdx3ParseResult.Failed("JSON_PARSE_ERROR", ex.Message, ex.Path);
        }
    }

    /// <inheritdoc />
    public async Task<Spdx3ParseResult> ParseAsync(
        string filePath,
        CancellationToken cancellationToken = default)
    {
        if (!File.Exists(filePath))
        {
            return Spdx3ParseResult.Failed("FILE_NOT_FOUND", $"File not found: {filePath}");
        }

        await using var stream = File.OpenRead(filePath);
        return await ParseAsync(stream, cancellationToken).ConfigureAwait(false);
    }

    /// <inheritdoc />
    public async Task<Spdx3ParseResult> ParseFromJsonAsync(
        string json,
        CancellationToken cancellationToken = default)
    {
        using var stream = new MemoryStream(System.Text.Encoding.UTF8.GetBytes(json));
        return await ParseAsync(stream, cancellationToken).ConfigureAwait(false);
    }

    /// <summary>
    /// Parses the root of a JSON-LD document: validates the @context, resolves it,
    /// then walks @graph (or the root itself) collecting elements and CreationInfo.
    /// </summary>
    private async Task<Spdx3ParseResult> ParseDocumentAsync(
        JsonDocument document,
        CancellationToken cancellationToken)
    {
        var root = document.RootElement;
        var errors = new List<Spdx3ParseError>();
        var warnings = new List<Spdx3ParseWarning>();

        // Check for @context (JSON-LD indicator)
        if (!root.TryGetProperty("@context", out var contextElement))
        {
            return Spdx3ParseResult.Failed("MISSING_CONTEXT", "No @context found. Is this an SPDX 3.0.1 JSON-LD document?");
        }

        // Resolve context (for future expansion support)
        var contextRef = GetContextReference(contextElement);
        if (!string.IsNullOrEmpty(contextRef))
        {
            await _contextResolver.ResolveAsync(contextRef, cancellationToken).ConfigureAwait(false);
        }

        // Parse @graph (main element array)
        var elements = new List<Spdx3Element>();
        var creationInfos = new List<Spdx3CreationInfo>();
        var profiles = new HashSet<Spdx3ProfileIdentifier>();
        Spdx3SpdxDocument? spdxDocument = null;

        if (root.TryGetProperty("@graph", out var graphElement) && graphElement.ValueKind == JsonValueKind.Array)
        {
            foreach (var element in graphElement.EnumerateArray())
            {
                var parsed = ParseElement(element, warnings);
                if (parsed != null)
                {
                    elements.Add(parsed);
                    if (parsed is Spdx3SpdxDocument doc)
                    {
                        spdxDocument = doc;
                    }
                }

                // Also extract inline CreationInfo and accumulate declared profiles.
                var creationInfo = ExtractCreationInfo(element);
                if (creationInfo != null)
                {
                    creationInfos.Add(creationInfo);
                    foreach (var profile in creationInfo.Profile)
                    {
                        profiles.Add(profile);
                    }
                }
            }
        }
        else
        {
            // Single document format (no @graph)
            var parsed = ParseElement(root, warnings);
            if (parsed != null)
            {
                elements.Add(parsed);
                if (parsed is Spdx3SpdxDocument doc)
                {
                    spdxDocument = doc;
                }
            }
        }

        if (errors.Count > 0)
        {
            return Spdx3ParseResult.Failed(errors, warnings);
        }

        var result = new Spdx3Document(elements, creationInfos, profiles, spdxDocument);
        return Spdx3ParseResult.Succeeded(result, warnings);
    }

    /// <summary>
    /// Dispatches a single JSON object to the type-specific parser based on its
    /// @type. Returns null (with a warning where appropriate) for objects that
    /// lack a type/spdxId or have an unrecognized type.
    /// </summary>
    private static Spdx3Element? ParseElement(
        JsonElement element,
        List<Spdx3ParseWarning> warnings)
    {
        if (element.ValueKind != JsonValueKind.Object)
        {
            return null;
        }

        // Get @type to determine element type (compacted or expanded form).
        var type = GetStringProperty(element, "@type") ?? GetStringProperty(element, "type");
        if (string.IsNullOrEmpty(type))
        {
            return null;
        }

        // Get spdxId (required)
        var spdxId = GetStringProperty(element, "spdxId") ?? GetStringProperty(element, "@id");
        if (string.IsNullOrEmpty(spdxId))
        {
            warnings.Add(new Spdx3ParseWarning("MISSING_SPDX_ID", $"Element of type {type} has no spdxId"));
            return null;
        }

        return type switch
        {
            "software_Package" or "Package" or "spdx:software_Package" =>
                ParsePackage(element, spdxId),
            "software_File" or "File" or "spdx:software_File" =>
                ParseFile(element, spdxId),
            "software_Snippet" or "Snippet" or "spdx:software_Snippet" =>
                ParseSnippet(element, spdxId),
            "software_SpdxDocument" or "SpdxDocument" or "spdx:software_SpdxDocument" =>
                ParseSpdxDocument(element, spdxId),
            "Relationship" or "spdx:Relationship" =>
                ParseRelationship(element, spdxId),
            "Person" or "spdx:Person" =>
                ParseAgent<Spdx3Person>(element, spdxId),
            "Organization" or "spdx:Organization" =>
                ParseAgent<Spdx3Organization>(element, spdxId),
            "Tool" or "spdx:Tool" =>
                ParseAgent<Spdx3Tool>(element, spdxId),
            _ => ParseGenericElement(spdxId, type, warnings)
        };
    }

    /// <summary>Parses a software_Package element.</summary>
    private static Spdx3Package ParsePackage(JsonElement element, string spdxId)
    {
        return new Spdx3Package
        {
            SpdxId = spdxId,
            Type = GetStringProperty(element, "@type"),
            Name = GetStringProperty(element, "name"),
            Summary = GetStringProperty(element, "summary"),
            Description = GetStringProperty(element, "description"),
            Comment = GetStringProperty(element, "comment"),
            PackageVersion = GetStringProperty(element, "packageVersion"),
            DownloadLocation = GetStringProperty(element, "downloadLocation"),
            PackageUrl = GetStringProperty(element, "packageUrl"),
            HomePage = GetStringProperty(element, "homePage"),
            SourceInfo = GetStringProperty(element, "sourceInfo"),
            CopyrightText = GetStringProperty(element, "copyrightText"),
            SuppliedBy = GetStringProperty(element, "suppliedBy"),
            VerifiedUsing = ParseIntegrityMethods(element),
            ExternalRef = ParseExternalRefs(element),
            ExternalIdentifier = ParseExternalIdentifiers(element),
            OriginatedBy = GetStringArrayProperty(element, "originatedBy"),
            AttributionText = GetStringArrayProperty(element, "attributionText")
        };
    }

    /// <summary>Parses a software_File element.</summary>
    private static Spdx3File ParseFile(JsonElement element, string spdxId)
    {
        return new Spdx3File
        {
            SpdxId = spdxId,
            Type = GetStringProperty(element, "@type"),
            Name = GetStringProperty(element, "name") ?? string.Empty,
            Summary = GetStringProperty(element, "summary"),
            Description = GetStringProperty(element, "description"),
            Comment = GetStringProperty(element, "comment"),
            ContentType = GetStringProperty(element, "contentType"),
            CopyrightText = GetStringProperty(element, "copyrightText"),
            VerifiedUsing = ParseIntegrityMethods(element),
            ExternalRef = ParseExternalRefs(element),
            ExternalIdentifier = ParseExternalIdentifiers(element)
        };
    }

    /// <summary>Parses a software_Snippet element.</summary>
    private static Spdx3Snippet ParseSnippet(JsonElement element, string spdxId)
    {
        return new Spdx3Snippet
        {
            SpdxId = spdxId,
            Type = GetStringProperty(element, "@type"),
            Name = GetStringProperty(element, "name"),
            Summary = GetStringProperty(element, "summary"),
            Description = GetStringProperty(element, "description"),
            Comment = GetStringProperty(element, "comment"),
            SnippetFromFile = GetStringProperty(element, "snippetFromFile") ?? string.Empty,
            CopyrightText = GetStringProperty(element, "copyrightText"),
            VerifiedUsing = ParseIntegrityMethods(element),
            ExternalRef = ParseExternalRefs(element),
            ExternalIdentifier = ParseExternalIdentifiers(element)
        };
    }

    /// <summary>Parses an SpdxDocument element.</summary>
    private static Spdx3SpdxDocument ParseSpdxDocument(JsonElement element, string spdxId)
    {
        return new Spdx3SpdxDocument
        {
            SpdxId = spdxId,
            Type = GetStringProperty(element, "@type"),
            Name = GetStringProperty(element, "name"),
            Summary = GetStringProperty(element, "summary"),
            Description = GetStringProperty(element, "description"),
            Comment = GetStringProperty(element, "comment"),
            Element = GetStringArrayProperty(element, "element"),
            RootElement = GetStringArrayProperty(element, "rootElement"),
            VerifiedUsing = ParseIntegrityMethods(element),
            ExternalRef = ParseExternalRefs(element),
            ExternalIdentifier = ParseExternalIdentifiers(element)
        };
    }

    /// <summary>
    /// Parses a Relationship element. The "to" property may be a single string
    /// or an array; both are normalized into an array. Unrecognized
    /// relationshipType values fall back to Other.
    /// </summary>
    private static Spdx3Relationship ParseRelationship(JsonElement element, string spdxId)
    {
        var toValue = GetStringProperty(element, "to");
        ImmutableArray<string> toTargets = toValue != null
            ? [toValue]
            : GetStringArrayProperty(element, "to");

        var relationshipType = GetStringProperty(element, "relationshipType") ?? "Other";
        if (!Enum.TryParse<Spdx3RelationshipType>(relationshipType, ignoreCase: true, out var relType))
        {
            relType = Spdx3RelationshipType.Other;
        }

        return new Spdx3Relationship
        {
            SpdxId = spdxId,
            Type = GetStringProperty(element, "@type"),
            From = GetStringProperty(element, "from") ?? string.Empty,
            To = toTargets,
            RelationshipType = relType,
            Comment = GetStringProperty(element, "comment"),
            VerifiedUsing = ParseIntegrityMethods(element),
            ExternalRef = ParseExternalRefs(element),
            ExternalIdentifier = ParseExternalIdentifiers(element)
        };
    }

    /// <summary>
    /// Parses an agent element (Person / Organization / Tool).
    /// </summary>
    /// <exception cref="InvalidOperationException">Thrown when <typeparamref name="T"/> is not one of the three supported agent types.</exception>
    private static T ParseAgent<T>(JsonElement element, string spdxId) where T : Spdx3Element
    {
        var name = GetStringProperty(element, "name") ?? string.Empty;

        if (typeof(T) == typeof(Spdx3Person))
        {
            return (T)(object)new Spdx3Person
            {
                SpdxId = spdxId,
                Type = GetStringProperty(element, "@type"),
                Name = name
            };
        }

        if (typeof(T) == typeof(Spdx3Organization))
        {
            return (T)(object)new Spdx3Organization
            {
                SpdxId = spdxId,
                Type = GetStringProperty(element, "@type"),
                Name = name
            };
        }

        if (typeof(T) == typeof(Spdx3Tool))
        {
            return (T)(object)new Spdx3Tool
            {
                SpdxId = spdxId,
                Type = GetStringProperty(element, "@type"),
                Name = name
            };
        }

        throw new InvalidOperationException($"Unsupported agent type: {typeof(T)}");
    }

    /// <summary>
    /// Handles elements whose @type is not recognized: records a warning and skips them.
    /// </summary>
    private static Spdx3Element? ParseGenericElement(
        string spdxId,
        string type,
        List<Spdx3ParseWarning> warnings)
    {
        // For unknown types, we could create a generic element.
        // For now, log a warning and skip.
        warnings.Add(new Spdx3ParseWarning("UNKNOWN_TYPE", $"Unknown element type: {type}", spdxId));
        return null;
    }

    /// <summary>
    /// Extracts an inline creationInfo object from an element, if present.
    /// String-valued creationInfo (a reference) and objects without a specVersion
    /// are skipped.
    /// </summary>
    private static Spdx3CreationInfo? ExtractCreationInfo(JsonElement element)
    {
        if (!element.TryGetProperty("creationInfo", out var ciElement))
        {
            return null;
        }

        if (ciElement.ValueKind == JsonValueKind.String)
        {
            // Reference to CreationInfo - will be resolved later
            return null;
        }

        if (ciElement.ValueKind != JsonValueKind.Object)
        {
            return null;
        }

        var specVersion = GetStringProperty(ciElement, "specVersion");
        if (string.IsNullOrEmpty(specVersion))
        {
            return null;
        }

        var createdStr = GetStringProperty(ciElement, "created");

        // "created" is an ISO-8601 machine timestamp: parse culture-invariantly so
        // the result does not depend on the host's current culture (CA1305).
        if (!DateTimeOffset.TryParse(createdStr, CultureInfo.InvariantCulture, DateTimeStyles.None, out var created))
        {
            // NOTE(review): falling back to wall-clock "now" makes parsing
            // non-deterministic for documents with a missing/invalid timestamp —
            // confirm whether a fixed sentinel would be preferable.
            created = DateTimeOffset.UtcNow;
        }

        var profileStrings = GetStringArrayProperty(ciElement, "profile");
        var profiles = profileStrings
            .Select(p => Spdx3ProfileUris.Parse(p))
            .Where(p => p.HasValue)
            .Select(p => p!.Value)
            .ToImmutableArray();

        return new Spdx3CreationInfo
        {
            Id = GetStringProperty(ciElement, "@id"),
            Type = GetStringProperty(ciElement, "@type"),
            SpecVersion = specVersion,
            Created = created,
            CreatedBy = GetStringArrayProperty(ciElement, "createdBy"),
            CreatedUsing = GetStringArrayProperty(ciElement, "createdUsing"),
            Profile = profiles,
            DataLicense = GetStringProperty(ciElement, "dataLicense"),
            Comment = GetStringProperty(ciElement, "comment")
        };
    }

    /// <summary>
    /// Parses the verifiedUsing array into hash integrity methods. Entries with
    /// unknown algorithms or missing fields are silently skipped; hash values are
    /// normalized to lowercase.
    /// </summary>
    private static ImmutableArray<Spdx3IntegrityMethod> ParseIntegrityMethods(JsonElement element)
    {
        if (!element.TryGetProperty("verifiedUsing", out var verifiedUsing))
        {
            return [];
        }

        if (verifiedUsing.ValueKind != JsonValueKind.Array)
        {
            return [];
        }

        var methods = new List<Spdx3IntegrityMethod>();
        foreach (var item in verifiedUsing.EnumerateArray())
        {
            var algorithm = GetStringProperty(item, "algorithm");
            var hashValue = GetStringProperty(item, "hashValue");
            if (!string.IsNullOrEmpty(algorithm) && !string.IsNullOrEmpty(hashValue))
            {
                // SPDX algorithm names use '-' (e.g. "sha-256"); enum members use '_'.
                if (Enum.TryParse<Spdx3HashAlgorithm>(algorithm.Replace("-", "_"), ignoreCase: true, out var algo))
                {
                    methods.Add(new Spdx3Hash
                    {
                        Type = GetStringProperty(item, "@type"),
                        Algorithm = algo,
                        HashValue = hashValue.ToLowerInvariant(),
                        Comment = GetStringProperty(item, "comment")
                    });
                }
            }
        }

        return [.. methods];
    }

    /// <summary>Parses the externalRef array, if present.</summary>
    private static ImmutableArray<Spdx3ExternalRef> ParseExternalRefs(JsonElement element)
    {
        if (!element.TryGetProperty("externalRef", out var externalRef))
        {
            return [];
        }

        if (externalRef.ValueKind != JsonValueKind.Array)
        {
            return [];
        }

        var refs = new List<Spdx3ExternalRef>();
        foreach (var item in externalRef.EnumerateArray())
        {
            refs.Add(new Spdx3ExternalRef
            {
                Type = GetStringProperty(item, "@type"),
                Locator = GetStringArrayProperty(item, "locator"),
                ContentType = GetStringProperty(item, "contentType"),
                Comment = GetStringProperty(item, "comment")
            });
        }

        return [.. refs];
    }

    /// <summary>
    /// Parses the externalIdentifier array, if present. Entries without an
    /// identifier string are skipped; unknown identifier types are kept with a
    /// null type.
    /// </summary>
    private static ImmutableArray<Spdx3ExternalIdentifier> ParseExternalIdentifiers(JsonElement element)
    {
        if (!element.TryGetProperty("externalIdentifier", out var externalId))
        {
            return [];
        }

        if (externalId.ValueKind != JsonValueKind.Array)
        {
            return [];
        }

        var identifiers = new List<Spdx3ExternalIdentifier>();
        foreach (var item in externalId.EnumerateArray())
        {
            var identifier = GetStringProperty(item, "identifier");
            if (string.IsNullOrEmpty(identifier))
            {
                continue;
            }

            var typeStr = GetStringProperty(item, "externalIdentifierType");
            Spdx3ExternalIdentifierType? idType = null;
            if (!string.IsNullOrEmpty(typeStr) &&
                Enum.TryParse<Spdx3ExternalIdentifierType>(typeStr, ignoreCase: true, out var parsed))
            {
                idType = parsed;
            }

            identifiers.Add(new Spdx3ExternalIdentifier
            {
                Type = GetStringProperty(item, "@type"),
                ExternalIdentifierType = idType,
                Identifier = identifier,
                Comment = GetStringProperty(item, "comment"),
                IssuingAuthority = GetStringProperty(item, "issuingAuthority")
            });
        }

        return [.. identifiers];
    }

    /// <summary>Returns the string value of a property, or null when absent or not a string.</summary>
    private static string? GetStringProperty(JsonElement element, string propertyName)
    {
        if (element.TryGetProperty(propertyName, out var prop) &&
            prop.ValueKind == JsonValueKind.String)
        {
            return prop.GetString();
        }

        return null;
    }

    /// <summary>
    /// Returns a property as a string array, normalizing a lone string into a
    /// one-element array and skipping non-string array items.
    /// </summary>
    private static ImmutableArray<string> GetStringArrayProperty(JsonElement element, string propertyName)
    {
        if (!element.TryGetProperty(propertyName, out var prop))
        {
            return [];
        }

        if (prop.ValueKind == JsonValueKind.String)
        {
            var value = prop.GetString();
            return value != null ? [value] : [];
        }

        if (prop.ValueKind == JsonValueKind.Array)
        {
            var list = new List<string>();
            foreach (var item in prop.EnumerateArray())
            {
                if (item.ValueKind == JsonValueKind.String)
                {
                    var value = item.GetString();
                    if (value != null)
                    {
                        list.Add(value);
                    }
                }
            }

            return [.. list];
        }

        return [];
    }

    /// <summary>
    /// Extracts the first usable @context reference: the string itself, or the
    /// first string entry of a @context array.
    /// </summary>
    private static string? GetContextReference(JsonElement contextElement)
    {
        if (contextElement.ValueKind == JsonValueKind.String)
        {
            return contextElement.GetString();
        }

        if (contextElement.ValueKind == JsonValueKind.Array)
        {
            foreach (var item in contextElement.EnumerateArray())
            {
                if (item.ValueKind == JsonValueKind.String)
                {
                    return item.GetString();
                }
            }
        }

        return null;
    }
}

View File

@@ -0,0 +1,203 @@
// <copyright file="Spdx3VersionDetector.cs" company="StellaOps">
// Copyright (c) StellaOps. Licensed under the AGPL-3.0-or-later.
// </copyright>
using System.Text.Json;
namespace StellaOps.Spdx3;
/// <summary>
/// Detects which SPDX specification version a JSON document was written against.
/// </summary>
public static class Spdx3VersionDetector
{
    /// <summary>
    /// The SPDX versions this detector can distinguish.
    /// </summary>
    public enum SpdxVersion
    {
        /// <summary>
        /// Version could not be determined.
        /// </summary>
        Unknown,

        /// <summary>
        /// SPDX 2.2 (or another pre-2.2 SPDX 2.x release).
        /// </summary>
        Spdx22,

        /// <summary>
        /// SPDX 2.3.
        /// </summary>
        Spdx23,

        /// <summary>
        /// SPDX 3.0.1.
        /// </summary>
        Spdx301
    }

    /// <summary>
    /// Outcome of a detection attempt.
    /// </summary>
    /// <param name="Version">The detected version.</param>
    /// <param name="VersionString">The raw version string, when one was found.</param>
    /// <param name="IsJsonLd">True when the document is JSON-LD (SPDX 3.x style).</param>
    public readonly record struct DetectionResult(
        SpdxVersion Version,
        string? VersionString,
        bool IsJsonLd);

    /// <summary>
    /// Detects the SPDX version from raw JSON text.
    /// </summary>
    /// <param name="json">The JSON content.</param>
    /// <returns>The detection result.</returns>
    public static DetectionResult Detect(string json)
    {
        using var document = JsonDocument.Parse(json);
        return Detect(document.RootElement);
    }

    /// <summary>
    /// Detects the SPDX version from an already-parsed JSON element.
    /// Checks, in order: a JSON-LD @context (3.x), an SPDX 2.x spdxVersion
    /// field, and finally creationInfo.specVersion inside an @graph array.
    /// </summary>
    /// <param name="root">The root JSON element.</param>
    /// <returns>The detection result.</returns>
    public static DetectionResult Detect(JsonElement root)
    {
        // A JSON-LD @context is the SPDX 3.x marker; its presence decides the outcome.
        if (root.TryGetProperty("@context", out var context))
        {
            return DetectFromContext(context);
        }

        // SPDX 2.x documents carry a top-level "spdxVersion" string.
        if (root.TryGetProperty("spdxVersion", out var spdxVersion) &&
            spdxVersion.ValueKind == JsonValueKind.String)
        {
            var versionStr = spdxVersion.GetString();
            if (!string.IsNullOrEmpty(versionStr))
            {
                var classified = ClassifySpdx2Version(versionStr);
                if (classified is not null)
                {
                    return classified.Value;
                }
            }
        }

        // Last resort: look for creationInfo.specVersion inside an @graph array.
        if (TryDetectFromGraph(root, out var graphResult))
        {
            return graphResult;
        }

        return new DetectionResult(SpdxVersion.Unknown, null, false);
    }

    /// <summary>
    /// Detects the SPDX version from a stream.
    /// </summary>
    /// <param name="stream">The input stream.</param>
    /// <param name="cancellationToken">Cancellation token.</param>
    /// <returns>The detection result.</returns>
    public static async Task<DetectionResult> DetectAsync(
        Stream stream,
        CancellationToken cancellationToken = default)
    {
        using var document = await JsonDocument.ParseAsync(stream, cancellationToken: cancellationToken)
            .ConfigureAwait(false);
        return Detect(document.RootElement);
    }

    /// <summary>
    /// Maps a detected version to the parser that should handle it.
    /// </summary>
    /// <param name="version">The detected version.</param>
    /// <returns>A human-readable parser recommendation.</returns>
    public static string GetParserRecommendation(SpdxVersion version) => version switch
    {
        SpdxVersion.Spdx22 or SpdxVersion.Spdx23 => "Use SpdxParser (SPDX 2.x parser)",
        SpdxVersion.Spdx301 => "Use Spdx3Parser (SPDX 3.0.1 parser)",
        _ => "Unknown format - manual inspection required"
    };

    /// <summary>
    /// Classifies a document that carries a JSON-LD @context. Any @context
    /// means SPDX 3.x; the context URL may additionally pin the exact release.
    /// </summary>
    private static DetectionResult DetectFromContext(JsonElement context)
    {
        var contextStr = GetContextString(context);
        if (!string.IsNullOrEmpty(contextStr))
        {
            // Explicit 3.0.1 context URL.
            if (contextStr.Contains("3.0.1", StringComparison.OrdinalIgnoreCase) ||
                contextStr.Contains("spdx.org/rdf/3", StringComparison.OrdinalIgnoreCase))
            {
                return new DetectionResult(SpdxVersion.Spdx301, "3.0.1", true);
            }

            // Generic SPDX RDF context without a pinned release.
            if (contextStr.Contains("spdx.org/rdf", StringComparison.OrdinalIgnoreCase))
            {
                return new DetectionResult(SpdxVersion.Spdx301, null, true);
            }
        }

        // @context present but unrecognized: still treated as SPDX 3.x JSON-LD.
        return new DetectionResult(SpdxVersion.Spdx301, null, true);
    }

    /// <summary>
    /// Classifies an SPDX 2.x "spdxVersion" string, or returns null when the
    /// string does not look like any known 2.x release.
    /// </summary>
    private static DetectionResult? ClassifySpdx2Version(string versionStr)
    {
        if (versionStr.Contains("2.3", StringComparison.OrdinalIgnoreCase))
        {
            return new DetectionResult(SpdxVersion.Spdx23, versionStr, false);
        }

        if (versionStr.Contains("2.2", StringComparison.OrdinalIgnoreCase))
        {
            return new DetectionResult(SpdxVersion.Spdx22, versionStr, false);
        }

        // Any other "SPDX-2.x" release is bucketed with 2.2.
        if (versionStr.StartsWith("SPDX-2", StringComparison.OrdinalIgnoreCase))
        {
            return new DetectionResult(SpdxVersion.Spdx22, versionStr, false);
        }

        return null;
    }

    /// <summary>
    /// Scans an @graph array for an element whose creationInfo.specVersion is
    /// exactly "3.0.1".
    /// </summary>
    private static bool TryDetectFromGraph(JsonElement root, out DetectionResult result)
    {
        result = default;

        if (!root.TryGetProperty("@graph", out var graph) || graph.ValueKind != JsonValueKind.Array)
        {
            return false;
        }

        foreach (var element in graph.EnumerateArray())
        {
            if (element.TryGetProperty("creationInfo", out var creationInfo) &&
                creationInfo.ValueKind == JsonValueKind.Object &&
                creationInfo.TryGetProperty("specVersion", out var specVersion) &&
                specVersion.ValueKind == JsonValueKind.String &&
                specVersion.GetString() == "3.0.1")
            {
                result = new DetectionResult(SpdxVersion.Spdx301, "3.0.1", true);
                return true;
            }
        }

        return false;
    }

    /// <summary>
    /// Extracts a representative string from an @context value: the string
    /// itself, the first array entry mentioning "spdx", or otherwise the first
    /// string entry.
    /// </summary>
    private static string? GetContextString(JsonElement context)
    {
        switch (context.ValueKind)
        {
            case JsonValueKind.String:
                return context.GetString();

            case JsonValueKind.Array:
                string? firstString = null;
                foreach (var item in context.EnumerateArray())
                {
                    if (item.ValueKind != JsonValueKind.String)
                    {
                        continue;
                    }

                    var str = item.GetString();
                    firstString ??= str;
                    if (!string.IsNullOrEmpty(str) &&
                        str.Contains("spdx", StringComparison.OrdinalIgnoreCase))
                    {
                        return str;
                    }
                }

                return firstString;

            default:
                return null;
        }
    }
}

View File

@@ -0,0 +1,25 @@
<Project Sdk="Microsoft.NET.Sdk">
  <!-- SPDX 3.0.1 parsing library. Warnings are promoted to errors to keep the build clean. -->
  <PropertyGroup>
    <TargetFramework>net10.0</TargetFramework>
    <ImplicitUsings>enable</ImplicitUsings>
    <Nullable>enable</Nullable>
    <LangVersion>preview</LangVersion>
    <RootNamespace>StellaOps.Spdx3</RootNamespace>
    <AssemblyName>StellaOps.Spdx3</AssemblyName>
    <TreatWarningsAsErrors>true</TreatWarningsAsErrors>
    <Description>SPDX 3.0.1 parsing library with full profile support for StellaOps</Description>
  </PropertyGroup>
  <!-- Package versions are centrally managed (no Version attributes here). -->
  <ItemGroup>
    <PackageReference Include="Microsoft.Extensions.Caching.Memory" />
    <PackageReference Include="Microsoft.Extensions.Http" />
    <PackageReference Include="Microsoft.Extensions.Logging.Abstractions" />
    <PackageReference Include="Microsoft.Extensions.Options" />
  </ItemGroup>
  <!-- Canonical JSON serialization used for deterministic output. -->
  <ItemGroup>
    <ProjectReference Include="..\StellaOps.Canonical.Json\StellaOps.Canonical.Json.csproj" />
  </ItemGroup>
</Project>

View File

@@ -0,0 +1,155 @@
// <copyright file="ISpdx3Validator.cs" company="StellaOps">
// Copyright (c) StellaOps. Licensed under the AGPL-3.0-or-later.
// </copyright>
using System.Collections.Immutable;
using StellaOps.Spdx3.Model;
namespace StellaOps.Spdx3.Validation;
/// <summary>
/// Interface for SPDX 3.0.1 document validation.
/// Implementations check structural requirements (ids, relationships),
/// profile conformance, and identifier/hash formats.
/// </summary>
public interface ISpdx3Validator
{
    /// <summary>
    /// Validates an SPDX 3.0.1 document.
    /// </summary>
    /// <param name="document">The document to validate.</param>
    /// <param name="options">Validation options; pass null to use the defaults.</param>
    /// <returns>The validation result containing errors, warnings and informational notes.</returns>
    Spdx3ValidationResult Validate(
        Spdx3Document document,
        Spdx3ValidationOptions? options = null);
}
/// <summary>
/// Options for SPDX 3.0.1 validation. All checks are enabled by default;
/// disable individual flags to skip the corresponding validation pass.
/// </summary>
public sealed class Spdx3ValidationOptions
{
    /// <summary>
    /// Gets or sets whether to validate profile-specific requirements
    /// (e.g. Software/Lite profile field rules).
    /// </summary>
    public bool ValidateProfiles { get; set; } = true;

    /// <summary>
    /// Gets or sets whether to validate relationships (dangling from/to
    /// references and empty target lists).
    /// </summary>
    public bool ValidateRelationships { get; set; } = true;

    /// <summary>
    /// Gets or sets whether to validate external identifiers (PURL format, CPE 2.3, etc.).
    /// </summary>
    public bool ValidateExternalIdentifiers { get; set; } = true;

    /// <summary>
    /// Gets or sets whether to validate hash values (hex format, expected length,
    /// deprecated algorithms).
    /// </summary>
    public bool ValidateHashes { get; set; } = true;

    /// <summary>
    /// Gets or sets whether warnings should be promoted to errors, failing validation.
    /// </summary>
    public bool TreatWarningsAsErrors { get; set; } = false;

    /// <summary>
    /// Gets or sets the set of profiles the document must conform to.
    /// A missing required profile produces a validation error.
    /// </summary>
    public ImmutableHashSet<Spdx3ProfileIdentifier> RequiredProfiles { get; set; } =
        ImmutableHashSet<Spdx3ProfileIdentifier>.Empty;
}
/// <summary>
/// Outcome of validating an SPDX 3.0.1 document: a pass/fail flag plus the
/// collected errors, warnings, and informational notes.
/// </summary>
public sealed record Spdx3ValidationResult
{
    /// <summary>
    /// Gets whether validation passed (no errors).
    /// </summary>
    public required bool IsValid { get; init; }

    /// <summary>
    /// Gets the fatal validation errors.
    /// </summary>
    public IReadOnlyList<Spdx3ValidationIssue> Errors { get; init; } = [];

    /// <summary>
    /// Gets the non-fatal validation warnings.
    /// </summary>
    public IReadOnlyList<Spdx3ValidationIssue> Warnings { get; init; } = [];

    /// <summary>
    /// Gets informational validation notes.
    /// </summary>
    public IReadOnlyList<Spdx3ValidationIssue> Info { get; init; } = [];

    /// <summary>
    /// Builds a passing result, optionally carrying warnings and info notes.
    /// </summary>
    public static Spdx3ValidationResult Valid(
        IReadOnlyList<Spdx3ValidationIssue>? warnings = null,
        IReadOnlyList<Spdx3ValidationIssue>? info = null) => new()
    {
        IsValid = true,
        Warnings = warnings ?? [],
        Info = info ?? []
    };

    /// <summary>
    /// Builds a failing result from the given errors, optionally carrying
    /// warnings and info notes.
    /// </summary>
    public static Spdx3ValidationResult Invalid(
        IReadOnlyList<Spdx3ValidationIssue> errors,
        IReadOnlyList<Spdx3ValidationIssue>? warnings = null,
        IReadOnlyList<Spdx3ValidationIssue>? info = null) => new()
    {
        IsValid = false,
        Errors = errors,
        Warnings = warnings ?? [],
        Info = info ?? []
    };
}
/// <summary>
/// Represents a single validation issue found in an SPDX 3.0.1 document.
/// </summary>
/// <param name="Code">Stable machine-readable issue code (e.g. "DUPLICATE_SPDX_ID").</param>
/// <param name="Message">Human-readable issue message.</param>
/// <param name="Severity">Issue severity (info, warning, or error).</param>
/// <param name="ElementId">The SPDX ID of the affected element, when known.</param>
/// <param name="Path">JSON path or property name the issue refers to, when known.</param>
public sealed record Spdx3ValidationIssue(
    string Code,
    string Message,
    Spdx3ValidationSeverity Severity,
    string? ElementId = null,
    string? Path = null);
/// <summary>
/// Validation issue severity, ordered from least to most severe.
/// </summary>
public enum Spdx3ValidationSeverity
{
    /// <summary>
    /// Informational message; never affects the validation outcome.
    /// </summary>
    Info,

    /// <summary>
    /// Warning (non-fatal) unless warnings are promoted to errors via options.
    /// </summary>
    Warning,

    /// <summary>
    /// Error (fatal); any error makes the result invalid.
    /// </summary>
    Error
}

View File

@@ -0,0 +1,344 @@
// <copyright file="Spdx3Validator.cs" company="StellaOps">
// Copyright (c) StellaOps. Licensed under the AGPL-3.0-or-later.
// </copyright>
using System.Text.RegularExpressions;
using StellaOps.Spdx3.Model;
using StellaOps.Spdx3.Model.Software;
namespace StellaOps.Spdx3.Validation;
/// <summary>
/// SPDX 3.0.1 document validator implementation.
/// </summary>
/// <remarks>
/// Runs up to five ordered passes (core, profiles, relationships, external
/// identifiers, hashes) and aggregates issues into error / warning / info
/// lists. Pass order determines the order of reported issues.
/// </remarks>
public sealed partial class Spdx3Validator : ISpdx3Validator
{
    // Loose PURL shape check ("pkg:<type>/<rest>"), not a full purl-spec grammar.
    [GeneratedRegex(@"^pkg:[a-z]+/.+", RegexOptions.IgnoreCase)]
    private static partial Regex PurlPattern();

    // CPE 2.3 prefix check (scheme + part field only), not a full CPE grammar.
    [GeneratedRegex(@"^cpe:2\.3:[aho\*\-]", RegexOptions.IgnoreCase)]
    private static partial Regex Cpe23Pattern();

    // Hex digits only, either case; used for hash value syntax.
    [GeneratedRegex(@"^[a-fA-F0-9]+$")]
    private static partial Regex HexPattern();

    /// <inheritdoc />
    public Spdx3ValidationResult Validate(
        Spdx3Document document,
        Spdx3ValidationOptions? options = null)
    {
        options ??= new Spdx3ValidationOptions();
        var errors = new List<Spdx3ValidationIssue>();
        var warnings = new List<Spdx3ValidationIssue>();
        var info = new List<Spdx3ValidationIssue>();

        // Core validation (always runs): ids, duplicates, basic document shape.
        ValidateCore(document, errors, warnings, info);

        // Profile validation
        if (options.ValidateProfiles)
        {
            ValidateProfiles(document, options, errors, warnings, info);
        }

        // Relationship validation
        if (options.ValidateRelationships)
        {
            ValidateRelationships(document, errors, warnings, info);
        }

        // External identifier validation
        if (options.ValidateExternalIdentifiers)
        {
            ValidateExternalIdentifiers(document, errors, warnings, info);
        }

        // Hash validation
        if (options.ValidateHashes)
        {
            ValidateHashes(document, errors, warnings, info);
        }

        // Treat warnings as errors if configured: warnings are re-stamped with
        // Error severity and moved into the error list.
        if (options.TreatWarningsAsErrors && warnings.Count > 0)
        {
            errors.AddRange(warnings.Select(w => w with { Severity = Spdx3ValidationSeverity.Error }));
            warnings.Clear();
        }

        if (errors.Count > 0)
        {
            return Spdx3ValidationResult.Invalid(errors, warnings, info);
        }

        return Spdx3ValidationResult.Valid(warnings, info);
    }

    // Core pass: non-empty document, spdxIds present and unique, root element
    // present on the SpdxDocument, plus a stats info note.
    private static void ValidateCore(
        Spdx3Document document,
        List<Spdx3ValidationIssue> errors,
        List<Spdx3ValidationIssue> warnings,
        List<Spdx3ValidationIssue> info)
    {
        // Must have at least one element; an empty document short-circuits the pass.
        if (document.AllElements.Length == 0)
        {
            errors.Add(new Spdx3ValidationIssue(
                "EMPTY_DOCUMENT",
                "Document contains no elements",
                Spdx3ValidationSeverity.Error));
            return;
        }

        // Validate each element has required fields
        foreach (var element in document.AllElements)
        {
            if (string.IsNullOrWhiteSpace(element.SpdxId))
            {
                errors.Add(new Spdx3ValidationIssue(
                    "MISSING_SPDX_ID",
                    "Element is missing spdxId",
                    Spdx3ValidationSeverity.Error,
                    Path: element.Type));
            }
        }

        // Check for duplicate spdxIds
        var duplicates = document.AllElements
            .GroupBy(e => e.SpdxId)
            .Where(g => g.Count() > 1)
            .Select(g => g.Key)
            .ToList();

        foreach (var dup in duplicates)
        {
            errors.Add(new Spdx3ValidationIssue(
                "DUPLICATE_SPDX_ID",
                $"Duplicate spdxId found: {dup}",
                Spdx3ValidationSeverity.Error,
                ElementId: dup));
        }

        // Validate SpdxDocument if present
        if (document.SpdxDocument != null)
        {
            if (document.SpdxDocument.RootElement.IsEmpty)
            {
                warnings.Add(new Spdx3ValidationIssue(
                    "NO_ROOT_ELEMENT",
                    "SpdxDocument has no rootElement specified",
                    Spdx3ValidationSeverity.Warning,
                    ElementId: document.SpdxDocument.SpdxId));
            }
        }

        // Info about document contents
        info.Add(new Spdx3ValidationIssue(
            "DOCUMENT_STATS",
            $"Document contains {document.Packages.Length} packages, " +
            $"{document.Files.Length} files, " +
            $"{document.Relationships.Length} relationships",
            Spdx3ValidationSeverity.Info));
    }

    // Profile pass: required profiles from options, then Software/Lite
    // field requirements, then an info note listing detected profiles.
    private static void ValidateProfiles(
        Spdx3Document document,
        Spdx3ValidationOptions options,
        List<Spdx3ValidationIssue> errors,
        List<Spdx3ValidationIssue> warnings,
        List<Spdx3ValidationIssue> info)
    {
        // Check required profiles
        foreach (var required in options.RequiredProfiles)
        {
            if (!document.ConformsTo(required))
            {
                errors.Add(new Spdx3ValidationIssue(
                    "MISSING_REQUIRED_PROFILE",
                    $"Document does not conform to required profile: {required}",
                    Spdx3ValidationSeverity.Error));
            }
        }

        // Validate Software profile requirements
        if (document.ConformsTo(Spdx3ProfileIdentifier.Software))
        {
            foreach (var package in document.Packages)
            {
                // Software profile packages should have a name (warning only).
                if (string.IsNullOrWhiteSpace(package.Name))
                {
                    warnings.Add(new Spdx3ValidationIssue(
                        "PACKAGE_MISSING_NAME",
                        "Package is missing name (recommended for Software profile)",
                        Spdx3ValidationSeverity.Warning,
                        ElementId: package.SpdxId));
                }
            }
        }

        // Validate Lite profile requirements
        if (document.ConformsTo(Spdx3ProfileIdentifier.Lite))
        {
            foreach (var package in document.Packages)
            {
                // Lite profile requires name and version.
                // NOTE(review): only the name is actually checked here; the
                // version requirement mentioned in this comment is not enforced —
                // confirm whether a PACKAGE_MISSING_VERSION check was intended.
                if (string.IsNullOrWhiteSpace(package.Name))
                {
                    errors.Add(new Spdx3ValidationIssue(
                        "LITE_PROFILE_MISSING_NAME",
                        "Lite profile requires package name",
                        Spdx3ValidationSeverity.Error,
                        ElementId: package.SpdxId));
                }
            }
        }

        // Report detected profiles
        var profileNames = string.Join(", ", document.Profiles.Select(p => p.ToString()));
        if (!string.IsNullOrEmpty(profileNames))
        {
            info.Add(new Spdx3ValidationIssue(
                "DETECTED_PROFILES",
                $"Document conforms to profiles: {profileNames}",
                Spdx3ValidationSeverity.Info));
        }
    }

    // Relationship pass: dangling 'from'/'to' references (warnings) and
    // relationships with no targets at all (error).
    private static void ValidateRelationships(
        Spdx3Document document,
        List<Spdx3ValidationIssue> errors,
        List<Spdx3ValidationIssue> warnings,
        List<Spdx3ValidationIssue> info)
    {
        // NOTE(review): membership is built from document.Elements while the
        // duplicate check above uses document.AllElements — confirm both views
        // cover the same set of ids, otherwise valid references may warn here.
        var elementIds = document.Elements.Select(e => e.SpdxId).ToHashSet(StringComparer.Ordinal);

        foreach (var relationship in document.Relationships)
        {
            // Validate 'from' reference
            if (!elementIds.Contains(relationship.From))
            {
                warnings.Add(new Spdx3ValidationIssue(
                    "DANGLING_RELATIONSHIP_FROM",
                    $"Relationship 'from' references unknown element: {relationship.From}",
                    Spdx3ValidationSeverity.Warning,
                    ElementId: relationship.SpdxId));
            }

            // Validate 'to' references
            foreach (var to in relationship.To)
            {
                if (!elementIds.Contains(to))
                {
                    warnings.Add(new Spdx3ValidationIssue(
                        "DANGLING_RELATIONSHIP_TO",
                        $"Relationship 'to' references unknown element: {to}",
                        Spdx3ValidationSeverity.Warning,
                        ElementId: relationship.SpdxId));
                }
            }

            // Validate relationship has targets
            if (relationship.To.IsEmpty)
            {
                errors.Add(new Spdx3ValidationIssue(
                    "EMPTY_RELATIONSHIP_TO",
                    "Relationship has no 'to' targets",
                    Spdx3ValidationSeverity.Error,
                    ElementId: relationship.SpdxId));
            }
        }
    }

    // External identifier pass: syntax checks for PURL and CPE 2.3 identifiers.
    // (errors/info parameters are currently unused; kept for signature symmetry
    // with the other validation passes.)
    private static void ValidateExternalIdentifiers(
        Spdx3Document document,
        List<Spdx3ValidationIssue> errors,
        List<Spdx3ValidationIssue> warnings,
        List<Spdx3ValidationIssue> info)
    {
        foreach (var element in document.Elements)
        {
            foreach (var extId in element.ExternalIdentifier)
            {
                switch (extId.ExternalIdentifierType)
                {
                    case Spdx3ExternalIdentifierType.PackageUrl:
                        if (!PurlPattern().IsMatch(extId.Identifier))
                        {
                            warnings.Add(new Spdx3ValidationIssue(
                                "INVALID_PURL_FORMAT",
                                $"Invalid PURL format: {extId.Identifier}",
                                Spdx3ValidationSeverity.Warning,
                                ElementId: element.SpdxId,
                                Path: "externalIdentifier/identifier"));
                        }

                        break;

                    case Spdx3ExternalIdentifierType.Cpe23:
                        if (!Cpe23Pattern().IsMatch(extId.Identifier))
                        {
                            warnings.Add(new Spdx3ValidationIssue(
                                "INVALID_CPE23_FORMAT",
                                $"Invalid CPE 2.3 format: {extId.Identifier}",
                                Spdx3ValidationSeverity.Warning,
                                ElementId: element.SpdxId,
                                Path: "externalIdentifier/identifier"));
                        }

                        break;
                }
            }
        }
    }

    // Hash pass: hex syntax (error), expected digest length and deprecated
    // algorithms (warnings). (info parameter is currently unused; kept for
    // signature symmetry with the other validation passes.)
    private static void ValidateHashes(
        Spdx3Document document,
        List<Spdx3ValidationIssue> errors,
        List<Spdx3ValidationIssue> warnings,
        List<Spdx3ValidationIssue> info)
    {
        foreach (var element in document.Elements)
        {
            foreach (var integrity in element.VerifiedUsing)
            {
                if (integrity is Spdx3Hash hash)
                {
                    // Validate hex format
                    if (!HexPattern().IsMatch(hash.HashValue))
                    {
                        errors.Add(new Spdx3ValidationIssue(
                            "INVALID_HASH_FORMAT",
                            $"Hash value is not valid hex: {hash.HashValue}",
                            Spdx3ValidationSeverity.Error,
                            ElementId: element.SpdxId,
                            Path: "verifiedUsing/hashValue"));
                    }

                    // Validate hash length
                    if (!hash.IsValidLength())
                    {
                        warnings.Add(new Spdx3ValidationIssue(
                            "INVALID_HASH_LENGTH",
                            $"Hash value has unexpected length for {hash.Algorithm}: expected {hash.GetExpectedLength()}, got {hash.HashValue.Length}",
                            Spdx3ValidationSeverity.Warning,
                            ElementId: element.SpdxId,
                            Path: "verifiedUsing/hashValue"));
                    }

                    // Warn about deprecated algorithms
                    if (hash.Algorithm.IsDeprecated())
                    {
                        warnings.Add(new Spdx3ValidationIssue(
                            "DEPRECATED_HASH_ALGORITHM",
                            $"Hash algorithm {hash.Algorithm} is deprecated",
                            Spdx3ValidationSeverity.Warning,
                            ElementId: element.SpdxId,
                            Path: "verifiedUsing/algorithm"));
                    }
                }
            }
        }
    }
}

View File

@@ -2,6 +2,7 @@
// Sprint: SPRINT_1227_0014_0001_BE_stellaverdict_consolidation
// Task 9: Verdict Replay Bundle Exporter
using System.Globalization;
using System.IO.Compression;
using System.Text;
using System.Text.Json;
@@ -190,7 +191,7 @@ public sealed class VerdictBundleExporter : IVerdictBundleExporter
var manifest = new BundleManifest
{
Version = "1.0",
CreatedAt = _timeProvider.GetUtcNow().ToString("O"),
CreatedAt = _timeProvider.GetUtcNow().ToString("O", CultureInfo.InvariantCulture),
VerdictId = verdict.VerdictId,
VulnerabilityId = verdict.Subject.VulnerabilityId,
Purl = verdict.Subject.Purl,
@@ -296,7 +297,7 @@ public sealed class VerdictBundleExporter : IVerdictBundleExporter
var manifest = new BundleManifest
{
Version = "1.0",
CreatedAt = _timeProvider.GetUtcNow().ToString("O"),
CreatedAt = _timeProvider.GetUtcNow().ToString("O", CultureInfo.InvariantCulture),
VerdictId = verdict.VerdictId,
VulnerabilityId = verdict.Subject.VulnerabilityId,
Purl = verdict.Subject.Purl,

View File

@@ -2,6 +2,7 @@
// Sprint: SPRINT_1227_0014_0001_BE_stellaverdict_consolidation
// Task 6: OCI Attestation Publisher
using System.Globalization;
using System.Text;
using System.Text.Json;
using Microsoft.Extensions.Logging;
@@ -551,7 +552,7 @@ public sealed class OciAttestationPublisher : IOciAttestationPublisher
: null,
annotations = new Dictionary<string, string>
{
["org.opencontainers.image.created"] = _timeProvider.GetUtcNow().ToString("O"),
["org.opencontainers.image.created"] = _timeProvider.GetUtcNow().ToString("O", CultureInfo.InvariantCulture),
["org.stellaops.verdict.version"] = verdict.Version
}
};

View File

@@ -5,10 +5,6 @@
<Nullable>enable</Nullable>
</PropertyGroup>
<ItemGroup>
<PackageReference Include="xunit.runner.visualstudio" >
<IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
<PrivateAssets>all</PrivateAssets>
</PackageReference>
<PackageReference Include="coverlet.collector" >
<IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
<PrivateAssets>all</PrivateAssets>

View File

@@ -8,10 +8,6 @@
<ItemGroup>
<PackageReference Include="FluentAssertions" />
<PackageReference Include="xunit.runner.visualstudio" >
<PrivateAssets>all</PrivateAssets>
<IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
</PackageReference>
</ItemGroup>
<ItemGroup>

View File

@@ -10,10 +10,6 @@
<ItemGroup>
<PackageReference Include="FluentAssertions" />
<PackageReference Include="Moq" />
<PackageReference Include="xunit.runner.visualstudio" >
<PrivateAssets>all</PrivateAssets>
<IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
</PackageReference>
</ItemGroup>
<ItemGroup>

View File

@@ -0,0 +1,22 @@
# Eventing Tests Charter
## Mission
- Verify eventing envelope contracts and deterministic serialization.
## Responsibilities
- Cover schema validation, parsing, and round-trip serialization.
- Exercise edge cases and verify deterministic behavior.
## Required Reading
- docs/README.md
- docs/07_HIGH_LEVEL_ARCHITECTURE.md
- docs/modules/platform/architecture-overview.md
- docs/modules/eventing/event-envelope-schema.md
## Working Agreement
- Use fixed times and IDs for deterministic tests.
- Avoid network dependencies in tests.
## Testing Strategy
- Unit tests for schema and serialization.
- Regression tests for envelope compatibility.

View File

@@ -0,0 +1,154 @@
// Copyright (c) StellaOps. Licensed under the AGPL-3.0-or-later.
using FluentAssertions;
using StellaOps.Eventing.Internal;
using StellaOps.HybridLogicalClock;
using Xunit;
namespace StellaOps.Eventing.Tests;
/// <summary>
/// Unit tests for <c>EventIdGenerator</c>: event IDs must be deterministic
/// functions of (correlationId, HLC timestamp, service, kind), and payload
/// digests must be stable 32-byte SHA-256 hashes of the payload text.
/// </summary>
[Trait("Category", "Unit")]
public sealed class EventIdGeneratorTests
{
    [Fact]
    public void Generate_SameInputs_ProducesSameId()
    {
        // Arrange
        var correlationId = "scan-abc123";
        var tHlc = new HlcTimestamp(1704585600000, 0, "node1");
        var service = "Scheduler";
        var kind = "ENQUEUE";

        // Act
        var id1 = EventIdGenerator.Generate(correlationId, tHlc, service, kind);
        var id2 = EventIdGenerator.Generate(correlationId, tHlc, service, kind);

        // Assert
        id1.Should().Be(id2);
    }

    [Fact]
    public void Generate_DifferentCorrelationId_ProducesDifferentId()
    {
        // Arrange
        var tHlc = new HlcTimestamp(1704585600000, 0, "node1");
        var service = "Scheduler";
        var kind = "ENQUEUE";

        // Act
        var id1 = EventIdGenerator.Generate("scan-abc123", tHlc, service, kind);
        var id2 = EventIdGenerator.Generate("scan-xyz789", tHlc, service, kind);

        // Assert
        id1.Should().NotBe(id2);
    }

    [Fact]
    public void Generate_DifferentHlc_ProducesDifferentId()
    {
        // Arrange — timestamps differ only in the logical counter.
        var correlationId = "scan-abc123";
        var tHlc1 = new HlcTimestamp(1704585600000, 0, "node1");
        var tHlc2 = new HlcTimestamp(1704585600000, 1, "node1");
        var service = "Scheduler";
        var kind = "ENQUEUE";

        // Act
        var id1 = EventIdGenerator.Generate(correlationId, tHlc1, service, kind);
        var id2 = EventIdGenerator.Generate(correlationId, tHlc2, service, kind);

        // Assert
        id1.Should().NotBe(id2);
    }

    [Fact]
    public void Generate_DifferentService_ProducesDifferentId()
    {
        // Arrange
        var correlationId = "scan-abc123";
        var tHlc = new HlcTimestamp(1704585600000, 0, "node1");
        var kind = "ENQUEUE";

        // Act
        var id1 = EventIdGenerator.Generate(correlationId, tHlc, "Scheduler", kind);
        var id2 = EventIdGenerator.Generate(correlationId, tHlc, "AirGap", kind);

        // Assert
        id1.Should().NotBe(id2);
    }

    [Fact]
    public void Generate_DifferentKind_ProducesDifferentId()
    {
        // Arrange
        var correlationId = "scan-abc123";
        var tHlc = new HlcTimestamp(1704585600000, 0, "node1");
        var service = "Scheduler";

        // Act
        var id1 = EventIdGenerator.Generate(correlationId, tHlc, service, "ENQUEUE");
        var id2 = EventIdGenerator.Generate(correlationId, tHlc, service, "EXECUTE");

        // Assert
        id1.Should().NotBe(id2);
    }

    [Fact]
    public void Generate_ReturnsLowercaseHex32Chars()
    {
        // Arrange
        var correlationId = "scan-abc123";
        var tHlc = new HlcTimestamp(1704585600000, 0, "node1");
        var service = "Scheduler";
        var kind = "ENQUEUE";

        // Act
        var id = EventIdGenerator.Generate(correlationId, tHlc, service, kind);

        // Assert
        id.Should().HaveLength(32);
        id.Should().MatchRegex("^[a-f0-9]{32}$");
    }

    [Fact]
    public void ComputePayloadDigest_SamePayload_ProducesSameDigest()
    {
        // Arrange
        var payload = """{"key":"value"}""";

        // Act
        var digest1 = EventIdGenerator.ComputePayloadDigest(payload);
        var digest2 = EventIdGenerator.ComputePayloadDigest(payload);

        // Assert
        // Equal() compares elements in order, which is required for raw digest
        // bytes; BeEquivalentTo ignores ordering for collections by default.
        digest1.Should().Equal(digest2);
    }

    [Fact]
    public void ComputePayloadDigest_DifferentPayload_ProducesDifferentDigest()
    {
        // Arrange
        var payload1 = """{"key":"value1"}""";
        var payload2 = """{"key":"value2"}""";

        // Act
        var digest1 = EventIdGenerator.ComputePayloadDigest(payload1);
        var digest2 = EventIdGenerator.ComputePayloadDigest(payload2);

        // Assert — ordered inequality; NotBeEquivalentTo would treat
        // permutations of the same bytes as "equivalent".
        digest1.Should().NotEqual(digest2);
    }

    [Fact]
    public void ComputePayloadDigest_Returns32Bytes()
    {
        // Arrange
        var payload = """{"key":"value"}""";

        // Act
        var digest = EventIdGenerator.ComputePayloadDigest(payload);

        // Assert
        digest.Should().HaveCount(32); // SHA-256 = 256 bits = 32 bytes
    }
}

View File

@@ -0,0 +1,212 @@
// Copyright (c) StellaOps. Licensed under the AGPL-3.0-or-later.
using FluentAssertions;
using StellaOps.Eventing.Models;
using StellaOps.Eventing.Storage;
using StellaOps.HybridLogicalClock;
using Xunit;
namespace StellaOps.Eventing.Tests;
/// <summary>
/// Unit tests for <c>InMemoryTimelineEventStore</c>: append (idempotent, batch),
/// HLC-ordered retrieval, pagination, range/service filtering, and counting.
/// </summary>
[Trait("Category", "Unit")]
public sealed class InMemoryTimelineEventStoreTests
{
    private readonly InMemoryTimelineEventStore _store;

    public InMemoryTimelineEventStoreTests()
    {
        _store = new InMemoryTimelineEventStore();
    }

    /// <summary>
    /// Builds a minimal valid event; EventId is derived from the inputs so two
    /// calls with identical arguments produce the same id (used by the
    /// idempotency test).
    /// </summary>
    private static TimelineEvent CreateEvent(
        string correlationId,
        string kind,
        HlcTimestamp hlc,
        string service = "TestService")
    {
        return new TimelineEvent
        {
            EventId = $"{correlationId}-{kind}-{hlc.LogicalCounter}",
            CorrelationId = correlationId,
            Kind = kind,
            THlc = hlc,
            TsWall = DateTimeOffset.UtcNow,
            Service = service,
            Payload = "{}",
            PayloadDigest = new byte[32],
            EngineVersion = new EngineVersionRef("Test", "1.0.0", "test"),
            SchemaVersion = 1
        };
    }

    [Fact]
    public async Task AppendAsync_StoresEvent()
    {
        // Arrange
        var e = CreateEvent("corr-1", "ENQUEUE", new HlcTimestamp(1000, 0, "n1"));

        // Act
        await _store.AppendAsync(e);

        // Assert
        var retrieved = await _store.GetByIdAsync(e.EventId);
        retrieved.Should().NotBeNull();
        retrieved!.EventId.Should().Be(e.EventId);
    }

    [Fact]
    public async Task AppendAsync_Idempotent_DoesNotDuplicate()
    {
        // Arrange
        var e = CreateEvent("corr-1", "ENQUEUE", new HlcTimestamp(1000, 0, "n1"));

        // Act
        await _store.AppendAsync(e);
        await _store.AppendAsync(e); // Duplicate

        // Assert
        var count = await _store.CountByCorrelationIdAsync("corr-1");
        count.Should().Be(1);
    }

    [Fact]
    public async Task GetByCorrelationIdAsync_ReturnsOrderedByHlc()
    {
        // Arrange
        var hlc1 = new HlcTimestamp(1000, 0, "n1");
        var hlc2 = new HlcTimestamp(1000, 1, "n1");
        var hlc3 = new HlcTimestamp(2000, 0, "n1");

        // Insert out of order
        await _store.AppendAsync(CreateEvent("corr-1", "C", hlc3));
        await _store.AppendAsync(CreateEvent("corr-1", "A", hlc1));
        await _store.AppendAsync(CreateEvent("corr-1", "B", hlc2));

        // Act
        var events = await _store.GetByCorrelationIdAsync("corr-1");

        // Assert
        events.Should().HaveCount(3);
        events[0].Kind.Should().Be("A");
        events[1].Kind.Should().Be("B");
        events[2].Kind.Should().Be("C");
    }

    [Fact]
    public async Task GetByCorrelationIdAsync_Pagination_Works()
    {
        // Arrange
        for (int i = 0; i < 10; i++)
        {
            await _store.AppendAsync(CreateEvent("corr-1", $"E{i}", new HlcTimestamp(1000 + i, 0, "n1")));
        }

        // Act
        var page1 = await _store.GetByCorrelationIdAsync("corr-1", limit: 3, offset: 0);
        var page2 = await _store.GetByCorrelationIdAsync("corr-1", limit: 3, offset: 3);

        // Assert
        page1.Should().HaveCount(3);
        page2.Should().HaveCount(3);
        page1[0].Kind.Should().Be("E0");
        page2[0].Kind.Should().Be("E3");
    }

    [Fact]
    public async Task GetByHlcRangeAsync_FiltersCorrectly()
    {
        // Arrange
        await _store.AppendAsync(CreateEvent("corr-1", "A", new HlcTimestamp(1000, 0, "n1")));
        await _store.AppendAsync(CreateEvent("corr-1", "B", new HlcTimestamp(2000, 0, "n1")));
        await _store.AppendAsync(CreateEvent("corr-1", "C", new HlcTimestamp(3000, 0, "n1")));
        await _store.AppendAsync(CreateEvent("corr-1", "D", new HlcTimestamp(4000, 0, "n1")));

        // Act — bounds appear inclusive on both ends: B and C are returned.
        var events = await _store.GetByHlcRangeAsync(
            "corr-1",
            new HlcTimestamp(2000, 0, "n1"),
            new HlcTimestamp(3000, 0, "n1"));

        // Assert
        events.Should().HaveCount(2);
        events[0].Kind.Should().Be("B");
        events[1].Kind.Should().Be("C");
    }

    [Fact]
    public async Task GetByServiceAsync_FiltersCorrectly()
    {
        // Arrange
        await _store.AppendAsync(CreateEvent("corr-1", "A", new HlcTimestamp(1000, 0, "n1"), "Scheduler"));
        await _store.AppendAsync(CreateEvent("corr-2", "B", new HlcTimestamp(2000, 0, "n1"), "AirGap"));
        await _store.AppendAsync(CreateEvent("corr-3", "C", new HlcTimestamp(3000, 0, "n1"), "Scheduler"));

        // Act
        var events = await _store.GetByServiceAsync("Scheduler");

        // Assert — OnlyContain reports the offending element on failure,
        // unlike All(...).Should().BeTrue().
        events.Should().HaveCount(2);
        events.Should().OnlyContain(e => e.Service == "Scheduler");
    }

    [Fact]
    public async Task GetByIdAsync_NotFound_ReturnsNull()
    {
        // Act
        var result = await _store.GetByIdAsync("nonexistent");

        // Assert
        result.Should().BeNull();
    }

    [Fact]
    public async Task CountByCorrelationIdAsync_ReturnsCorrectCount()
    {
        // Arrange
        await _store.AppendAsync(CreateEvent("corr-1", "A", new HlcTimestamp(1000, 0, "n1")));
        await _store.AppendAsync(CreateEvent("corr-1", "B", new HlcTimestamp(2000, 0, "n1")));
        await _store.AppendAsync(CreateEvent("corr-2", "C", new HlcTimestamp(3000, 0, "n1")));

        // Act
        var count1 = await _store.CountByCorrelationIdAsync("corr-1");
        var count2 = await _store.CountByCorrelationIdAsync("corr-2");
        var count3 = await _store.CountByCorrelationIdAsync("corr-3");

        // Assert
        count1.Should().Be(2);
        count2.Should().Be(1);
        count3.Should().Be(0);
    }

    [Fact]
    public async Task AppendBatchAsync_StoresAllEvents()
    {
        // Arrange
        var events = new[]
        {
            CreateEvent("corr-1", "A", new HlcTimestamp(1000, 0, "n1")),
            CreateEvent("corr-1", "B", new HlcTimestamp(2000, 0, "n1")),
            CreateEvent("corr-1", "C", new HlcTimestamp(3000, 0, "n1"))
        };

        // Act
        await _store.AppendBatchAsync(events);

        // Assert
        var count = await _store.CountByCorrelationIdAsync("corr-1");
        count.Should().Be(3);
    }

    [Fact]
    public async Task Clear_RemovesAllEvents()
    {
        // Arrange — await instead of .Wait(): blocking on async code in tests
        // risks deadlocks/thread-pool starvation and wraps failures in
        // AggregateException.
        await _store.AppendAsync(CreateEvent("corr-1", "A", new HlcTimestamp(1000, 0, "n1")));

        // Act
        _store.Clear();

        // Assert
        _store.GetAll().Should().BeEmpty();
    }
}

View File

@@ -0,0 +1,33 @@
<!-- Unit-test project for StellaOps.Eventing. Uses xUnit with Moq/FluentAssertions
     and Microsoft.Extensions.TimeProvider.Testing (FakeTimeProvider) for deterministic time.
     Package versions are centrally managed (no Version attributes here). -->
<Project Sdk="Microsoft.NET.Sdk">
  <PropertyGroup>
    <TargetFramework>net10.0</TargetFramework>
    <ImplicitUsings>enable</ImplicitUsings>
    <Nullable>enable</Nullable>
    <IsPackable>false</IsPackable>
    <TreatWarningsAsErrors>true</TreatWarningsAsErrors>
    <RootNamespace>StellaOps.Eventing.Tests</RootNamespace>
  </PropertyGroup>
  <ItemGroup>
    <ProjectReference Include="..\..\StellaOps.Eventing\StellaOps.Eventing.csproj" />
    <ProjectReference Include="..\..\StellaOps.HybridLogicalClock\StellaOps.HybridLogicalClock.csproj" />
  </ItemGroup>
  <ItemGroup>
    <PackageReference Include="Microsoft.NET.Test.Sdk" />
    <PackageReference Include="xunit" />
    <PackageReference Include="xunit.runner.visualstudio">
      <PrivateAssets>all</PrivateAssets>
      <IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
    </PackageReference>
    <PackageReference Include="coverlet.collector">
      <PrivateAssets>all</PrivateAssets>
      <IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
    </PackageReference>
    <PackageReference Include="Moq" />
    <PackageReference Include="FluentAssertions" />
    <PackageReference Include="Microsoft.Extensions.TimeProvider.Testing" />
  </ItemGroup>
</Project>

View File

@@ -0,0 +1,183 @@
// Copyright (c) StellaOps. Licensed under the AGPL-3.0-or-later.
using FluentAssertions;
using Microsoft.Extensions.Logging.Abstractions;
using Microsoft.Extensions.Options;
using Microsoft.Extensions.Time.Testing;
using Moq;
using StellaOps.Eventing.Models;
using StellaOps.Eventing.Storage;
using StellaOps.HybridLogicalClock;
using Xunit;
namespace StellaOps.Eventing.Tests;
/// <summary>
/// Unit tests for <c>TimelineEventEmitter</c>: emitted events carry the
/// configured service/engine metadata, a deterministic EventId, an HLC
/// timestamp from the injected clock, and a SHA-256 payload digest.
/// Uses FakeTimeProvider and a mocked IHybridLogicalClock for determinism.
/// </summary>
[Trait("Category", "Unit")]
public sealed class TimelineEventEmitterTests
{
    private readonly FakeTimeProvider _timeProvider;
    private readonly Mock<IHybridLogicalClock> _hlcMock;
    private readonly InMemoryTimelineEventStore _eventStore;
    private readonly IOptions<EventingOptions> _options;
    private readonly TimelineEventEmitter _emitter;

    public TimelineEventEmitterTests()
    {
        _timeProvider = new FakeTimeProvider(new DateTimeOffset(2026, 1, 7, 12, 0, 0, TimeSpan.Zero));
        _hlcMock = new Mock<IHybridLogicalClock>();
        _eventStore = new InMemoryTimelineEventStore();
        _options = Options.Create(new EventingOptions
        {
            ServiceName = "TestService",
            EngineVersion = new EngineVersionRef("TestEngine", "1.0.0", "sha256:test")
        });
        _emitter = new TimelineEventEmitter(
            _hlcMock.Object,
            _timeProvider,
            _eventStore,
            _options,
            NullLogger<TimelineEventEmitter>.Instance);
    }

    [Fact]
    public async Task EmitAsync_StoresEventWithCorrectFields()
    {
        // Arrange
        var correlationId = "scan-abc123";
        var kind = EventKinds.Enqueue;
        var payload = new { JobId = "job-1", Status = "pending" };
        var expectedHlc = new HlcTimestamp(1704585600000, 0, "node1");
        _hlcMock.Setup(h => h.Tick()).Returns(expectedHlc);

        // Act
        var result = await _emitter.EmitAsync(correlationId, kind, payload);

        // Assert
        result.Should().NotBeNull();
        result.CorrelationId.Should().Be(correlationId);
        result.Kind.Should().Be(kind);
        result.Service.Should().Be("TestService");
        result.THlc.Should().Be(expectedHlc);
        result.TsWall.Should().Be(_timeProvider.GetUtcNow());
        result.SchemaVersion.Should().Be(1);
        result.EngineVersion.EngineName.Should().Be("TestEngine");
        result.EngineVersion.Version.Should().Be("1.0.0");
        result.PayloadDigest.Should().NotBeEmpty();
        result.EventId.Should().HaveLength(32);
    }

    [Fact]
    public async Task EmitAsync_GeneratesDeterministicEventId()
    {
        // Arrange
        var correlationId = "scan-abc123";
        var kind = EventKinds.Execute;
        var payload = new { Step = 1 };
        var hlc = new HlcTimestamp(1704585600000, 0, "node1");
        _hlcMock.Setup(h => h.Tick()).Returns(hlc);

        // Act
        var result1 = await _emitter.EmitAsync(correlationId, kind, payload);

        // Create a second emitter with same config
        var emitter2 = new TimelineEventEmitter(
            _hlcMock.Object,
            _timeProvider,
            new InMemoryTimelineEventStore(),
            _options,
            NullLogger<TimelineEventEmitter>.Instance);
        var result2 = await emitter2.EmitAsync(correlationId, kind, payload);

        // Assert - Same inputs should produce same EventId
        result1.EventId.Should().Be(result2.EventId);
    }

    [Fact]
    public async Task EmitAsync_StoresEventInStore()
    {
        // Arrange
        var correlationId = "scan-abc123";
        var hlc = new HlcTimestamp(1704585600000, 0, "node1");
        _hlcMock.Setup(h => h.Tick()).Returns(hlc);

        // Act
        var emitted = await _emitter.EmitAsync(correlationId, EventKinds.Enqueue, new { Test = true });

        // Assert
        var stored = await _eventStore.GetByIdAsync(emitted.EventId);
        stored.Should().NotBeNull();
        stored!.EventId.Should().Be(emitted.EventId);
    }

    [Fact]
    public async Task EmitBatchAsync_StoresAllEvents()
    {
        // Arrange — each Tick() advances the logical counter so batch events
        // get distinct HLC timestamps.
        var hlcCounter = 0L;
        _hlcMock.Setup(h => h.Tick())
            .Returns(() => new HlcTimestamp(1704585600000, hlcCounter++, "node1"));

        var pendingEvents = new[]
        {
            new PendingEvent("scan-1", EventKinds.Enqueue, new { Step = 1 }),
            new PendingEvent("scan-1", EventKinds.Execute, new { Step = 2 }),
            new PendingEvent("scan-1", EventKinds.Complete, new { Step = 3 })
        };

        // Act
        var results = await _emitter.EmitBatchAsync(pendingEvents);

        // Assert
        results.Should().HaveCount(3);
        results.Select(r => r.Kind).Should().BeEquivalentTo(
            new[] { EventKinds.Enqueue, EventKinds.Execute, EventKinds.Complete });
        var stored = _eventStore.GetAll();
        stored.Should().HaveCount(3);
    }

    [Fact]
    public async Task EmitBatchAsync_EmptyBatch_ReturnsEmptyList()
    {
        // Act
        var results = await _emitter.EmitBatchAsync(Array.Empty<PendingEvent>());

        // Assert
        results.Should().BeEmpty();
    }

    [Fact]
    public async Task EmitAsync_IncludesPayloadDigest()
    {
        // Arrange
        var hlc = new HlcTimestamp(1704585600000, 0, "node1");
        _hlcMock.Setup(h => h.Tick()).Returns(hlc);

        // Act
        var result = await _emitter.EmitAsync("corr-1", EventKinds.Emit, new { Data = "test" });

        // Assert
        result.PayloadDigest.Should().NotBeNull();
        result.PayloadDigest.Should().HaveCount(32); // SHA-256
    }

    [Fact]
    public async Task EmitAsync_DifferentPayloads_DifferentDigests()
    {
        // Arrange
        var hlcCounter = 0L;
        _hlcMock.Setup(h => h.Tick())
            .Returns(() => new HlcTimestamp(1704585600000, hlcCounter++, "node1"));

        // Act
        var result1 = await _emitter.EmitAsync("corr-1", EventKinds.Emit, new { Value = 1 });
        var result2 = await _emitter.EmitAsync("corr-1", EventKinds.Emit, new { Value = 2 });

        // Assert — NotEqual is an ordered byte-wise comparison; the default
        // NotBeEquivalentTo ignores element order for collections.
        result1.PayloadDigest.Should().NotEqual(result2.PayloadDigest);
    }
}

View File

@@ -15,10 +15,6 @@
<PackageReference Include="FluentAssertions" />
<PackageReference Include="Microsoft.Extensions.Logging.Abstractions" />
<PackageReference Include="Microsoft.Extensions.Options" />
<PackageReference Include="xunit.runner.visualstudio" >
<PrivateAssets>all</PrivateAssets>
<IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
</PackageReference>
</ItemGroup>
<ItemGroup>

View File

@@ -10,11 +10,8 @@
</PropertyGroup>
<ItemGroup>
<PackageReference Include="xunit.v3" />
<PackageReference Include="xunit.v3.assert" />
<PackageReference Include="xunit.runner.visualstudio" PrivateAssets="all" />
<PackageReference Include="FluentAssertions" />
<PackageReference Include="Microsoft.NET.Test.Sdk" />
<PackageReference Include="Moq" />
</ItemGroup>

View File

@@ -16,10 +16,6 @@
<PackageReference Include="Microsoft.CodeAnalysis.CSharp" />
<PackageReference Include="Microsoft.Extensions.Configuration" />
<PackageReference Include="Microsoft.Extensions.DependencyInjection" />
<PackageReference Include="xunit.runner.visualstudio" >
<IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
<PrivateAssets>all</PrivateAssets>
</PackageReference>
<PackageReference Include="coverlet.collector" >
<IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
<PrivateAssets>all</PrivateAssets>

View File

@@ -11,10 +11,6 @@
</PropertyGroup>
<ItemGroup>
<PackageReference Include="xunit.runner.visualstudio" >
<PrivateAssets>all</PrivateAssets>
<IncludeAssets>runtime; build; native; contentfiles; analyzers</IncludeAssets>
</PackageReference>
<PackageReference Include="coverlet.collector" >
<PrivateAssets>all</PrivateAssets>
<IncludeAssets>runtime; build; native; contentfiles; analyzers</IncludeAssets>

View File

@@ -0,0 +1,24 @@
# SPDX3 Tests Charter
## Mission
- Validate SPDX 3.0.1 parsing, validation, and context resolution.
## Responsibilities
- Cover parser, validator, and version detection behaviors.
- Exercise offline and embedded context resolution paths.
- Guard determinism and error handling.
## Required Reading
- docs/README.md
- docs/07_HIGH_LEVEL_ARCHITECTURE.md
- docs/modules/platform/architecture-overview.md
- docs/modules/sbom-service/architecture.md
- docs/modules/sbom-service/spdx3-profile-support.md
## Working Agreement
- Use fixed times and IDs in fixtures.
- Avoid network access in tests.
## Testing Strategy
- Unit tests for parser, validator, and version detection.
- Determinism tests for ordering and serialized output.

View File

@@ -0,0 +1,272 @@
// <copyright file="ModelTests.cs" company="StellaOps">
// Copyright (c) StellaOps. Licensed under the AGPL-3.0-or-later.
// </copyright>
using System.Collections.Immutable;
using StellaOps.Spdx3.Model;
using StellaOps.Spdx3.Model.Software;
namespace StellaOps.Spdx3.Tests;
/// <summary>
/// Unit tests for SPDX 3.0.1 model classes: record equality, hash
/// normalization/validation, profile URI parsing, external-identifier
/// helpers, creation-info spec-version checks, and document queries.
/// </summary>
[Trait("Category", "Unit")]
public sealed class ModelTests
{
    // Records with identical SpdxId/Name/PackageVersion must compare equal.
    [Fact]
    public void Spdx3Package_Equality_Works()
    {
        // Arrange
        var pkg1 = new Spdx3Package
        {
            SpdxId = "urn:test:pkg1",
            Name = "test-package",
            PackageVersion = "1.0.0"
        };
        var pkg2 = new Spdx3Package
        {
            SpdxId = "urn:test:pkg1",
            Name = "test-package",
            PackageVersion = "1.0.0"
        };
        var pkg3 = new Spdx3Package
        {
            SpdxId = "urn:test:pkg2",
            Name = "other-package",
            PackageVersion = "2.0.0"
        };

        // Assert
        Assert.Equal(pkg1, pkg2);
        Assert.NotEqual(pkg1, pkg3);
    }

    [Fact]
    public void Spdx3Relationship_TypeMapping_Works()
    {
        // Arrange
        var relationship = new Spdx3Relationship
        {
            SpdxId = "urn:test:rel1",
            From = "urn:test:pkg1",
            To = ["urn:test:pkg2"],
            RelationshipType = Spdx3RelationshipType.DependsOn
        };

        // Assert
        Assert.Equal(Spdx3RelationshipType.DependsOn, relationship.RelationshipType);
        Assert.Single(relationship.To);
    }

    // NormalizedHashValue lower-cases the raw hex input.
    [Fact]
    public void Spdx3Hash_NormalizesValue()
    {
        // Arrange
        var hash = new Spdx3Hash
        {
            Algorithm = Spdx3HashAlgorithm.Sha256,
            HashValue = "ABCDEF1234567890ABCDEF1234567890ABCDEF1234567890ABCDEF1234567890"
        };

        // Assert
        Assert.Equal(
            "abcdef1234567890abcdef1234567890abcdef1234567890abcdef1234567890",
            hash.NormalizedHashValue);
    }

    [Fact]
    public void Spdx3Hash_ValidatesHex()
    {
        // Arrange
        var validHash = new Spdx3Hash
        {
            Algorithm = Spdx3HashAlgorithm.Sha256,
            HashValue = "abcdef1234567890abcdef1234567890abcdef1234567890abcdef1234567890"
        };
        var invalidHash = new Spdx3Hash
        {
            Algorithm = Spdx3HashAlgorithm.Sha256,
            HashValue = "xyz-not-hex!"
        };

        // Assert
        Assert.True(validHash.IsValidHex());
        Assert.False(invalidHash.IsValidHex());
    }

    [Fact]
    public void Spdx3Hash_ValidatesLength()
    {
        // Arrange
        var validSha256 = new Spdx3Hash
        {
            Algorithm = Spdx3HashAlgorithm.Sha256,
            HashValue = "abcdef1234567890abcdef1234567890abcdef1234567890abcdef1234567890" // 64 chars
        };
        var invalidSha256 = new Spdx3Hash
        {
            Algorithm = Spdx3HashAlgorithm.Sha256,
            HashValue = "abcdef" // too short
        };

        // Assert
        Assert.True(validSha256.IsValidLength());
        Assert.False(invalidSha256.IsValidLength());
    }

    // Expected hex-digit counts per algorithm (digest bits / 4).
    [Theory]
    [InlineData(Spdx3HashAlgorithm.Sha256, 64)]
    [InlineData(Spdx3HashAlgorithm.Sha512, 128)]
    [InlineData(Spdx3HashAlgorithm.Sha3_256, 64)]
    [InlineData(Spdx3HashAlgorithm.Blake2b256, 64)]
    [InlineData(Spdx3HashAlgorithm.Md5, 32)]
    public void Spdx3Hash_GetExpectedLength_ReturnsCorrectLength(Spdx3HashAlgorithm algorithm, int expected)
    {
        // Arrange
        var hash = new Spdx3Hash
        {
            Algorithm = algorithm,
            HashValue = new string('a', expected)
        };

        // Assert
        Assert.Equal(expected, hash.GetExpectedLength());
    }

    [Theory]
    [InlineData(Spdx3HashAlgorithm.Sha256, true)]
    [InlineData(Spdx3HashAlgorithm.Sha512, true)]
    [InlineData(Spdx3HashAlgorithm.Blake2b256, true)]
    [InlineData(Spdx3HashAlgorithm.Md5, false)]
    [InlineData(Spdx3HashAlgorithm.Sha1, false)]
    public void HashAlgorithm_IsRecommended_ReturnsCorrectValue(Spdx3HashAlgorithm algorithm, bool expected)
    {
        Assert.Equal(expected, algorithm.IsRecommended());
    }

    // MD5 and SHA-1 are flagged deprecated; SHA-256 is not.
    [Theory]
    [InlineData(Spdx3HashAlgorithm.Md5, true)]
    [InlineData(Spdx3HashAlgorithm.Sha1, true)]
    [InlineData(Spdx3HashAlgorithm.Sha256, false)]
    public void HashAlgorithm_IsDeprecated_ReturnsCorrectValue(Spdx3HashAlgorithm algorithm, bool expected)
    {
        Assert.Equal(expected, algorithm.IsDeprecated());
    }

    // Unknown URIs map to null rather than throwing.
    [Fact]
    public void Spdx3ProfileIdentifier_ParseUri_Works()
    {
        // Assert
        Assert.Equal(Spdx3ProfileIdentifier.Software, Spdx3ProfileUris.ParseUri(Spdx3ProfileUris.Software));
        Assert.Equal(Spdx3ProfileIdentifier.Core, Spdx3ProfileUris.ParseUri(Spdx3ProfileUris.Core));
        Assert.Equal(Spdx3ProfileIdentifier.Build, Spdx3ProfileUris.ParseUri(Spdx3ProfileUris.Build));
        Assert.Null(Spdx3ProfileUris.ParseUri("https://unknown.example.com"));
    }

    // Name parsing is case-insensitive.
    [Fact]
    public void Spdx3ProfileIdentifier_Parse_WorksWithNames()
    {
        // Assert
        Assert.Equal(Spdx3ProfileIdentifier.Software, Spdx3ProfileUris.Parse("Software"));
        Assert.Equal(Spdx3ProfileIdentifier.Software, Spdx3ProfileUris.Parse("software"));
        Assert.Equal(Spdx3ProfileIdentifier.Build, Spdx3ProfileUris.Parse("BUILD"));
    }

    [Fact]
    public void Spdx3ProfileIdentifier_GetUri_Works()
    {
        // Assert
        Assert.Equal(Spdx3ProfileUris.Software, Spdx3ProfileUris.GetUri(Spdx3ProfileIdentifier.Software));
        Assert.Equal(Spdx3ProfileUris.Core, Spdx3ProfileUris.GetUri(Spdx3ProfileIdentifier.Core));
    }

    // GetPurl/GetCpe23 pick the identifier matching their type from a mixed list.
    [Fact]
    public void ExternalIdentifierExtensions_GetPurl_Works()
    {
        // Arrange
        var identifiers = new[]
        {
            new Spdx3ExternalIdentifier
            {
                ExternalIdentifierType = Spdx3ExternalIdentifierType.PackageUrl,
                Identifier = "pkg:npm/lodash@4.17.21"
            },
            new Spdx3ExternalIdentifier
            {
                ExternalIdentifierType = Spdx3ExternalIdentifierType.Cpe23,
                Identifier = "cpe:2.3:a:lodash:lodash:4.17.21:*:*:*:*:*:*:*"
            }
        };

        // Assert
        Assert.Equal("pkg:npm/lodash@4.17.21", identifiers.GetPurl());
        Assert.Equal("cpe:2.3:a:lodash:lodash:4.17.21:*:*:*:*:*:*:*", identifiers.GetCpe23());
    }

    // "3.0.1" is a valid spec version; the SPDX 2.x line is not.
    [Fact]
    public void Spdx3CreationInfo_IsValidSpecVersion_Works()
    {
        // Arrange
        var valid = new Spdx3CreationInfo
        {
            SpecVersion = "3.0.1",
            Created = DateTimeOffset.UtcNow
        };
        var invalid = new Spdx3CreationInfo
        {
            SpecVersion = "2.3",
            Created = DateTimeOffset.UtcNow
        };

        // Assert
        Assert.True(valid.IsValidSpecVersion());
        Assert.False(invalid.IsValidSpecVersion());
    }

    [Fact]
    public void Spdx3Document_ConformsTo_Works()
    {
        // Arrange
        var packages = new[] { new Spdx3Package { SpdxId = "urn:test:pkg1", Name = "test" } };
        var profiles = new[] { Spdx3ProfileIdentifier.Software, Spdx3ProfileIdentifier.Core };
        var doc = new Spdx3Document(packages, [], profiles);

        // Assert
        Assert.True(doc.ConformsTo(Spdx3ProfileIdentifier.Software));
        Assert.True(doc.ConformsTo(Spdx3ProfileIdentifier.Core));
        Assert.False(doc.ConformsTo(Spdx3ProfileIdentifier.Build));
    }

    // The root is the package that contains others but is contained by none.
    [Fact]
    public void Spdx3Document_GetRootPackage_Works()
    {
        // Arrange
        var pkg1 = new Spdx3Package { SpdxId = "urn:test:pkg1", Name = "root" };
        var pkg2 = new Spdx3Package { SpdxId = "urn:test:pkg2", Name = "dep" };
        var relationship = new Spdx3Relationship
        {
            SpdxId = "urn:test:rel1",
            From = "urn:test:pkg1",
            To = ["urn:test:pkg2"],
            RelationshipType = Spdx3RelationshipType.Contains
        };
        var doc = new Spdx3Document(
            [pkg1, pkg2, relationship],
            [],
            [Spdx3ProfileIdentifier.Software]);

        // Act
        var root = doc.GetRootPackage();

        // Assert
        Assert.NotNull(root);
        Assert.Equal("root", root.Name);
    }
}

View File

@@ -0,0 +1,265 @@
// <copyright file="ParserTests.cs" company="StellaOps">
// Copyright (c) StellaOps. Licensed under the AGPL-3.0-or-later.
// </copyright>
using Microsoft.Extensions.Caching.Memory;
using Microsoft.Extensions.Logging.Abstractions;
using Microsoft.Extensions.Options;
using Moq;
using StellaOps.Spdx3.JsonLd;
using StellaOps.Spdx3.Model;
namespace StellaOps.Spdx3.Tests;
/// <summary>
/// Unit tests for SPDX 3.0.1 parser.
/// </summary>
[Trait("Category", "Unit")]
public sealed class ParserTests : IDisposable
{
private readonly Spdx3Parser _parser;
private readonly MemoryCache _cache;
public ParserTests()
{
_cache = new MemoryCache(new MemoryCacheOptions { SizeLimit = 100 });
var httpClientFactory = new Mock<IHttpClientFactory>();
var options = Options.Create(new Spdx3ContextResolverOptions { AllowRemoteContexts = false });
var resolver = new Spdx3ContextResolver(
httpClientFactory.Object,
_cache,
NullLogger<Spdx3ContextResolver>.Instance,
options,
TimeProvider.System);
_parser = new Spdx3Parser(resolver, NullLogger<Spdx3Parser>.Instance);
}
[Fact]
public async Task ParseAsync_ValidSoftwareProfile_ReturnsSuccess()
{
// Arrange
var samplePath = Path.Combine("Samples", "valid-software-profile.json");
var ct = TestContext.Current.CancellationToken;
// Act
var result = await _parser.ParseAsync(samplePath, ct);
// Assert
Assert.True(result.Success, string.Join(", ", result.Errors.Select(e => e.Message)));
Assert.NotNull(result.Document);
Assert.True(result.Document.Packages.Length > 0);
}
[Fact]
public async Task ParseAsync_ValidLiteProfile_ReturnsSuccess()
{
// Arrange
var samplePath = Path.Combine("Samples", "valid-lite-profile.json");
var ct = TestContext.Current.CancellationToken;
// Act
var result = await _parser.ParseAsync(samplePath, ct);
// Assert
Assert.True(result.Success);
Assert.NotNull(result.Document);
Assert.Contains(Spdx3ProfileIdentifier.Lite, result.Document.Profiles);
}
[Fact]
public async Task ParseAsync_ValidBuildProfile_ReturnsSuccess()
{
// Arrange
var samplePath = Path.Combine("Samples", "valid-build-profile.json");
var ct = TestContext.Current.CancellationToken;
// Act
var result = await _parser.ParseAsync(samplePath, ct);
// Assert
Assert.True(result.Success);
Assert.NotNull(result.Document);
Assert.Contains(Spdx3ProfileIdentifier.Build, result.Document.Profiles);
}
[Fact]
public async Task ParseAsync_InvalidNoContext_ReturnsFail()
{
// Arrange
var samplePath = Path.Combine("Samples", "invalid-no-context.json");
var ct = TestContext.Current.CancellationToken;
// Act
var result = await _parser.ParseAsync(samplePath, ct);
// Assert
Assert.False(result.Success);
Assert.Contains(result.Errors, e => e.Code == "MISSING_CONTEXT");
}
[Fact]
public async Task ParseAsync_ExtractsPackages()
{
// Arrange
var samplePath = Path.Combine("Samples", "valid-software-profile.json");
var ct = TestContext.Current.CancellationToken;
// Act
var result = await _parser.ParseAsync(samplePath, ct);
// Assert
Assert.True(result.Success);
Assert.NotNull(result.Document);
var packages = result.Document.Packages;
Assert.Equal(2, packages.Length);
var mainPackage = packages.FirstOrDefault(p => p.Name == "example-app");
Assert.NotNull(mainPackage);
Assert.Equal("1.0.0", mainPackage.PackageVersion);
}
[Fact]
public async Task ParseAsync_ExtractsRelationships()
{
// Arrange
var samplePath = Path.Combine("Samples", "valid-software-profile.json");
var ct = TestContext.Current.CancellationToken;
// Act
var result = await _parser.ParseAsync(samplePath, ct);
// Assert
Assert.True(result.Success);
Assert.NotNull(result.Document);
var relationships = result.Document.Relationships;
Assert.Equal(2, relationships.Length);
var dependsOn = relationships.FirstOrDefault(r => r.RelationshipType == Spdx3RelationshipType.DependsOn);
Assert.NotNull(dependsOn);
Assert.Equal("urn:spdx:example:package-1", dependsOn.From);
}
[Fact]
public async Task ParseAsync_ExtractsCreationInfo()
{
// Arrange
var samplePath = Path.Combine("Samples", "valid-software-profile.json");
var ct = TestContext.Current.CancellationToken;
// Act
var result = await _parser.ParseAsync(samplePath, ct);
// Assert
Assert.True(result.Success);
Assert.NotNull(result.Document);
Assert.Contains(Spdx3ProfileIdentifier.Software, result.Document.Profiles);
Assert.Contains(Spdx3ProfileIdentifier.Core, result.Document.Profiles);
}
[Fact]
public async Task ParseAsync_ExtractsPurl()
{
    // Arrange
    var path = Path.Combine("Samples", "valid-software-profile.json");

    // Act
    var result = await _parser.ParseAsync(path, TestContext.Current.CancellationToken);

    // Assert: package URLs from both packages are surfaced by GetAllPurls().
    Assert.True(result.Success);
    Assert.NotNull(result.Document);

    var purls = result.Document.GetAllPurls().ToList();
    Assert.Contains("pkg:npm/example-app@1.0.0", purls);
    Assert.Contains("pkg:npm/lodash@4.17.21", purls);
}
[Fact]
public async Task ParseAsync_NonexistentFile_ReturnsFail()
{
    // Act: point the parser at a path that does not exist on disk.
    var result = await _parser.ParseAsync(
        "nonexistent-file.json", TestContext.Current.CancellationToken);

    // Assert: missing input surfaces as a failure, not an exception.
    Assert.False(result.Success);
    Assert.Contains(result.Errors, error => error.Code == "FILE_NOT_FOUND");
}
[Fact]
public async Task ParseFromJsonAsync_ValidJson_Parses()
{
    // Arrange: minimal in-memory JSON-LD document with a single package node.
    const string json = """
        {
          "@context": "https://spdx.org/rdf/3.0.1/spdx-context.jsonld",
          "@graph": [
            {
              "@type": "software_Package",
              "spdxId": "urn:test:pkg1",
              "name": "test-package",
              "packageVersion": "1.0.0"
            }
          ]
        }
        """;

    // Act
    var result = await _parser.ParseFromJsonAsync(json, TestContext.Current.CancellationToken);

    // Assert: exactly the one declared package comes back.
    Assert.True(result.Success);
    Assert.NotNull(result.Document);
    Assert.Single(result.Document.Packages);
}
[Fact]
public async Task ParseAsync_DocumentGetById_ReturnsElement()
{
    // Arrange
    var path = Path.Combine("Samples", "valid-software-profile.json");

    // Act
    var result = await _parser.ParseAsync(path, TestContext.Current.CancellationToken);

    // Assert: lookup by SPDX ID resolves to the strongly typed package element.
    Assert.True(result.Success);
    Assert.NotNull(result.Document);

    var package = result.Document.GetById<Model.Software.Spdx3Package>("urn:spdx:example:package-1");
    Assert.NotNull(package);
    Assert.Equal("example-app", package.Name);
}
[Fact]
public async Task ParseAsync_DocumentGetDependencies_ReturnsDeps()
{
    // Arrange
    var path = Path.Combine("Samples", "valid-software-profile.json");

    // Act
    var result = await _parser.ParseAsync(path, TestContext.Current.CancellationToken);

    // Assert: package-1 has exactly one DependsOn target — the lodash package.
    Assert.True(result.Success);
    Assert.NotNull(result.Document);

    var dependencies = result.Document.GetDependencies("urn:spdx:example:package-1").ToList();
    var dependency = Assert.Single(dependencies);
    Assert.Equal("lodash", dependency.Name);
}
/// <summary>Releases the memory cache owned by this test fixture.</summary>
public void Dispose() => _cache.Dispose();
}

View File

@@ -0,0 +1,5 @@
{
"invalid": "json",
"no_context": true,
"spdxVersion": "SPDX-2.3"
}

View File

@@ -0,0 +1,103 @@
{
"@context": "https://spdx.org/rdf/3.0.1/spdx-context.jsonld",
"@graph": [
{
"@type": "software_SpdxDocument",
"spdxId": "urn:spdx:build:document-1",
"name": "Build Profile SBOM with Attestation",
"creationInfo": {
"@id": "_:creationInfoBuild",
"@type": "CreationInfo",
"specVersion": "3.0.1",
"created": "2026-01-07T14:00:00Z",
"createdBy": ["urn:spdx:build:ci-system"],
"createdUsing": ["urn:spdx:build:stellaops-attestor"],
"profile": ["Build", "Software", "Core"],
"dataLicense": "CC0-1.0"
},
"rootElement": ["urn:spdx:build:package-artifact"]
},
{
"@type": "Tool",
"spdxId": "urn:spdx:build:ci-system",
"name": "GitHub Actions"
},
{
"@type": "Tool",
"spdxId": "urn:spdx:build:stellaops-attestor",
"name": "StellaOps Attestor v1.2.0"
},
{
"@type": "software_Package",
"spdxId": "urn:spdx:build:package-artifact",
"creationInfo": "_:creationInfoBuild",
"name": "stellaops-scanner",
"packageVersion": "1.5.0",
"primaryPurpose": "Application",
"downloadLocation": "https://github.com/stellaops/scanner/releases/download/v1.5.0/scanner-linux-amd64",
"buildTime": "2026-01-07T13:45:00Z",
"externalIdentifier": [
{
"@type": "ExternalIdentifier",
"externalIdentifierType": "PackageUrl",
"identifier": "pkg:github/stellaops/scanner@v1.5.0"
},
{
"@type": "ExternalIdentifier",
"externalIdentifierType": "GitOid",
"identifier": "abc123def456789012345678901234567890abcd"
}
],
"verifiedUsing": [
{
"@type": "Hash",
"algorithm": "sha256",
"hashValue": "fedcba0987654321fedcba0987654321fedcba0987654321fedcba0987654321"
},
{
"@type": "Hash",
"algorithm": "sha512",
"hashValue": "0123456789abcdef0123456789abcdef0123456789abcdef0123456789abcdef0123456789abcdef0123456789abcdef0123456789abcdef0123456789abcdef"
}
],
"externalRef": [
{
"@type": "ExternalRef",
"externalRefType": "SecurityAdvisory",
"locator": ["https://github.com/stellaops/scanner/security/advisories"]
}
]
},
{
"@type": "software_Package",
"spdxId": "urn:spdx:build:package-source",
"creationInfo": "_:creationInfoBuild",
"name": "stellaops-scanner-source",
"packageVersion": "1.5.0",
"primaryPurpose": "Source",
"externalIdentifier": [
{
"@type": "ExternalIdentifier",
"externalIdentifierType": "Swhid",
"identifier": "swh:1:cnt:abc123456789abcdef0123456789abcdef01234567"
}
]
},
{
"@type": "Relationship",
"spdxId": "urn:spdx:build:rel-generated",
"creationInfo": "_:creationInfoBuild",
"from": "urn:spdx:build:package-artifact",
"to": ["urn:spdx:build:package-source"],
"relationshipType": "GeneratedFrom"
},
{
"@type": "Relationship",
"spdxId": "urn:spdx:build:rel-tool",
"creationInfo": "_:creationInfoBuild",
"from": "urn:spdx:build:package-artifact",
"to": ["urn:spdx:build:ci-system"],
"relationshipType": "BuildToolOf"
}
]
}

View File

@@ -0,0 +1,90 @@
{
"@context": "https://spdx.org/rdf/3.0.1/spdx-context.jsonld",
"@graph": [
{
"@type": "software_SpdxDocument",
"spdxId": "urn:spdx:lite:document-1",
"name": "Lite Profile SBOM",
"creationInfo": {
"@id": "_:creationInfoLite",
"@type": "CreationInfo",
"specVersion": "3.0.1",
"created": "2026-01-07T12:00:00Z",
"createdBy": ["urn:spdx:lite:scanner"],
"profile": ["Lite", "Core"],
"dataLicense": "CC0-1.0"
},
"rootElement": ["urn:spdx:lite:package-main"]
},
{
"@type": "Tool",
"spdxId": "urn:spdx:lite:scanner",
"name": "StellaOps CI Scanner"
},
{
"@type": "software_Package",
"spdxId": "urn:spdx:lite:package-main",
"creationInfo": "_:creationInfoLite",
"name": "my-service",
"packageVersion": "2.1.0",
"externalIdentifier": [
{
"@type": "ExternalIdentifier",
"externalIdentifierType": "PackageUrl",
"identifier": "pkg:docker/my-service@2.1.0"
}
],
"verifiedUsing": [
{
"@type": "Hash",
"algorithm": "sha256",
"hashValue": "sha256:aabbccdd1122334455667788990011223344556677889900aabbccdd11223344"
}
]
},
{
"@type": "software_Package",
"spdxId": "urn:spdx:lite:package-dep1",
"creationInfo": "_:creationInfoLite",
"name": "express",
"packageVersion": "4.18.2",
"externalIdentifier": [
{
"@type": "ExternalIdentifier",
"externalIdentifierType": "PackageUrl",
"identifier": "pkg:npm/express@4.18.2"
}
]
},
{
"@type": "software_Package",
"spdxId": "urn:spdx:lite:package-dep2",
"creationInfo": "_:creationInfoLite",
"name": "typescript",
"packageVersion": "5.3.3",
"externalIdentifier": [
{
"@type": "ExternalIdentifier",
"externalIdentifierType": "PackageUrl",
"identifier": "pkg:npm/typescript@5.3.3"
}
]
},
{
"@type": "Relationship",
"spdxId": "urn:spdx:lite:rel-1",
"creationInfo": "_:creationInfoLite",
"from": "urn:spdx:lite:package-main",
"to": ["urn:spdx:lite:package-dep1"],
"relationshipType": "DependsOn"
},
{
"@type": "Relationship",
"spdxId": "urn:spdx:lite:rel-2",
"creationInfo": "_:creationInfoLite",
"from": "urn:spdx:lite:package-main",
"to": ["urn:spdx:lite:package-dep2"],
"relationshipType": "DependsOn"
}
]
}

View File

@@ -0,0 +1,133 @@
{
"@context": "https://spdx.org/rdf/3.0.1/spdx-context.jsonld",
"@graph": [
{
"@type": "software_SpdxDocument",
"spdxId": "urn:spdx:security:document-1",
"name": "Security Profile SBOM with Vulnerability Data",
"creationInfo": {
"@id": "_:creationInfoSec",
"@type": "CreationInfo",
"specVersion": "3.0.1",
"created": "2026-01-07T16:00:00Z",
"createdBy": ["urn:spdx:security:scanner"],
"profile": ["Security", "Software", "Core"],
"dataLicense": "CC0-1.0"
},
"rootElement": ["urn:spdx:security:package-main"]
},
{
"@type": "Tool",
"spdxId": "urn:spdx:security:scanner",
"name": "StellaOps Vulnerability Scanner v2.0.0"
},
{
"@type": "software_Package",
"spdxId": "urn:spdx:security:package-main",
"creationInfo": "_:creationInfoSec",
"name": "vulnerable-app",
"packageVersion": "1.0.0",
"primaryPurpose": "Application",
"externalIdentifier": [
{
"@type": "ExternalIdentifier",
"externalIdentifierType": "PackageUrl",
"identifier": "pkg:npm/vulnerable-app@1.0.0"
}
],
"verifiedUsing": [
{
"@type": "Hash",
"algorithm": "sha256",
"hashValue": "abc123def456789012345678901234567890abcdef123456789012345678901234"
}
]
},
{
"@type": "software_Package",
"spdxId": "urn:spdx:security:package-vulnerable-dep",
"creationInfo": "_:creationInfoSec",
"name": "lodash",
"packageVersion": "4.17.15",
"primaryPurpose": "Library",
"externalIdentifier": [
{
"@type": "ExternalIdentifier",
"externalIdentifierType": "PackageUrl",
"identifier": "pkg:npm/lodash@4.17.15"
},
{
"@type": "ExternalIdentifier",
"externalIdentifierType": "Cpe23",
"identifier": "cpe:2.3:a:lodash:lodash:4.17.15:*:*:*:*:*:*:*"
}
]
},
{
"@type": "security_Vulnerability",
"spdxId": "urn:spdx:security:vuln-cve-2020-8203",
"creationInfo": "_:creationInfoSec",
"name": "CVE-2020-8203",
"summary": "Prototype Pollution in lodash",
"description": "Prototype pollution in zipObjectDeep in lodash before 4.17.20 allows an attacker to modify the prototype of Object.prototype.",
"externalIdentifier": [
{
"@type": "ExternalIdentifier",
"externalIdentifierType": "Cve",
"identifier": "CVE-2020-8203"
}
],
"externalRef": [
{
"@type": "ExternalRef",
"externalRefType": "SecurityAdvisory",
"locator": ["https://nvd.nist.gov/vuln/detail/CVE-2020-8203"]
}
],
"publishedTime": "2020-07-15T00:00:00Z",
"modifiedTime": "2023-01-20T00:00:00Z"
},
{
"@type": "security_VulnAssessmentRelationship",
"spdxId": "urn:spdx:security:assessment-1",
"creationInfo": "_:creationInfoSec",
"from": "urn:spdx:security:vuln-cve-2020-8203",
"to": ["urn:spdx:security:package-vulnerable-dep"],
"relationshipType": "AffectsElement",
"assessedElement": "urn:spdx:security:package-vulnerable-dep",
"suppliedBy": "urn:spdx:security:scanner",
"publishedTime": "2026-01-07T16:00:00Z"
},
{
"@type": "security_VexVulnAssessmentRelationship",
"spdxId": "urn:spdx:security:vex-1",
"creationInfo": "_:creationInfoSec",
"from": "urn:spdx:security:vuln-cve-2020-8203",
"to": ["urn:spdx:security:package-main"],
"relationshipType": "HasAssessmentFor",
"vexVersion": "1.0.0",
"statusNotes": "The vulnerable function is not called in this application",
"status": "not_affected",
"justification": "vulnerable_code_not_in_execute_path"
},
{
"@type": "security_CvssV3VulnAssessmentRelationship",
"spdxId": "urn:spdx:security:cvss-1",
"creationInfo": "_:creationInfoSec",
"from": "urn:spdx:security:vuln-cve-2020-8203",
"to": ["urn:spdx:security:package-vulnerable-dep"],
"relationshipType": "HasAssessmentFor",
"score": 7.4,
"severity": "High",
"vectorString": "CVSS:3.1/AV:N/AC:H/PR:N/UI:N/S:U/C:N/I:H/A:H"
},
{
"@type": "Relationship",
"spdxId": "urn:spdx:security:rel-depends",
"creationInfo": "_:creationInfoSec",
"from": "urn:spdx:security:package-main",
"to": ["urn:spdx:security:package-vulnerable-dep"],
"relationshipType": "DependsOn"
}
]
}

View File

@@ -0,0 +1,113 @@
{
"@context": "https://spdx.org/rdf/3.0.1/spdx-context.jsonld",
"@graph": [
{
"@type": "software_SpdxDocument",
"spdxId": "urn:spdx:example:document-1",
"name": "Example SPDX 3.0.1 Document",
"creationInfo": {
"@id": "_:creationInfo1",
"@type": "CreationInfo",
"specVersion": "3.0.1",
"created": "2026-01-07T10:00:00Z",
"createdBy": ["urn:spdx:example:stellaops-tool"],
"profile": ["Software", "Core"],
"dataLicense": "CC0-1.0"
},
"rootElement": ["urn:spdx:example:package-1"],
"element": [
"urn:spdx:example:package-1",
"urn:spdx:example:package-2",
"urn:spdx:example:file-1"
]
},
{
"@type": "Tool",
"spdxId": "urn:spdx:example:stellaops-tool",
"name": "StellaOps Scanner v1.0.0"
},
{
"@type": "software_Package",
"spdxId": "urn:spdx:example:package-1",
"creationInfo": "_:creationInfo1",
"name": "example-app",
"packageVersion": "1.0.0",
"downloadLocation": "https://example.com/example-app-1.0.0.tar.gz",
"homePage": "https://example.com",
"primaryPurpose": "Application",
"copyrightText": "Copyright 2026 Example Inc.",
"suppliedBy": "urn:spdx:example:org-1",
"externalIdentifier": [
{
"@type": "ExternalIdentifier",
"externalIdentifierType": "PackageUrl",
"identifier": "pkg:npm/example-app@1.0.0"
}
],
"verifiedUsing": [
{
"@type": "Hash",
"algorithm": "sha256",
"hashValue": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855"
}
]
},
{
"@type": "software_Package",
"spdxId": "urn:spdx:example:package-2",
"creationInfo": "_:creationInfo1",
"name": "lodash",
"packageVersion": "4.17.21",
"primaryPurpose": "Library",
"externalIdentifier": [
{
"@type": "ExternalIdentifier",
"externalIdentifierType": "PackageUrl",
"identifier": "pkg:npm/lodash@4.17.21"
}
],
"verifiedUsing": [
{
"@type": "Hash",
"algorithm": "sha256",
"hashValue": "1234567890abcdef1234567890abcdef1234567890abcdef1234567890abcdef"
}
]
},
{
"@type": "software_File",
"spdxId": "urn:spdx:example:file-1",
"creationInfo": "_:creationInfo1",
"name": "index.js",
"contentType": "application/javascript",
"verifiedUsing": [
{
"@type": "Hash",
"algorithm": "sha256",
"hashValue": "abcdef1234567890abcdef1234567890abcdef1234567890abcdef1234567890"
}
]
},
{
"@type": "Relationship",
"spdxId": "urn:spdx:example:rel-1",
"creationInfo": "_:creationInfo1",
"from": "urn:spdx:example:package-1",
"to": ["urn:spdx:example:package-2"],
"relationshipType": "DependsOn"
},
{
"@type": "Relationship",
"spdxId": "urn:spdx:example:rel-2",
"creationInfo": "_:creationInfo1",
"from": "urn:spdx:example:package-1",
"to": ["urn:spdx:example:file-1"],
"relationshipType": "Contains"
},
{
"@type": "Organization",
"spdxId": "urn:spdx:example:org-1",
"name": "Example Inc."
}
]
}

View File

@@ -0,0 +1,34 @@
<Project Sdk="Microsoft.NET.Sdk">
  <PropertyGroup>
    <TargetFramework>net10.0</TargetFramework>
    <ImplicitUsings>enable</ImplicitUsings>
    <Nullable>enable</Nullable>
    <LangVersion>preview</LangVersion>
    <!-- Test project: never packed, and all warnings fail the build. -->
    <IsPackable>false</IsPackable>
    <TreatWarningsAsErrors>true</TreatWarningsAsErrors>
    <UseConcelierTestInfra>false</UseConcelierTestInfra>
  </PropertyGroup>
  <ItemGroup>
    <!-- Package versions are resolved elsewhere (no Version attributes here) —
         presumably via central package management; confirm in Directory.Packages.props. -->
    <PackageReference Include="Microsoft.Extensions.Caching.Memory" />
    <PackageReference Include="Microsoft.Extensions.Logging.Abstractions" />
    <PackageReference Include="Microsoft.Extensions.Options" />
    <PackageReference Include="Moq" />
    <PackageReference Include="coverlet.collector">
      <PrivateAssets>all</PrivateAssets>
      <IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
    </PackageReference>
  </ItemGroup>
  <ItemGroup>
    <ProjectReference Include="..\..\StellaOps.Spdx3\StellaOps.Spdx3.csproj" />
  </ItemGroup>
  <ItemGroup>
    <!-- Copy JSON fixtures next to the test binaries so Path.Combine("Samples", ...) resolves. -->
    <None Update="Samples\*.json">
      <CopyToOutputDirectory>PreserveNewest</CopyToOutputDirectory>
    </None>
  </ItemGroup>
</Project>

View File

@@ -0,0 +1,292 @@
// <copyright file="ValidatorTests.cs" company="StellaOps">
// Copyright (c) StellaOps. Licensed under the AGPL-3.0-or-later.
// </copyright>
using System.Collections.Immutable;
using StellaOps.Spdx3.Model;
using StellaOps.Spdx3.Model.Software;
using StellaOps.Spdx3.Validation;
namespace StellaOps.Spdx3.Tests;
/// <summary>
/// Unit tests for SPDX 3.0.1 validator.
/// </summary>
[Trait("Category", "Unit")]
public sealed class ValidatorTests
{
    private readonly Spdx3Validator _validator = new();

    [Fact]
    public void Validate_ValidDocument_ReturnsValid()
    {
        // Arrange
        var document = CreateValidDocument();

        // Act
        var result = _validator.Validate(document);

        // Assert: a well-formed document yields no errors at all.
        Assert.True(result.IsValid);
        Assert.Empty(result.Errors);
    }

    [Fact]
    public void Validate_EmptyDocument_ReturnsError()
    {
        // Arrange: no elements, no relationships, no profiles.
        var document = new Spdx3Document([], [], []);

        // Act
        var result = _validator.Validate(document);

        // Assert
        Assert.False(result.IsValid);
        Assert.Contains(result.Errors, e => e.Code == "EMPTY_DOCUMENT");
    }

    [Fact]
    public void Validate_DuplicateSpdxId_ReturnsError()
    {
        // Arrange: two packages sharing a single SPDX identifier.
        var first = new Spdx3Package { SpdxId = "urn:test:dup", Name = "pkg1" };
        var second = new Spdx3Package { SpdxId = "urn:test:dup", Name = "pkg2" };
        var document = new Spdx3Document([first, second], [], [Spdx3ProfileIdentifier.Software]);

        // Act
        var result = _validator.Validate(document);

        // Assert
        Assert.False(result.IsValid);
        Assert.Contains(result.Errors, e => e.Code == "DUPLICATE_SPDX_ID");
    }

    [Fact]
    public void Validate_DanglingRelationship_ReturnsWarning()
    {
        // Arrange: the relationship targets an element not declared in the document.
        var package = new Spdx3Package { SpdxId = "urn:test:pkg1", Name = "pkg1" };
        var relationship = new Spdx3Relationship
        {
            SpdxId = "urn:test:rel1",
            From = "urn:test:pkg1",
            To = ["urn:test:nonexistent"],
            RelationshipType = Spdx3RelationshipType.DependsOn
        };
        var document = new Spdx3Document(
            [package, relationship], [], [Spdx3ProfileIdentifier.Software]);

        // Act
        var result = _validator.Validate(document);

        // Assert: warnings alone do not fail validation by default.
        Assert.True(result.IsValid);
        Assert.Contains(result.Warnings, w => w.Code == "DANGLING_RELATIONSHIP_TO");
    }

    [Fact]
    public void Validate_EmptyRelationshipTo_ReturnsError()
    {
        // Arrange: a relationship with an empty "to" list is structurally invalid.
        var package = new Spdx3Package { SpdxId = "urn:test:pkg1", Name = "pkg1" };
        var relationship = new Spdx3Relationship
        {
            SpdxId = "urn:test:rel1",
            From = "urn:test:pkg1",
            To = [],
            RelationshipType = Spdx3RelationshipType.DependsOn
        };
        var document = new Spdx3Document(
            [package, relationship], [], [Spdx3ProfileIdentifier.Software]);

        // Act
        var result = _validator.Validate(document);

        // Assert
        Assert.False(result.IsValid);
        Assert.Contains(result.Errors, e => e.Code == "EMPTY_RELATIONSHIP_TO");
    }

    [Fact]
    public void Validate_InvalidPurl_ReturnsWarning()
    {
        // Arrange: an external identifier claims to be a purl but is malformed.
        var package = new Spdx3Package
        {
            SpdxId = "urn:test:pkg1",
            Name = "pkg1",
            ExternalIdentifier =
            [
                new Spdx3ExternalIdentifier
                {
                    ExternalIdentifierType = Spdx3ExternalIdentifierType.PackageUrl,
                    Identifier = "not-a-valid-purl"
                }
            ]
        };
        var document = new Spdx3Document([package], [], [Spdx3ProfileIdentifier.Software]);

        // Act
        var result = _validator.Validate(document);

        // Assert
        Assert.Contains(result.Warnings, w => w.Code == "INVALID_PURL_FORMAT");
    }

    [Fact]
    public void Validate_InvalidHashFormat_ReturnsError()
    {
        // Arrange: SHA-256 hash value containing non-hex characters.
        var package = new Spdx3Package
        {
            SpdxId = "urn:test:pkg1",
            Name = "pkg1",
            VerifiedUsing =
            [
                new Spdx3Hash
                {
                    Algorithm = Spdx3HashAlgorithm.Sha256,
                    HashValue = "not-hex-value!"
                }
            ]
        };
        var document = new Spdx3Document([package], [], [Spdx3ProfileIdentifier.Software]);

        // Act
        var result = _validator.Validate(document);

        // Assert
        Assert.False(result.IsValid);
        Assert.Contains(result.Errors, e => e.Code == "INVALID_HASH_FORMAT");
    }

    [Fact]
    public void Validate_DeprecatedHashAlgorithm_ReturnsWarning()
    {
        // Arrange
        var document = CreateMd5HashDocument();

        // Act
        var result = _validator.Validate(document);

        // Assert: MD5 is accepted but flagged as deprecated.
        Assert.Contains(result.Warnings, w => w.Code == "DEPRECATED_HASH_ALGORITHM");
    }

    [Fact]
    public void Validate_RequiredProfileMissing_ReturnsError()
    {
        // Arrange: demand the Build profile, which the valid document lacks.
        var document = CreateValidDocument();
        var options = new Spdx3ValidationOptions
        {
            RequiredProfiles = [Spdx3ProfileIdentifier.Build]
        };

        // Act
        var result = _validator.Validate(document, options);

        // Assert
        Assert.False(result.IsValid);
        Assert.Contains(result.Errors, e => e.Code == "MISSING_REQUIRED_PROFILE");
    }

    [Fact]
    public void Validate_TreatWarningsAsErrors_ConvertsWarnings()
    {
        // Arrange: MD5 usage normally warns; the option must escalate it.
        var document = CreateMd5HashDocument();
        var options = new Spdx3ValidationOptions { TreatWarningsAsErrors = true };

        // Act
        var result = _validator.Validate(document, options);

        // Assert: the warning is promoted to an error and removed from warnings.
        Assert.False(result.IsValid);
        Assert.Contains(result.Errors, e => e.Code == "DEPRECATED_HASH_ALGORITHM");
        Assert.Empty(result.Warnings);
    }

    [Fact]
    public void Validate_Info_ContainsStats()
    {
        // Arrange
        var document = CreateValidDocument();

        // Act
        var result = _validator.Validate(document);

        // Assert
        Assert.Contains(result.Info, i => i.Code == "DOCUMENT_STATS");
    }

    /// <summary>
    /// Builds a two-package document (root DependsOn dep) that passes validation:
    /// valid SHA-256 hash, valid purl, Software + Core profiles declared.
    /// </summary>
    private static Spdx3Document CreateValidDocument()
    {
        var root = new Spdx3Package
        {
            SpdxId = "urn:test:pkg1",
            Name = "root-package",
            PackageVersion = "1.0.0",
            VerifiedUsing =
            [
                new Spdx3Hash
                {
                    Algorithm = Spdx3HashAlgorithm.Sha256,
                    HashValue = "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855"
                }
            ]
        };
        var dependency = new Spdx3Package
        {
            SpdxId = "urn:test:pkg2",
            Name = "dep-package",
            PackageVersion = "2.0.0",
            ExternalIdentifier =
            [
                new Spdx3ExternalIdentifier
                {
                    ExternalIdentifierType = Spdx3ExternalIdentifierType.PackageUrl,
                    Identifier = "pkg:npm/dep-package@2.0.0"
                }
            ]
        };
        var dependsOn = new Spdx3Relationship
        {
            SpdxId = "urn:test:rel1",
            From = "urn:test:pkg1",
            To = ["urn:test:pkg2"],
            RelationshipType = Spdx3RelationshipType.DependsOn
        };

        return new Spdx3Document(
            [root, dependency, dependsOn],
            [],
            [Spdx3ProfileIdentifier.Software, Spdx3ProfileIdentifier.Core]);
    }

    /// <summary>
    /// Builds a single-package document whose only hash uses the deprecated MD5 algorithm.
    /// </summary>
    private static Spdx3Document CreateMd5HashDocument()
    {
        var package = new Spdx3Package
        {
            SpdxId = "urn:test:pkg1",
            Name = "pkg1",
            VerifiedUsing =
            [
                new Spdx3Hash
                {
                    Algorithm = Spdx3HashAlgorithm.Md5,
                    HashValue = "d41d8cd98f00b204e9800998ecf8427e"
                }
            ]
        };
        return new Spdx3Document([package], [], [Spdx3ProfileIdentifier.Software]);
    }
}

View File

@@ -0,0 +1,152 @@
// <copyright file="VersionDetectorTests.cs" company="StellaOps">
// Copyright (c) StellaOps. Licensed under the AGPL-3.0-or-later.
// </copyright>
namespace StellaOps.Spdx3.Tests;
/// <summary>
/// Unit tests for SPDX version detection.
/// </summary>
[Trait("Category", "Unit")]
public sealed class VersionDetectorTests
{
    [Fact]
    public void Detect_Spdx301JsonLd_ReturnsCorrectVersion()
    {
        // Arrange: canonical SPDX 3.0.1 JSON-LD context URI.
        const string payload = """
            {
              "@context": "https://spdx.org/rdf/3.0.1/spdx-context.jsonld",
              "@graph": []
            }
            """;

        // Act
        var detection = Spdx3VersionDetector.Detect(payload);

        // Assert
        Assert.Equal(Spdx3VersionDetector.SpdxVersion.Spdx301, detection.Version);
        Assert.True(detection.IsJsonLd);
    }

    [Fact]
    public void Detect_Spdx23_ReturnsCorrectVersion()
    {
        // Arrange: classic SPDX 2.3 tag-value-style JSON document.
        const string payload = """
            {
              "spdxVersion": "SPDX-2.3",
              "dataLicense": "CC0-1.0",
              "SPDXID": "SPDXRef-DOCUMENT"
            }
            """;

        // Act
        var detection = Spdx3VersionDetector.Detect(payload);

        // Assert: version string is surfaced verbatim and JSON-LD is not claimed.
        Assert.Equal(Spdx3VersionDetector.SpdxVersion.Spdx23, detection.Version);
        Assert.False(detection.IsJsonLd);
        Assert.Equal("SPDX-2.3", detection.VersionString);
    }

    [Fact]
    public void Detect_Spdx22_ReturnsCorrectVersion()
    {
        // Arrange
        const string payload = """
            {
              "spdxVersion": "SPDX-2.2",
              "dataLicense": "CC0-1.0",
              "SPDXID": "SPDXRef-DOCUMENT"
            }
            """;

        // Act
        var detection = Spdx3VersionDetector.Detect(payload);

        // Assert
        Assert.Equal(Spdx3VersionDetector.SpdxVersion.Spdx22, detection.Version);
        Assert.False(detection.IsJsonLd);
    }

    [Fact]
    public void Detect_Unknown_ReturnsUnknown()
    {
        // Arrange: JSON with no SPDX markers at all.
        const string payload = """
            {
              "random": "data",
              "notSpdx": true
            }
            """;

        // Act
        var detection = Spdx3VersionDetector.Detect(payload);

        // Assert
        Assert.Equal(Spdx3VersionDetector.SpdxVersion.Unknown, detection.Version);
    }

    [Fact]
    public void Detect_ContextWithArray_DetectsVersion()
    {
        // Arrange: @context as an array mixing the SPDX URI with a custom mapping.
        const string payload = """
            {
              "@context": [
                "https://spdx.org/rdf/3.0.1/spdx-context.jsonld",
                { "custom": "http://example.org/custom" }
              ],
              "@graph": []
            }
            """;

        // Act
        var detection = Spdx3VersionDetector.Detect(payload);

        // Assert
        Assert.Equal(Spdx3VersionDetector.SpdxVersion.Spdx301, detection.Version);
        Assert.True(detection.IsJsonLd);
    }

    [Fact]
    public void Detect_SpecVersionInGraph_DetectsVersion()
    {
        // Arrange: context URI is unrecognized, so detection must fall back to
        // the specVersion carried inside a graph node's creationInfo.
        const string payload = """
            {
              "@context": "https://example.com/context",
              "@graph": [
                {
                  "creationInfo": {
                    "specVersion": "3.0.1"
                  }
                }
              ]
            }
            """;

        // Act
        var detection = Spdx3VersionDetector.Detect(payload);

        // Assert
        Assert.Equal(Spdx3VersionDetector.SpdxVersion.Spdx301, detection.Version);
    }

    [Theory]
    [InlineData(Spdx3VersionDetector.SpdxVersion.Spdx22, "Use SpdxParser (SPDX 2.x parser)")]
    [InlineData(Spdx3VersionDetector.SpdxVersion.Spdx23, "Use SpdxParser (SPDX 2.x parser)")]
    [InlineData(Spdx3VersionDetector.SpdxVersion.Spdx301, "Use Spdx3Parser (SPDX 3.0.1 parser)")]
    [InlineData(Spdx3VersionDetector.SpdxVersion.Unknown, "Unknown format - manual inspection required")]
    public void GetParserRecommendation_ReturnsCorrectRecommendation(
        Spdx3VersionDetector.SpdxVersion version,
        string expected)
    {
        // Act
        var recommendation = Spdx3VersionDetector.GetParserRecommendation(version);

        // Assert
        Assert.Equal(expected, recommendation);
    }
}