Add unit tests for SBOM ingestion and transformation

- Implement `SbomIngestServiceCollectionExtensionsTests` to verify the SBOM ingestion pipeline exports snapshots correctly.
- Create `SbomIngestTransformerTests` to ensure the transformation produces the expected nodes and edges, including deduplication of license nodes and normalization of timestamps (see the sketch after this list).
- Add `SbomSnapshotExporterTests` to test the export functionality for manifest, adjacency, nodes, and edges.
- Introduce `VexOverlayTransformerTests` to validate the transformation of VEX nodes and edges.
- Set up project file for the test project with necessary dependencies and configurations.
- Include JSON fixture files for testing purposes.
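
The deduplication property called out above boils down to keying license nodes by a normalized identifier before emitting graph nodes. The xUnit sketch below illustrates that property in isolation; `LicenseNodeDeduplicator` is a hypothetical helper written only for this example and is not the transformer's actual API.

using System;
using System.Collections.Generic;
using System.Linq;
using Xunit;

// Hypothetical helper used only for this sketch; the real SbomIngestTransformer API may differ.
public static class LicenseNodeDeduplicator
{
    public static IReadOnlyList<string> Deduplicate(IEnumerable<string> licenseIds)
        => licenseIds
            .Where(id => !string.IsNullOrWhiteSpace(id))
            .Select(id => id.Trim())
            .Distinct(StringComparer.OrdinalIgnoreCase)   // collapse case-variant duplicates
            .OrderBy(id => id, StringComparer.Ordinal)     // deterministic node ordering
            .ToList();
}

public sealed class LicenseNodeDeduplicationTests
{
    [Fact]
    public void DuplicateLicensesCollapseToASingleNode()
    {
        var nodes = LicenseNodeDeduplicator.Deduplicate(new[] { "Apache-2.0", "apache-2.0 ", "MIT" });

        Assert.Equal(new[] { "Apache-2.0", "MIT" }, nodes);
    }
}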
Branch: master
Commit: 2eb6852d34 (parent f72c5c513a)
Date: 2025-11-04 07:49:39 +02:00
491 changed files with 39445 additions and 3917 deletions

@@ -0,0 +1,210 @@
using System.Text.Json.Nodes;
using Microsoft.Extensions.Logging;
using StellaOps.Findings.Ledger.Domain;
using StellaOps.Findings.Ledger.Hashing;
using StellaOps.Findings.Ledger.Infrastructure;
namespace StellaOps.Findings.Ledger.Services;
public interface ILedgerEventWriteService
{
Task<LedgerWriteResult> AppendAsync(LedgerEventDraft draft, CancellationToken cancellationToken);
}
public sealed class LedgerEventWriteService : ILedgerEventWriteService
{
private readonly ILedgerEventRepository _repository;
private readonly IMerkleAnchorScheduler _merkleAnchorScheduler;
private readonly ILogger<LedgerEventWriteService> _logger;
public LedgerEventWriteService(
ILedgerEventRepository repository,
IMerkleAnchorScheduler merkleAnchorScheduler,
ILogger<LedgerEventWriteService> logger)
{
_repository = repository ?? throw new ArgumentNullException(nameof(repository));
_merkleAnchorScheduler = merkleAnchorScheduler ?? throw new ArgumentNullException(nameof(merkleAnchorScheduler));
_logger = logger ?? throw new ArgumentNullException(nameof(logger));
}
public async Task<LedgerWriteResult> AppendAsync(LedgerEventDraft draft, CancellationToken cancellationToken)
{
var validationErrors = ValidateDraft(draft);
if (validationErrors.Count > 0)
{
return LedgerWriteResult.ValidationFailed([.. validationErrors]);
}
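// Idempotency: if this event id was already appended, accept it only when the stored
// canonical payload matches byte-for-byte; otherwise surface a conflict to the caller.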
var existing = await _repository.GetByEventIdAsync(draft.TenantId, draft.EventId, cancellationToken).ConfigureAwait(false);
if (existing is not null)
{
var canonicalJson = LedgerCanonicalJsonSerializer.Serialize(draft.CanonicalEnvelope);
if (!string.Equals(existing.CanonicalJson, canonicalJson, StringComparison.Ordinal))
{
return LedgerWriteResult.Conflict(
"event_id_conflict",
$"Event '{draft.EventId}' already exists with a different payload.");
}
return LedgerWriteResult.Idempotent(existing);
}
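// Chain integrity: the draft must carry the next sequence number for this chain and,
// when a previous hash is supplied, it must match the current chain head's event hash.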
var chainHead = await _repository.GetChainHeadAsync(draft.TenantId, draft.ChainId, cancellationToken).ConfigureAwait(false);
var expectedSequence = chainHead is null ? 1 : chainHead.SequenceNumber + 1;
if (draft.SequenceNumber != expectedSequence)
{
return LedgerWriteResult.Conflict(
"sequence_mismatch",
$"Sequence number '{draft.SequenceNumber}' does not match expected '{expectedSequence}'.");
}
var previousHash = chainHead?.EventHash ?? LedgerEventConstants.EmptyHash;
if (draft.ProvidedPreviousHash is not null && !string.Equals(draft.ProvidedPreviousHash, previousHash, StringComparison.OrdinalIgnoreCase))
{
return LedgerWriteResult.Conflict(
"previous_hash_mismatch",
$"Provided previous hash '{draft.ProvidedPreviousHash}' does not match chain head hash '{previousHash}'.");
}
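// Canonicalize the envelope, then derive the event hash, Merkle leaf hash, and canonical
// JSON that are persisted with the record.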
var canonicalEnvelope = LedgerCanonicalJsonSerializer.Canonicalize(draft.CanonicalEnvelope);
var hashResult = LedgerHashing.ComputeHashes(canonicalEnvelope, draft.SequenceNumber);
var eventBody = (JsonObject)canonicalEnvelope.DeepClone();
var record = new LedgerEventRecord(
draft.TenantId,
draft.ChainId,
draft.SequenceNumber,
draft.EventId,
draft.EventType,
draft.PolicyVersion,
draft.FindingId,
draft.ArtifactId,
draft.SourceRunId,
draft.ActorId,
draft.ActorType,
draft.OccurredAt,
draft.RecordedAt,
eventBody,
hashResult.EventHash,
previousHash,
hashResult.MerkleLeafHash,
hashResult.CanonicalJson);
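// Persist the record and enqueue it for Merkle anchoring; a duplicate-key failure here
// signals that a concurrent writer appended the same event id first.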
try
{
await _repository.AppendAsync(record, cancellationToken).ConfigureAwait(false);
await _merkleAnchorScheduler.EnqueueAsync(record, cancellationToken).ConfigureAwait(false);
}
catch (Exception ex) when (IsDuplicateKeyException(ex))
{
_logger.LogWarning(ex, "Ledger append detected concurrent duplicate for {EventId}", draft.EventId);
var persisted = await _repository.GetByEventIdAsync(draft.TenantId, draft.EventId, cancellationToken).ConfigureAwait(false);
if (persisted is null)
{
return LedgerWriteResult.Conflict("append_failed", "Ledger append failed due to concurrent write.");
}
if (!string.Equals(persisted.CanonicalJson, record.CanonicalJson, StringComparison.Ordinal))
{
return LedgerWriteResult.Conflict("event_id_conflict", "Ledger append raced with conflicting payload.");
}
return LedgerWriteResult.Idempotent(persisted);
}
return LedgerWriteResult.Success(record);
}
private static bool IsDuplicateKeyException(Exception exception)
{
if (exception is null)
{
return false;
}
if (exception is LedgerDuplicateEventException)
{
return true;
}
if (exception.GetType().Name.Contains("Unique", StringComparison.OrdinalIgnoreCase))
{
return true;
}
if (exception.InnerException is not null)
{
return IsDuplicateKeyException(exception.InnerException);
}
return false;
}
private static List<string> ValidateDraft(LedgerEventDraft draft)
{
var errors = new List<string>();
if (draft is null)
{
errors.Add("draft_required");
return errors;
}
if (string.IsNullOrWhiteSpace(draft.TenantId))
{
errors.Add("tenant_id_required");
}
if (draft.SequenceNumber < 1)
{
errors.Add("sequence_must_be_positive");
}
if (draft.EventId == Guid.Empty)
{
errors.Add("event_id_required");
}
if (draft.ChainId == Guid.Empty)
{
errors.Add("chain_id_required");
}
if (!LedgerEventConstants.SupportedEventTypes.Contains(draft.EventType))
{
errors.Add($"event_type_invalid:{draft.EventType}");
}
if (!LedgerEventConstants.SupportedActorTypes.Contains(draft.ActorType))
{
errors.Add($"actor_type_invalid:{draft.ActorType}");
}
if (string.IsNullOrWhiteSpace(draft.PolicyVersion))
{
errors.Add("policy_version_required");
}
if (string.IsNullOrWhiteSpace(draft.FindingId))
{
errors.Add("finding_id_required");
}
if (string.IsNullOrWhiteSpace(draft.ArtifactId))
{
errors.Add("artifact_id_required");
}
if (draft.Payload is null)
{
errors.Add("payload_required");
}
if (draft.CanonicalEnvelope is null)
{
errors.Add("canonical_envelope_required");
}
return errors;
}
}
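
The append path above enforces two invariants on every write: the sequence number must be exactly one past the chain head, and the new event must link back to the head's hash (or to the empty hash for the first event). The sketch below is an illustrative, standalone verifier of that invariant; `ChainLink` and `VerifyChain` are hypothetical names introduced for the example and are not part of the ledger codebase.

using System;
using System.Collections.Generic;

// Illustrative only: a stand-in for the persisted fields the chain check relies on.
public sealed record ChainLink(long SequenceNumber, string EventHash, string PreviousHash);

public static class ChainVerifier
{
    // emptyHash plays the role of LedgerEventConstants.EmptyHash (the genesis previous-hash value).
    public static bool VerifyChain(IReadOnlyList<ChainLink> links, string emptyHash)
    {
        for (var i = 0; i < links.Count; i++)
        {
            var expectedSequence = i == 0 ? 1 : links[i - 1].SequenceNumber + 1;
            var expectedPrevious = i == 0 ? emptyHash : links[i - 1].EventHash;

            if (links[i].SequenceNumber != expectedSequence)
            {
                return false;
            }

            if (!string.Equals(links[i].PreviousHash, expectedPrevious, StringComparison.OrdinalIgnoreCase))
            {
                return false;
            }
        }

        return true;
    }
}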

@@ -0,0 +1,247 @@
using System.Text.Json;
using System.Text.Json.Nodes;
using StellaOps.Findings.Ledger.Domain;
using StellaOps.Findings.Ledger.Hashing;
using StellaOps.Findings.Ledger.Infrastructure.Policy;
namespace StellaOps.Findings.Ledger.Services;
public static class LedgerProjectionReducer
{
public static ProjectionReduceResult Reduce(
LedgerEventRecord record,
FindingProjection? current,
PolicyEvaluationResult evaluation)
{
ArgumentNullException.ThrowIfNull(record);
ArgumentNullException.ThrowIfNull(evaluation);
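// Ledger events wrap their payload in an 'event' envelope; the payload (when present)
// drives status, severity, label, and explain-ref updates on top of the current projection.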
var eventObject = record.EventBody["event"]?.AsObject()
?? throw new InvalidOperationException("Ledger event payload is missing 'event' object.");
var payload = eventObject["payload"] as JsonObject;
var status = evaluation.Status ?? DetermineStatus(record.EventType, payload, current?.Status);
var severity = evaluation.Severity ?? DetermineSeverity(payload, current?.Severity);
var labels = CloneLabels(evaluation.Labels);
MergeLabels(labels, payload);
var explainRef = evaluation.ExplainRef ?? DetermineExplainRef(payload, current?.ExplainRef);
var rationale = CloneArray(evaluation.Rationale);
if (rationale.Count == 0 && !string.IsNullOrWhiteSpace(explainRef))
{
rationale.Add(explainRef);
}
var updatedAt = record.RecordedAt;
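// Build the projection with an empty cycle hash first, then stamp in the hash computed
// over that provisional state.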
var provisional = new FindingProjection(
record.TenantId,
record.FindingId,
record.PolicyVersion,
status,
severity,
labels,
record.EventId,
explainRef,
rationale,
updatedAt,
string.Empty);
var cycleHash = ProjectionHashing.ComputeCycleHash(provisional);
var projection = provisional with { CycleHash = cycleHash };
var historyEntry = new FindingHistoryEntry(
record.TenantId,
record.FindingId,
record.PolicyVersion,
record.EventId,
projection.Status,
projection.Severity,
record.ActorId,
DetermineComment(payload),
record.OccurredAt);
var actionEntry = CreateActionEntry(record, payload);
return new ProjectionReduceResult(projection, historyEntry, actionEntry);
}
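// Status precedence: explicit payload status, then the current projection status, then an
// event-type default (created falls back to "affected", closed to "closed", accepted-risk to "accepted_risk").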
private static string DetermineStatus(string eventType, JsonObject? payload, string? currentStatus)
{
var candidate = ExtractString(payload, "status") ?? currentStatus;
return eventType switch
{
LedgerEventConstants.EventFindingCreated => candidate ?? "affected",
LedgerEventConstants.EventFindingStatusChanged => candidate ?? currentStatus ?? "affected",
LedgerEventConstants.EventFindingClosed => candidate ?? "closed",
LedgerEventConstants.EventFindingAcceptedRisk => candidate ?? "accepted_risk",
_ => candidate ?? currentStatus ?? "affected"
};
}
private static decimal? DetermineSeverity(JsonObject? payload, decimal? current)
{
if (payload is null)
{
return current;
}
if (payload.TryGetPropertyValue("severity", out var severityNode))
{
if (TryConvertDecimal(severityNode, out var severity))
{
return severity;
}
if (severityNode is JsonValue value && value.TryGetValue(out string? severityString)
&& decimal.TryParse(severityString, out var severityFromString))
{
return severityFromString;
}
}
return current;
}
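// Label merge semantics: entries under 'labels' overwrite existing keys, a null value removes
// the key, and 'labelsRemove' lists additional keys to drop.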
private static void MergeLabels(JsonObject target, JsonObject? payload)
{
if (payload is null)
{
return;
}
if (payload.TryGetPropertyValue("labels", out var labelsNode) && labelsNode is JsonObject labelUpdates)
{
foreach (var property in labelUpdates)
{
if (property.Value is null || property.Value.GetValueKind() == JsonValueKind.Null)
{
target.Remove(property.Key);
}
else
{
target[property.Key] = property.Value.DeepClone();
}
}
}
if (payload.TryGetPropertyValue("labelsRemove", out var removeNode) && removeNode is JsonArray removeArray)
{
foreach (var item in removeArray)
{
if (item is JsonValue value && value.TryGetValue(out string? key) && !string.IsNullOrWhiteSpace(key))
{
target.Remove(key);
}
}
}
}
private static string? DetermineExplainRef(JsonObject? payload, string? current)
{
var explainRef = ExtractString(payload, "explainRef") ?? ExtractString(payload, "explain_ref");
return explainRef ?? current;
}
private static string? DetermineComment(JsonObject? payload)
{
return ExtractString(payload, "comment")
?? ExtractString(payload, "justification")
?? ExtractString(payload, "note");
}
private static TriageActionEntry? CreateActionEntry(LedgerEventRecord record, JsonObject? payload)
{
var actionType = record.EventType switch
{
LedgerEventConstants.EventFindingStatusChanged => "status_change",
LedgerEventConstants.EventFindingCommentAdded => "comment",
LedgerEventConstants.EventFindingAssignmentChanged => "assign",
LedgerEventConstants.EventFindingRemediationPlanAdded => "remediation_plan",
LedgerEventConstants.EventFindingAcceptedRisk => "accept_risk",
LedgerEventConstants.EventFindingAttachmentAdded => "attach_evidence",
LedgerEventConstants.EventFindingClosed => "close",
_ => null
};
if (actionType is null)
{
return null;
}
var payloadClone = payload?.DeepClone()?.AsObject() ?? new JsonObject();
return new TriageActionEntry(
record.TenantId,
record.EventId,
record.EventId,
record.FindingId,
actionType,
payloadClone,
record.RecordedAt,
record.ActorId);
}
private static JsonObject CloneLabels(JsonObject? source)
{
return source is null ? new JsonObject() : (JsonObject)source.DeepClone();
}
private static JsonArray CloneArray(JsonArray source)
{
ArgumentNullException.ThrowIfNull(source);
var clone = new JsonArray();
foreach (var item in source)
{
clone.Add(item?.DeepClone());
}
return clone;
}
private static string? ExtractString(JsonObject? obj, string propertyName)
{
if (obj is null)
{
return null;
}
if (!obj.TryGetPropertyValue(propertyName, out var node) || node is null)
{
return null;
}
if (node is JsonValue value && value.TryGetValue(out string? result))
{
return string.IsNullOrWhiteSpace(result) ? null : result;
}
return node.ToString();
}
private static bool TryConvertDecimal(JsonNode? node, out decimal value)
{
switch (node)
{
case null:
value = default;
return false;
case JsonValue jsonValue when jsonValue.TryGetValue(out decimal decimalValue):
value = decimalValue;
return true;
case JsonValue jsonValue when jsonValue.TryGetValue(out double doubleValue):
value = Convert.ToDecimal(doubleValue);
return true;
default:
if (decimal.TryParse(node.ToString(), out var parsed))
{
value = parsed;
return true;
}
value = default;
return false;
}
}
}
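
The label-merge rules above (overwrite on value, remove on null, plus an explicit `labelsRemove` list) can be exercised on their own with plain `System.Text.Json.Nodes` objects. The following is a minimal standalone sketch that mirrors that behavior for illustration; it is not the reducer's own code path.

using System;
using System.Text.Json;
using System.Text.Json.Nodes;

var current = new JsonObject { ["team"] = "platform", ["waiver"] = "Q3", ["owner"] = "alice" };
var payload = JsonNode.Parse(
    """{ "labels": { "team": "appsec", "waiver": null }, "labelsRemove": ["owner"] }""")!.AsObject();

// Mirror of the MergeLabels rules, inlined for the demo.
if (payload["labels"] is JsonObject updates)
{
    foreach (var property in updates)
    {
        if (property.Value is null || property.Value.GetValueKind() == JsonValueKind.Null)
        {
            current.Remove(property.Key);        // null value => remove the label
        }
        else
        {
            current[property.Key] = property.Value.DeepClone();
        }
    }
}

if (payload["labelsRemove"] is JsonArray removals)
{
    foreach (var item in removals)
    {
        if (item is JsonValue value && value.TryGetValue(out string? key) && !string.IsNullOrWhiteSpace(key))
        {
            current.Remove(key);                 // explicit removal list
        }
    }
}

Console.WriteLine(current.ToJsonString());       // {"team":"appsec"}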