Add unit tests for SBOM ingestion and transformation
Some checks failed
Docs CI / lint-and-preview (push) Has been cancelled
- Implement `SbomIngestServiceCollectionExtensionsTests` to verify the SBOM ingestion pipeline exports snapshots correctly.
- Create `SbomIngestTransformerTests` to ensure the transformation produces the expected nodes and edges, including deduplication of license nodes and normalization of timestamps (a sketch of those two checks follows this list).
- Add `SbomSnapshotExporterTests` to cover export of the manifest, adjacency, nodes, and edges outputs.
- Introduce `VexOverlayTransformerTests` to validate the transformation of VEX nodes and edges.
- Set up the test project file with the necessary dependencies and configuration.
- Include JSON fixture files used by the tests.
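For orientation only, a minimal sketch of the kind of assertions the transformer tests make about license deduplication and timestamp normalization. It is written against xUnit and uses small local stand-ins; `ToLicenseNodes` and the exact normalization rules are illustrative assumptions, not the real `SbomIngestTransformer` API.

```csharp
using System;
using System.Collections.Generic;
using System.Linq;
using Xunit;

public sealed class SbomTransformSketchTests
{
    // Stand-in for the transformer step that turns declared license strings into
    // graph nodes: identical license ids must collapse to a single node.
    private static IReadOnlyList<string> ToLicenseNodes(IEnumerable<string> declaredLicenses) =>
        declaredLicenses
            .Select(l => l.Trim())
            .Distinct(StringComparer.OrdinalIgnoreCase)
            .OrderBy(l => l, StringComparer.Ordinal)
            .ToList();

    [Fact]
    public void DuplicateLicenses_CollapseToOneNode()
    {
        var nodes = ToLicenseNodes(new[] { "Apache-2.0", "apache-2.0 ", "MIT" });

        Assert.Equal(2, nodes.Count);
        Assert.Contains("Apache-2.0", nodes);
    }

    [Fact]
    public void Timestamps_NormalizeToUtc()
    {
        // Stand-in for the timestamp normalization the transformer applies:
        // offsets are folded into UTC so snapshots are byte-stable.
        var local = DateTimeOffset.Parse("2024-05-01T12:00:00+02:00");
        var normalized = local.ToUniversalTime().ToString("yyyy-MM-ddTHH:mm:ssZ");

        Assert.Equal("2024-05-01T10:00:00Z", normalized);
    }
}
```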
@@ -0,0 +1,210 @@
using System.Text.Json.Nodes;
using Microsoft.Extensions.Logging;
using StellaOps.Findings.Ledger.Domain;
using StellaOps.Findings.Ledger.Hashing;
using StellaOps.Findings.Ledger.Infrastructure;

namespace StellaOps.Findings.Ledger.Services;

public interface ILedgerEventWriteService
{
    Task<LedgerWriteResult> AppendAsync(LedgerEventDraft draft, CancellationToken cancellationToken);
}

public sealed class LedgerEventWriteService : ILedgerEventWriteService
{
    private readonly ILedgerEventRepository _repository;
    private readonly IMerkleAnchorScheduler _merkleAnchorScheduler;
    private readonly ILogger<LedgerEventWriteService> _logger;

    public LedgerEventWriteService(
        ILedgerEventRepository repository,
        IMerkleAnchorScheduler merkleAnchorScheduler,
        ILogger<LedgerEventWriteService> logger)
    {
        _repository = repository ?? throw new ArgumentNullException(nameof(repository));
        _merkleAnchorScheduler = merkleAnchorScheduler ?? throw new ArgumentNullException(nameof(merkleAnchorScheduler));
        _logger = logger ?? throw new ArgumentNullException(nameof(logger));
    }

    public async Task<LedgerWriteResult> AppendAsync(LedgerEventDraft draft, CancellationToken cancellationToken)
    {
        var validationErrors = ValidateDraft(draft);
        if (validationErrors.Count > 0)
        {
            return LedgerWriteResult.ValidationFailed([.. validationErrors]);
        }

        var existing = await _repository.GetByEventIdAsync(draft.TenantId, draft.EventId, cancellationToken).ConfigureAwait(false);
        if (existing is not null)
        {
            var canonicalJson = LedgerCanonicalJsonSerializer.Serialize(draft.CanonicalEnvelope);
            if (!string.Equals(existing.CanonicalJson, canonicalJson, StringComparison.Ordinal))
            {
                return LedgerWriteResult.Conflict(
                    "event_id_conflict",
                    $"Event '{draft.EventId}' already exists with a different payload.");
            }

            return LedgerWriteResult.Idempotent(existing);
        }

        var chainHead = await _repository.GetChainHeadAsync(draft.TenantId, draft.ChainId, cancellationToken).ConfigureAwait(false);

        var expectedSequence = chainHead is null ? 1 : chainHead.SequenceNumber + 1;
        if (draft.SequenceNumber != expectedSequence)
        {
            return LedgerWriteResult.Conflict(
                "sequence_mismatch",
                $"Sequence number '{draft.SequenceNumber}' does not match expected '{expectedSequence}'.");
        }

        var previousHash = chainHead?.EventHash ?? LedgerEventConstants.EmptyHash;
        if (draft.ProvidedPreviousHash is not null && !string.Equals(draft.ProvidedPreviousHash, previousHash, StringComparison.OrdinalIgnoreCase))
        {
            return LedgerWriteResult.Conflict(
                "previous_hash_mismatch",
                $"Provided previous hash '{draft.ProvidedPreviousHash}' does not match chain head hash '{previousHash}'.");
        }

        var canonicalEnvelope = LedgerCanonicalJsonSerializer.Canonicalize(draft.CanonicalEnvelope);
        var hashResult = LedgerHashing.ComputeHashes(canonicalEnvelope, draft.SequenceNumber);

        var eventBody = (JsonObject)canonicalEnvelope.DeepClone();
        var record = new LedgerEventRecord(
            draft.TenantId,
            draft.ChainId,
            draft.SequenceNumber,
            draft.EventId,
            draft.EventType,
            draft.PolicyVersion,
            draft.FindingId,
            draft.ArtifactId,
            draft.SourceRunId,
            draft.ActorId,
            draft.ActorType,
            draft.OccurredAt,
            draft.RecordedAt,
            eventBody,
            hashResult.EventHash,
            previousHash,
            hashResult.MerkleLeafHash,
            hashResult.CanonicalJson);

        try
        {
            await _repository.AppendAsync(record, cancellationToken).ConfigureAwait(false);
            await _merkleAnchorScheduler.EnqueueAsync(record, cancellationToken).ConfigureAwait(false);
        }
        catch (Exception ex) when (IsDuplicateKeyException(ex))
        {
            _logger.LogWarning(ex, "Ledger append detected concurrent duplicate for {EventId}", draft.EventId);
            var persisted = await _repository.GetByEventIdAsync(draft.TenantId, draft.EventId, cancellationToken).ConfigureAwait(false);
            if (persisted is null)
            {
                return LedgerWriteResult.Conflict("append_failed", "Ledger append failed due to concurrent write.");
            }

            if (!string.Equals(persisted.CanonicalJson, record.CanonicalJson, StringComparison.Ordinal))
            {
                return LedgerWriteResult.Conflict("event_id_conflict", "Ledger append raced with conflicting payload.");
            }

            return LedgerWriteResult.Idempotent(persisted);
        }

        return LedgerWriteResult.Success(record);
    }

    private static bool IsDuplicateKeyException(Exception exception)
    {
        if (exception is null)
        {
            return false;
        }

        if (exception is LedgerDuplicateEventException)
        {
            return true;
        }

        if (exception.GetType().Name.Contains("Unique", StringComparison.OrdinalIgnoreCase))
        {
            return true;
        }

        if (exception.InnerException is not null)
        {
            return IsDuplicateKeyException(exception.InnerException);
        }

        return false;
    }

    private static List<string> ValidateDraft(LedgerEventDraft draft)
    {
        var errors = new List<string>();
        if (draft is null)
        {
            errors.Add("draft_required");
            return errors;
        }

        if (string.IsNullOrWhiteSpace(draft.TenantId))
        {
            errors.Add("tenant_id_required");
        }

        if (draft.SequenceNumber < 1)
        {
            errors.Add("sequence_must_be_positive");
        }

        if (draft.EventId == Guid.Empty)
        {
            errors.Add("event_id_required");
        }

        if (draft.ChainId == Guid.Empty)
        {
            errors.Add("chain_id_required");
        }

        if (!LedgerEventConstants.SupportedEventTypes.Contains(draft.EventType))
        {
            errors.Add($"event_type_invalid:{draft.EventType}");
        }

        if (!LedgerEventConstants.SupportedActorTypes.Contains(draft.ActorType))
        {
            errors.Add($"actor_type_invalid:{draft.ActorType}");
        }

        if (string.IsNullOrWhiteSpace(draft.PolicyVersion))
        {
            errors.Add("policy_version_required");
        }

        if (string.IsNullOrWhiteSpace(draft.FindingId))
        {
            errors.Add("finding_id_required");
        }

        if (string.IsNullOrWhiteSpace(draft.ArtifactId))
        {
            errors.Add("artifact_id_required");
        }

        if (draft.Payload is null)
        {
            errors.Add("payload_required");
        }

        if (draft.CanonicalEnvelope is null)
        {
            errors.Add("canonical_envelope_required");
        }

        return errors;
    }
}
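For context on what the `previousHash` linkage buys the ledger, a conceptual, self-contained sketch of chain verification is below. The real hashing recipe lives in `LedgerHashing.ComputeHashes` and is not part of this diff; the SHA-256-over-(previousHash + canonicalJson) formula here is an illustrative assumption, as is using an all-zero string for the genesis value that `LedgerEventConstants.EmptyHash` plays in the service.

```csharp
using System;
using System.Collections.Generic;
using System.Security.Cryptography;
using System.Text;

static class ChainLinkSketch
{
    // Illustrative hash recipe: links a record to its predecessor by hashing the
    // previous hash together with the canonical JSON body.
    public static string HashEvent(string previousHash, string canonicalJson)
    {
        var bytes = SHA256.HashData(Encoding.UTF8.GetBytes(previousHash + canonicalJson));
        return Convert.ToHexString(bytes).ToLowerInvariant();
    }

    // Walks (previousHash, canonicalJson, eventHash) tuples in sequence order and
    // confirms each record both points at its predecessor and re-hashes to the
    // stored value, starting from the genesis hash.
    public static bool VerifyChain(
        IReadOnlyList<(string Previous, string Json, string Hash)> records,
        string genesisHash)
    {
        var expectedPrevious = genesisHash;
        foreach (var (previous, json, hash) in records)
        {
            if (previous != expectedPrevious || HashEvent(previous, json) != hash)
            {
                return false;
            }
            expectedPrevious = hash;
        }
        return true;
    }
}
```

Run over the persisted records of a chain in sequence order, a check like this detects any record whose body or linkage was altered after the fact, which is why the service rejects sequence and previous-hash mismatches at append time.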