Add PHP Analyzer Plugin and Composer Lock Data Handling
Some checks failed
Docs CI / lint-and-preview (push) has been cancelled

- Implemented the PhpAnalyzerPlugin to analyze PHP projects.
- Created ComposerLockData class to represent data from composer.lock files.
- Developed ComposerLockReader to load and parse composer.lock files asynchronously.
- Introduced ComposerPackage class to encapsulate package details.
- Added PhpPackage class to represent PHP packages with metadata and evidence.
- Implemented PhpPackageCollector to gather packages from ComposerLockData.
- Created PhpLanguageAnalyzer to perform analysis and emit results.
- Added capability signals for known PHP frameworks and CMS.
- Developed unit tests for the PHP language analyzer and its components.
- Included sample composer.lock and expected output for testing.
- Updated project files for the new PHP analyzer library and tests.
This commit is contained in:
StellaOps Bot
2025-11-22 14:02:49 +02:00
parent a7f3c7869a
commit b6b9ffc050
158 changed files with 16272 additions and 809 deletions

View File

@@ -0,0 +1,98 @@
using Microsoft.Extensions.Logging.Abstractions;
using StellaOps.Findings.Ledger.Infrastructure.AirGap;
using StellaOps.Findings.Ledger.Infrastructure.Exports;
using StellaOps.Findings.Ledger.Infrastructure.InMemory;
using StellaOps.Findings.Ledger.Infrastructure.Merkle;
using StellaOps.Findings.Ledger.Services;
using Xunit;
namespace StellaOps.Findings.Ledger.Tests;
/// <summary>
/// Verifies that <see cref="AirgapImportService"/> and OrchestratorExportService persist
/// records through their repositories and produce the expected ledger event / Merkle root,
/// using purely in-memory collaborators (no database, no real scheduler).
/// </summary>
public sealed class AirgapAndOrchestratorServiceTests
{
    [Fact]
    public async Task AirgapImportService_AppendsLedgerEvent_AndPersistsRecord()
    {
        // Arrange: in-memory ledger + capturing repository; null logging/scheduling.
        var ledgerRepo = new InMemoryLedgerEventRepository();
        var writeService = new LedgerEventWriteService(ledgerRepo, new NullMerkleAnchorScheduler(), NullLogger<LedgerEventWriteService>.Instance);
        var store = new InMemoryAirgapImportRepository();
        var service = new AirgapImportService(ledgerRepo, writeService, store, TimeProvider.System, NullLogger<AirgapImportService>.Instance);

        var input = new AirgapImportInput(
            TenantId: "tenant-a",
            BundleId: "bundle-123",
            MirrorGeneration: "gen-1",
            MerkleRoot: "abc123",
            // Invariant culture keeps the parse deterministic across host locales (CA1305).
            TimeAnchor: DateTimeOffset.Parse("2025-10-10T00:00:00Z", System.Globalization.CultureInfo.InvariantCulture),
            Publisher: "mirror",
            HashAlgorithm: "sha256",
            Contents: new[] { "c1", "c2" },
            ImportOperator: "operator:alice");

        // Act.
        var result = await service.RecordAsync(input, CancellationToken.None);

        // Assert: import succeeded, a ledger event was produced, and the persisted
        // record round-trips the key identifiers.
        Assert.True(result.Success);
        Assert.NotNull(result.LedgerEventId);
        Assert.NotNull(store.LastRecord);
        Assert.Equal(input.BundleId, store.LastRecord!.BundleId);
        Assert.Equal(input.MirrorGeneration, store.LastRecord.MirrorGeneration);
    }

    [Fact]
    public async Task OrchestratorExportService_ComputesMerkleRoot()
    {
        // Arrange.
        var repo = new InMemoryOrchestratorExportRepository();
        var service = new OrchestratorExportService(repo, TimeProvider.System, NullLogger<OrchestratorExportService>.Instance);
        var input = new OrchestratorExportInput(
            TenantId: "tenant-a",
            RunId: Guid.NewGuid(),
            JobType: "export-artifact",
            ArtifactHash: "sha256:artifact",
            PolicyHash: "sha256:policy",
            // Invariant culture keeps the parses deterministic across host locales (CA1305).
            StartedAt: DateTimeOffset.Parse("2025-10-11T00:00:00Z", System.Globalization.CultureInfo.InvariantCulture),
            CompletedAt: DateTimeOffset.Parse("2025-10-11T00:10:00Z", System.Globalization.CultureInfo.InvariantCulture),
            Status: "succeeded",
            ManifestPath: "/exports/manifest.json",
            LogsPath: "/exports/logs.txt");

        // Act.
        var record = await service.RecordAsync(input, CancellationToken.None);

        // Assert: a non-empty Merkle root is computed and the persisted record matches.
        Assert.NotNull(record);
        Assert.False(string.IsNullOrWhiteSpace(record.MerkleRoot));
        Assert.Equal(record.MerkleRoot, repo.LastRecord?.MerkleRoot);
        Assert.Equal(input.ArtifactHash, repo.LastRecord?.ArtifactHash);
    }

    /// <summary>In-memory stand-in that captures the last inserted air-gap import record.</summary>
    private sealed class InMemoryAirgapImportRepository : IAirgapImportRepository
    {
        public AirgapImportRecord? LastRecord { get; private set; }

        public Task InsertAsync(AirgapImportRecord record, CancellationToken cancellationToken)
        {
            LastRecord = record;
            return Task.CompletedTask;
        }
    }

    /// <summary>
    /// In-memory stand-in that captures the last inserted orchestrator export record and
    /// answers artifact queries against it using ordinal hash comparison.
    /// </summary>
    private sealed class InMemoryOrchestratorExportRepository : IOrchestratorExportRepository
    {
        public OrchestratorExportRecord? LastRecord { get; private set; }

        public Task InsertAsync(OrchestratorExportRecord record, CancellationToken cancellationToken)
        {
            LastRecord = record;
            return Task.CompletedTask;
        }

        public Task<IReadOnlyList<OrchestratorExportRecord>> GetByArtifactAsync(string tenantId, string artifactHash, CancellationToken cancellationToken)
        {
            var list = new List<OrchestratorExportRecord>();
            if (LastRecord is not null && string.Equals(LastRecord.ArtifactHash, artifactHash, StringComparison.Ordinal))
            {
                list.Add(LastRecord);
            }
            return Task.FromResult<IReadOnlyList<OrchestratorExportRecord>>(list);
        }
    }
}

View File

@@ -0,0 +1,37 @@
using System.Text.Json.Serialization;
namespace StellaOps.Findings.Ledger.WebService.Contracts;
/// <summary>
/// Request body for <c>POST /internal/ledger/airgap-import</c>: describes an
/// air-gap bundle to be recorded in the findings ledger.
/// </summary>
public sealed record AirgapImportRequest
{
    /// <summary>Identifier of the imported bundle (required).</summary>
    [JsonPropertyName("bundleId")] public required string BundleId { get; init; }

    /// <summary>Mirror generation the bundle belongs to, when known.</summary>
    [JsonPropertyName("mirrorGeneration")] public string? MirrorGeneration { get; init; }

    /// <summary>Merkle root of the bundle contents (required).</summary>
    [JsonPropertyName("merkleRoot")] public required string MerkleRoot { get; init; }

    /// <summary>Time anchor supplied with the bundle (required).</summary>
    [JsonPropertyName("timeAnchor")] public required DateTimeOffset TimeAnchor { get; init; }

    /// <summary>Publisher of the bundle, when known.</summary>
    [JsonPropertyName("publisher")] public string? Publisher { get; init; }

    /// <summary>Hash algorithm used for the bundle, when known.</summary>
    [JsonPropertyName("hashAlgorithm")] public string? HashAlgorithm { get; init; }

    /// <summary>Bundle content identifiers; defaults to an empty array.</summary>
    [JsonPropertyName("contents")] public string[] Contents { get; init; } = Array.Empty<string>();

    /// <summary>Operator who performed the import, when provided.</summary>
    [JsonPropertyName("importOperator")] public string? ImportOperator { get; init; }
}
/// <summary>Response returned after an air-gap import request has been processed.</summary>
/// <param name="ChainId">Ledger chain the import event belongs to.</param>
/// <param name="Sequence">Sequence number of the appended event, when available.</param>
/// <param name="LedgerEventId">Identifier of the appended ledger event, when available.</param>
/// <param name="Status">Outcome status string (the endpoint uses "accepted" on success).</param>
/// <param name="Error">Error detail when the import was not recorded.</param>
public sealed record AirgapImportResponse(Guid ChainId, long? Sequence, Guid? LedgerEventId, string Status, string? Error);

View File

@@ -0,0 +1,37 @@
using System.Text.Json.Serialization;
namespace StellaOps.Findings.Ledger.WebService.Contracts;
/// <summary>
/// Request body for <c>POST /internal/ledger/orchestrator-export</c>: describes an
/// orchestrator export run to be recorded in the findings ledger.
/// </summary>
public sealed record OrchestratorExportRequest
{
    /// <summary>Orchestrator run identifier (required).</summary>
    [JsonPropertyName("runId")] public required Guid RunId { get; init; }

    /// <summary>Type of the export job (required).</summary>
    [JsonPropertyName("jobType")] public required string JobType { get; init; }

    /// <summary>Hash of the exported artifact (required).</summary>
    [JsonPropertyName("artifactHash")] public required string ArtifactHash { get; init; }

    /// <summary>Hash of the policy in effect for the run (required).</summary>
    [JsonPropertyName("policyHash")] public required string PolicyHash { get; init; }

    /// <summary>When the run started (required).</summary>
    [JsonPropertyName("startedAt")] public required DateTimeOffset StartedAt { get; init; }

    /// <summary>When the run completed, when finished.</summary>
    [JsonPropertyName("completedAt")] public DateTimeOffset? CompletedAt { get; init; }

    /// <summary>Run status string (required).</summary>
    [JsonPropertyName("status")] public required string Status { get; init; }

    /// <summary>Path to the export manifest, when available.</summary>
    [JsonPropertyName("manifestPath")] public string? ManifestPath { get; init; }

    /// <summary>Path to the run logs, when available.</summary>
    [JsonPropertyName("logsPath")] public string? LogsPath { get; init; }
}
/// <summary>Response confirming a recorded orchestrator export run.</summary>
/// <param name="RunId">Run identifier echoed from the recorded run.</param>
/// <param name="MerkleRoot">Merkle root computed for the recorded run.</param>
public sealed record OrchestratorExportResponse(Guid RunId, string MerkleRoot);

View File

@@ -12,6 +12,7 @@ using StellaOps.Configuration;
using StellaOps.DependencyInjection;
using StellaOps.Findings.Ledger.Domain;
using StellaOps.Findings.Ledger.Infrastructure;
using StellaOps.Findings.Ledger.Infrastructure.AirGap;
using StellaOps.Findings.Ledger.Infrastructure.Merkle;
using StellaOps.Findings.Ledger.Infrastructure.Postgres;
using StellaOps.Findings.Ledger.Infrastructure.Projection;
@@ -140,6 +141,10 @@ builder.Services.AddSingleton<PolicyEngineEvaluationService>();
builder.Services.AddSingleton<IPolicyEvaluationService>(sp => sp.GetRequiredService<PolicyEngineEvaluationService>());
builder.Services.AddSingleton<ILedgerEventWriteService, LedgerEventWriteService>();
builder.Services.AddSingleton<IFindingWorkflowService, FindingWorkflowService>();
builder.Services.AddSingleton<IOrchestratorExportRepository, PostgresOrchestratorExportRepository>();
builder.Services.AddSingleton<OrchestratorExportService>();
builder.Services.AddSingleton<IAirgapImportRepository, PostgresAirgapImportRepository>();
builder.Services.AddSingleton<AirgapImportService>();
builder.Services.AddSingleton<IAttachmentEncryptionService, AttachmentEncryptionService>();
builder.Services.AddSingleton<IAttachmentUrlSigner, AttachmentUrlSigner>();
builder.Services.AddSingleton<IConsoleCsrfValidator, ConsoleCsrfValidator>();
@@ -300,6 +305,95 @@ app.MapGet("/ledger/export/sboms", () => TypedResults.Json(new ExportPage<SbomEx
.RequireAuthorization(LedgerExportPolicy)
.Produces(StatusCodes.Status200OK);
// Internal ingestion endpoint: records an orchestrator export run in the ledger.
// Tenancy comes from the X-Stella-Tenant header (not route/body); a missing or
// blank header is rejected with 400 "missing_tenant".
app.MapPost("/internal/ledger/orchestrator-export", async Task<Results<Accepted<OrchestratorExportResponse>, ProblemHttpResult>> (
HttpContext httpContext,
OrchestratorExportRequest request,
OrchestratorExportService service,
CancellationToken cancellationToken) =>
{
if (!httpContext.Request.Headers.TryGetValue("X-Stella-Tenant", out var tenantValues) || string.IsNullOrWhiteSpace(tenantValues))
{
return TypedResults.Problem(statusCode: StatusCodes.Status400BadRequest, title: "missing_tenant");
}
var tenantId = tenantValues.ToString();
// Map the wire contract onto the service input, pairing it with the caller's tenant.
var input = new OrchestratorExportInput(
tenantId,
request.RunId,
request.JobType,
request.ArtifactHash,
request.PolicyHash,
request.StartedAt,
request.CompletedAt,
request.Status,
request.ManifestPath,
request.LogsPath);
var record = await service.RecordAsync(input, cancellationToken).ConfigureAwait(false);
var response = new OrchestratorExportResponse(record.RunId, record.MerkleRoot);
// NOTE(review): the Accepted location is keyed by run id, but the matching GET
// route below expects an artifact hash in the same path position — confirm the
// intended lookup key for this location header.
return TypedResults.Accepted($"/internal/ledger/orchestrator-export/{record.RunId}", response);
})
.WithName("OrchestratorExportRecord")
.RequireAuthorization(LedgerWritePolicy)
.Produces(StatusCodes.Status202Accepted)
.ProducesProblem(StatusCodes.Status400BadRequest);
// Internal query endpoint: lists orchestrator export runs recorded for an artifact
// hash within the caller's tenant (X-Stella-Tenant header, required).
app.MapGet("/internal/ledger/orchestrator-export/{artifactHash}", async Task<Results<JsonHttpResult<IReadOnlyList<OrchestratorExportRecord>>, ProblemHttpResult>> (
HttpContext httpContext,
string artifactHash,
OrchestratorExportService service,
CancellationToken cancellationToken) =>
{
if (!httpContext.Request.Headers.TryGetValue("X-Stella-Tenant", out var tenantValues) || string.IsNullOrWhiteSpace(tenantValues))
{
return TypedResults.Problem(statusCode: StatusCodes.Status400BadRequest, title: "missing_tenant");
}
// An unknown artifact hash yields an empty list (200), not a 404.
var records = await service.GetByArtifactAsync(tenantValues.ToString(), artifactHash, cancellationToken).ConfigureAwait(false);
return TypedResults.Json(records);
})
.WithName("OrchestratorExportQuery")
.RequireAuthorization(LedgerExportPolicy)
.Produces(StatusCodes.Status200OK)
.ProducesProblem(StatusCodes.Status400BadRequest);
// Internal ingestion endpoint: records an imported air-gap bundle in the ledger.
// A service-reported failure maps to 409 Conflict carrying the service's error
// detail; success returns 202 with an "accepted" response body.
app.MapPost("/internal/ledger/airgap-import", async Task<Results<Accepted<AirgapImportResponse>, ProblemHttpResult>> (
HttpContext httpContext,
AirgapImportRequest request,
AirgapImportService service,
CancellationToken cancellationToken) =>
{
if (!httpContext.Request.Headers.TryGetValue("X-Stella-Tenant", out var tenantValues) || string.IsNullOrWhiteSpace(tenantValues))
{
return TypedResults.Problem(statusCode: StatusCodes.Status400BadRequest, title: "missing_tenant");
}
var input = new AirgapImportInput(
tenantValues.ToString(),
request.BundleId,
request.MirrorGeneration,
request.MerkleRoot,
request.TimeAnchor,
request.Publisher,
request.HashAlgorithm,
// The contract defaults Contents to an empty array; the coalesce additionally
// guards against a null materialized during deserialization.
request.Contents ?? Array.Empty<string>(),
request.ImportOperator);
var result = await service.RecordAsync(input, cancellationToken).ConfigureAwait(false);
if (!result.Success)
{
return TypedResults.Problem(statusCode: StatusCodes.Status409Conflict, title: "airgap_import_failed", detail: result.Error ?? "Failed to record air-gap import.");
}
var response = new AirgapImportResponse(result.ChainId, result.SequenceNumber, result.LedgerEventId, "accepted", null);
return TypedResults.Accepted($"/internal/ledger/airgap-import/{request.BundleId}", response);
})
.WithName("AirgapImportRecord")
.RequireAuthorization(LedgerWritePolicy)
.Produces(StatusCodes.Status202Accepted)
.ProducesProblem(StatusCodes.Status400BadRequest)
.ProducesProblem(StatusCodes.Status409Conflict);
app.Run();
static Created<LedgerEventResponse> CreateCreatedResponse(LedgerEventRecord record)

View File

@@ -214,7 +214,7 @@ public sealed class AttestationQueryService
sqlBuilder.Append(" LIMIT @take");
parameters.Add(new NpgsqlParameter<int>("take", request.Limit + 1) { NpgsqlDbType = NpgsqlDbType.Integer });
await using var connection = await _dataSource.OpenConnectionAsync(request.TenantId, cancellationToken).ConfigureAwait(false);
await using var connection = await _dataSource.OpenConnectionAsync(request.TenantId, "attestation", cancellationToken).ConfigureAwait(false);
await using var command = new NpgsqlCommand(sqlBuilder.ToString(), connection)
{
CommandTimeout = _dataSource.CommandTimeoutSeconds

View File

@@ -168,7 +168,7 @@ public sealed class ExportQueryService
NpgsqlDbType = NpgsqlDbType.Integer
});
await using var connection = await _dataSource.OpenConnectionAsync(request.TenantId, cancellationToken).ConfigureAwait(false);
await using var connection = await _dataSource.OpenConnectionAsync(request.TenantId, "export", cancellationToken).ConfigureAwait(false);
await using var command = new NpgsqlCommand(sqlBuilder.ToString(), connection)
{
CommandTimeout = _dataSource.CommandTimeoutSeconds

View File

@@ -7,10 +7,15 @@ public static class LedgerChainIdGenerator
{
/// <summary>
/// Derives the deterministic ledger chain id for a tenant/policy pair by delegating
/// to <see cref="FromTenantSubject"/> with the policy version as the subject.
/// </summary>
/// <exception cref="ArgumentException">When either argument is null or whitespace.</exception>
public static Guid FromTenantPolicy(string tenantId, string policyVersion)
{
// Guard here (not only in the delegate) so the reported parameter name matches
// this method's own parameters rather than FromTenantSubject's "subject".
ArgumentException.ThrowIfNullOrWhiteSpace(tenantId);
ArgumentException.ThrowIfNullOrWhiteSpace(policyVersion);
return FromTenantSubject(tenantId, policyVersion);
}
var normalized = $"{tenantId.Trim()}::{policyVersion.Trim()}";
public static Guid FromTenantSubject(string tenantId, string subject)
{
ArgumentException.ThrowIfNullOrWhiteSpace(tenantId);
ArgumentException.ThrowIfNullOrWhiteSpace(subject);
var normalized = $"{tenantId.Trim()}::{subject.Trim()}";
var bytes = Encoding.UTF8.GetBytes(normalized);
Span<byte> guidBytes = stackalloc byte[16];
var hash = SHA256.HashData(bytes);

View File

@@ -14,8 +14,24 @@ public static class LedgerEventConstants
public const string EventFindingRemediationPlanAdded = "finding.remediation_plan_added";
public const string EventFindingAttachmentAdded = "finding.attachment_added";
public const string EventFindingClosed = "finding.closed";
public const string EventAirgapBundleImported = "airgap.bundle_imported";
public const string EventOrchestratorExportRecorded = "orchestrator.export_recorded";
public static readonly ImmutableHashSet<string> SupportedEventTypes = ImmutableHashSet.Create(StringComparer.Ordinal,
EventFindingCreated,
EventFindingStatusChanged,
EventFindingSeverityChanged,
EventFindingTagUpdated,
EventFindingCommentAdded,
EventFindingAssignmentChanged,
EventFindingAcceptedRisk,
EventFindingRemediationPlanAdded,
EventFindingAttachmentAdded,
EventFindingClosed,
EventAirgapBundleImported,
EventOrchestratorExportRecorded);
public static readonly ImmutableHashSet<string> FindingEventTypes = ImmutableHashSet.Create(StringComparer.Ordinal,
EventFindingCreated,
EventFindingStatusChanged,
EventFindingSeverityChanged,
@@ -33,4 +49,6 @@ public static class LedgerEventConstants
"integration");
public const string EmptyHash = "0000000000000000000000000000000000000000000000000000000000000000";
public static bool IsFindingEvent(string eventType) => FindingEventTypes.Contains(eventType);
}

View File

@@ -8,6 +8,11 @@ public sealed record FindingProjection(
string PolicyVersion,
string Status,
decimal? Severity,
decimal? RiskScore,
string? RiskSeverity,
string? RiskProfileVersion,
Guid? RiskExplanationId,
long? RiskEventSequence,
JsonObject Labels,
Guid CurrentEventId,
string? ExplainRef,

View File

@@ -0,0 +1,16 @@
using System.Text.Json.Nodes;
namespace StellaOps.Findings.Ledger.Infrastructure.AirGap;
/// <summary>
/// Persistence model for one recorded air-gap bundle import
/// (a row in the <c>airgap_imports</c> table).
/// </summary>
/// <param name="TenantId">Owning tenant.</param>
/// <param name="BundleId">Identifier of the imported bundle.</param>
/// <param name="MirrorGeneration">Mirror generation, when known.</param>
/// <param name="MerkleRoot">Merkle root of the bundle contents.</param>
/// <param name="TimeAnchor">Time anchor supplied with the bundle.</param>
/// <param name="Publisher">Bundle publisher, when known.</param>
/// <param name="HashAlgorithm">Hash algorithm used for the bundle, when known.</param>
/// <param name="Contents">Bundle content entries as a JSON array.</param>
/// <param name="ImportedAt">When the import was recorded.</param>
/// <param name="ImportOperator">Operator who performed the import, when known.</param>
/// <param name="LedgerEventId">Ledger event appended for this import, when one exists.</param>
public sealed record AirgapImportRecord(
string TenantId,
string BundleId,
string? MirrorGeneration,
string MerkleRoot,
DateTimeOffset TimeAnchor,
string? Publisher,
string? HashAlgorithm,
JsonArray Contents,
DateTimeOffset ImportedAt,
string? ImportOperator,
Guid? LedgerEventId);

View File

@@ -0,0 +1,6 @@
namespace StellaOps.Findings.Ledger.Infrastructure.AirGap;
/// <summary>Persistence boundary for air-gap bundle import records.</summary>
public interface IAirgapImportRepository
{
/// <summary>
/// Persists <paramref name="record"/>. The Postgres implementation upserts,
/// so re-importing the same bundle/time anchor refreshes the stored row.
/// </summary>
Task InsertAsync(AirgapImportRecord record, CancellationToken cancellationToken);
}

View File

@@ -0,0 +1,8 @@
namespace StellaOps.Findings.Ledger.Infrastructure.Exports;
/// <summary>Persistence boundary for orchestrator export run records.</summary>
public interface IOrchestratorExportRepository
{
/// <summary>Persists a single export run record.</summary>
Task InsertAsync(OrchestratorExportRecord record, CancellationToken cancellationToken);
/// <summary>
/// Returns the export runs recorded for <paramref name="artifactHash"/> within
/// <paramref name="tenantId"/>; an unknown hash yields an empty list.
/// </summary>
Task<IReadOnlyList<OrchestratorExportRecord>> GetByArtifactAsync(string tenantId, string artifactHash, CancellationToken cancellationToken);
}

View File

@@ -0,0 +1,15 @@
namespace StellaOps.Findings.Ledger.Infrastructure.Exports;
/// <summary>
/// Persistence model for one orchestrator export run, including the Merkle root
/// computed by the export service when the run was recorded.
/// </summary>
/// <param name="TenantId">Owning tenant.</param>
/// <param name="RunId">Orchestrator run identifier.</param>
/// <param name="JobType">Type of the export job.</param>
/// <param name="ArtifactHash">Hash of the exported artifact.</param>
/// <param name="PolicyHash">Hash of the policy in effect for the run.</param>
/// <param name="StartedAt">Run start time.</param>
/// <param name="CompletedAt">Run completion time, when finished.</param>
/// <param name="Status">Run status string.</param>
/// <param name="ManifestPath">Path to the export manifest, when available.</param>
/// <param name="LogsPath">Path to the run logs, when available.</param>
/// <param name="MerkleRoot">Merkle root computed for the run.</param>
/// <param name="CreatedAt">When this record was created.</param>
public sealed record OrchestratorExportRecord(
string TenantId,
Guid RunId,
string JobType,
string ArtifactHash,
string PolicyHash,
DateTimeOffset StartedAt,
DateTimeOffset? CompletedAt,
string Status,
string? ManifestPath,
string? LogsPath,
string MerkleRoot,
DateTimeOffset CreatedAt);

View File

@@ -1,5 +1,6 @@
using System.Threading.Channels;
using StellaOps.Findings.Ledger.Domain;
using StellaOps.Findings.Ledger.Observability;
namespace StellaOps.Findings.Ledger.Infrastructure.Merkle;
@@ -18,7 +19,11 @@ public sealed class LedgerAnchorQueue
}
/// <summary>
/// Enqueues a ledger event for Merkle anchoring and bumps the backlog gauge.
/// </summary>
/// <remarks>
/// The backlog counter is incremented only after the channel write completes, so a
/// cancelled or faulted write cannot inflate the gauge (the eager pre-write
/// increment over-counted in that case). The worker decrements on dequeue.
/// </remarks>
public async ValueTask EnqueueAsync(LedgerEventRecord record, CancellationToken cancellationToken)
{
    await _channel.Writer.WriteAsync(record, cancellationToken).ConfigureAwait(false);
    LedgerMetrics.IncrementBacklog();
}
/// <summary>
/// Streams enqueued ledger events as they become available; the sequence ends when
/// the channel completes or <paramref name="cancellationToken"/> is cancelled.
/// </summary>
public IAsyncEnumerable<LedgerEventRecord> ReadAllAsync(CancellationToken cancellationToken)
=> _channel.Reader.ReadAllAsync(cancellationToken);

View File

@@ -1,8 +1,10 @@
using System.Collections.Concurrent;
using System.Diagnostics;
using Microsoft.Extensions.Hosting;
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Options;
using StellaOps.Findings.Ledger.Domain;
using StellaOps.Findings.Ledger.Observability;
using StellaOps.Findings.Ledger.Options;
using TimeProvider = System.TimeProvider;
@@ -35,6 +37,7 @@ public sealed class LedgerMerkleAnchorWorker : BackgroundService
{
await foreach (var record in _queue.ReadAllAsync(stoppingToken))
{
LedgerMetrics.DecrementBacklog();
await HandleEventAsync(record, stoppingToken).ConfigureAwait(false);
}
}
@@ -80,6 +83,7 @@ public sealed class LedgerMerkleAnchorWorker : BackgroundService
try
{
var stopwatch = Stopwatch.StartNew();
var orderedEvents = batch.Events
.OrderBy(e => e.SequenceNumber)
.ThenBy(e => e.RecordedAt)
@@ -106,10 +110,13 @@ public sealed class LedgerMerkleAnchorWorker : BackgroundService
anchoredAt,
anchorReference: null,
cancellationToken).ConfigureAwait(false);
stopwatch.Stop();
LedgerMetrics.RecordMerkleAnchorDuration(stopwatch.Elapsed, tenantId, leafCount);
}
catch (Exception ex) when (!cancellationToken.IsCancellationRequested)
{
_logger.LogError(ex, "Failed to persist Merkle anchor for tenant {TenantId}.", tenantId);
LedgerMetrics.RecordMerkleAnchorFailure(tenantId, ex.GetType().Name);
}
}

View File

@@ -1,6 +1,8 @@
using System.Data;
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Options;
using Npgsql;
using StellaOps.Findings.Ledger.Observability;
using StellaOps.Findings.Ledger.Options;
namespace StellaOps.Findings.Ledger.Infrastructure.Postgres;
@@ -31,15 +33,26 @@ public sealed class LedgerDataSource : IAsyncDisposable
}
public Task<NpgsqlConnection> OpenConnectionAsync(string tenantId, CancellationToken cancellationToken)
=> OpenConnectionInternalAsync(tenantId, cancellationToken);
=> OpenConnectionInternalAsync(tenantId, "unspecified", cancellationToken);
private async Task<NpgsqlConnection> OpenConnectionInternalAsync(string tenantId, CancellationToken cancellationToken)
/// <summary>
/// Opens a tenant-scoped connection tagged with a logical <paramref name="role"/>
/// (e.g. "writer", "projector") used to label connection-usage metrics.
/// </summary>
public Task<NpgsqlConnection> OpenConnectionAsync(string tenantId, string role, CancellationToken cancellationToken)
=> OpenConnectionInternalAsync(tenantId, role, cancellationToken);
private async Task<NpgsqlConnection> OpenConnectionInternalAsync(string tenantId, string role, CancellationToken cancellationToken)
{
var connection = await _dataSource.OpenConnectionAsync(cancellationToken).ConfigureAwait(false);
try
{
await ConfigureSessionAsync(connection, tenantId, cancellationToken).ConfigureAwait(false);
LedgerMetrics.ConnectionOpened(role);
connection.StateChange += (_, args) =>
{
if (args.CurrentState == ConnectionState.Closed)
{
LedgerMetrics.ConnectionClosed(role);
}
};
}
catch
{

View File

@@ -0,0 +1,94 @@
using System.Text.Json.Nodes;
using Microsoft.Extensions.Logging;
using Npgsql;
using NpgsqlTypes;
using StellaOps.Findings.Ledger.Hashing;
using StellaOps.Findings.Ledger.Infrastructure.AirGap;
namespace StellaOps.Findings.Ledger.Infrastructure.Postgres;
/// <summary>
/// Postgres-backed store for air-gap bundle import records. Inserts are idempotent:
/// re-importing the same (tenant, bundle, time anchor) refreshes the stored row.
/// </summary>
public sealed class PostgresAirgapImportRepository : IAirgapImportRepository
{
// Upsert keyed on (tenant_id, bundle_id, time_anchor); a conflict refreshes the
// mutable columns. NOTE(review): mirror_generation is not refreshed on conflict —
// confirm that is intentional.
private const string InsertSql = """
INSERT INTO airgap_imports (
tenant_id,
bundle_id,
mirror_generation,
merkle_root,
time_anchor,
publisher,
hash_algorithm,
contents,
imported_at,
import_operator,
ledger_event_id)
VALUES (
@tenant_id,
@bundle_id,
@mirror_generation,
@merkle_root,
@time_anchor,
@publisher,
@hash_algorithm,
@contents,
@imported_at,
@import_operator,
@ledger_event_id)
ON CONFLICT (tenant_id, bundle_id, time_anchor)
DO UPDATE SET
merkle_root = EXCLUDED.merkle_root,
publisher = EXCLUDED.publisher,
hash_algorithm = EXCLUDED.hash_algorithm,
contents = EXCLUDED.contents,
imported_at = EXCLUDED.imported_at,
import_operator = EXCLUDED.import_operator,
ledger_event_id = EXCLUDED.ledger_event_id;
""";
private readonly LedgerDataSource _dataSource;
private readonly ILogger<PostgresAirgapImportRepository> _logger;
/// <summary>Creates the repository over the shared ledger data source.</summary>
public PostgresAirgapImportRepository(
LedgerDataSource dataSource,
ILogger<PostgresAirgapImportRepository> logger)
{
_dataSource = dataSource ?? throw new ArgumentNullException(nameof(dataSource));
_logger = logger ?? throw new ArgumentNullException(nameof(logger));
}
/// <summary>
/// Upserts <paramref name="record"/> into <c>airgap_imports</c>, canonicalizing the
/// contents JSON first so equivalent payloads serialize byte-identically.
/// </summary>
/// <exception cref="ArgumentNullException">When <paramref name="record"/> is null.</exception>
/// <exception cref="PostgresException">Re-thrown after logging when the statement fails.</exception>
public async Task InsertAsync(AirgapImportRecord record, CancellationToken cancellationToken)
{
ArgumentNullException.ThrowIfNull(record);
// Canonical JSON keeps stored contents stable regardless of the caller's key order.
var canonicalContents = LedgerCanonicalJsonSerializer.Canonicalize(record.Contents);
var contentsJson = canonicalContents.ToJsonString();
// "airgap-import" tags the connection for per-role usage metrics.
await using var connection = await _dataSource.OpenConnectionAsync(record.TenantId, "airgap-import", cancellationToken).ConfigureAwait(false);
await using var command = new NpgsqlCommand(InsertSql, connection)
{
CommandTimeout = _dataSource.CommandTimeoutSeconds
};
// All values are bound as typed parameters — nothing is interpolated into the SQL.
command.Parameters.Add(new NpgsqlParameter<string>("tenant_id", record.TenantId) { NpgsqlDbType = NpgsqlDbType.Text });
command.Parameters.Add(new NpgsqlParameter<string>("bundle_id", record.BundleId) { NpgsqlDbType = NpgsqlDbType.Text });
command.Parameters.Add(new NpgsqlParameter<string?>("mirror_generation", record.MirrorGeneration) { NpgsqlDbType = NpgsqlDbType.Text });
command.Parameters.Add(new NpgsqlParameter<string>("merkle_root", record.MerkleRoot) { NpgsqlDbType = NpgsqlDbType.Text });
command.Parameters.Add(new NpgsqlParameter<DateTimeOffset>("time_anchor", record.TimeAnchor) { NpgsqlDbType = NpgsqlDbType.TimestampTz });
command.Parameters.Add(new NpgsqlParameter<string?>("publisher", record.Publisher) { NpgsqlDbType = NpgsqlDbType.Text });
command.Parameters.Add(new NpgsqlParameter<string?>("hash_algorithm", record.HashAlgorithm) { NpgsqlDbType = NpgsqlDbType.Text });
command.Parameters.Add(new NpgsqlParameter<string>("contents", contentsJson) { NpgsqlDbType = NpgsqlDbType.Jsonb });
command.Parameters.Add(new NpgsqlParameter<DateTimeOffset>("imported_at", record.ImportedAt) { NpgsqlDbType = NpgsqlDbType.TimestampTz });
command.Parameters.Add(new NpgsqlParameter<string?>("import_operator", record.ImportOperator) { NpgsqlDbType = NpgsqlDbType.Text });
command.Parameters.Add(new NpgsqlParameter<Guid?>("ledger_event_id", record.LedgerEventId) { NpgsqlDbType = NpgsqlDbType.Uuid });
try
{
await command.ExecuteNonQueryAsync(cancellationToken).ConfigureAwait(false);
}
catch (PostgresException ex)
{
// Log with identifying keys, then rethrow so the caller sees the failure.
_logger.LogError(ex, "Failed to insert air-gap import for tenant {TenantId} bundle {BundleId}.", record.TenantId, record.BundleId);
throw;
}
}
}

View File

@@ -12,6 +12,11 @@ public sealed class PostgresFindingProjectionRepository : IFindingProjectionRepo
private const string GetProjectionSql = """
SELECT status,
severity,
risk_score,
risk_severity,
risk_profile_version,
risk_explanation_id,
risk_event_sequence,
labels,
current_event_id,
explain_ref,
@@ -31,6 +36,11 @@ public sealed class PostgresFindingProjectionRepository : IFindingProjectionRepo
policy_version,
status,
severity,
risk_score,
risk_severity,
risk_profile_version,
risk_explanation_id,
risk_event_sequence,
labels,
current_event_id,
explain_ref,
@@ -43,6 +53,11 @@ public sealed class PostgresFindingProjectionRepository : IFindingProjectionRepo
@policy_version,
@status,
@severity,
@risk_score,
@risk_severity,
@risk_profile_version,
@risk_explanation_id,
@risk_event_sequence,
@labels,
@current_event_id,
@explain_ref,
@@ -53,6 +68,11 @@ public sealed class PostgresFindingProjectionRepository : IFindingProjectionRepo
DO UPDATE SET
status = EXCLUDED.status,
severity = EXCLUDED.severity,
risk_score = EXCLUDED.risk_score,
risk_severity = EXCLUDED.risk_severity,
risk_profile_version = EXCLUDED.risk_profile_version,
risk_explanation_id = EXCLUDED.risk_explanation_id,
risk_event_sequence = EXCLUDED.risk_event_sequence,
labels = EXCLUDED.labels,
current_event_id = EXCLUDED.current_event_id,
explain_ref = EXCLUDED.explain_ref,
@@ -153,7 +173,7 @@ public sealed class PostgresFindingProjectionRepository : IFindingProjectionRepo
public async Task<FindingProjection?> GetAsync(string tenantId, string findingId, string policyVersion, CancellationToken cancellationToken)
{
await using var connection = await _dataSource.OpenConnectionAsync(tenantId, cancellationToken).ConfigureAwait(false);
await using var connection = await _dataSource.OpenConnectionAsync(tenantId, "projector", cancellationToken).ConfigureAwait(false);
await using var command = new NpgsqlCommand(GetProjectionSql, connection);
command.CommandTimeout = _dataSource.CommandTimeoutSeconds;
command.Parameters.AddWithValue("tenant_id", tenantId);
@@ -168,11 +188,16 @@ public sealed class PostgresFindingProjectionRepository : IFindingProjectionRepo
var status = reader.GetString(0);
var severity = reader.IsDBNull(1) ? (decimal?)null : reader.GetDecimal(1);
var labelsJson = reader.GetFieldValue<string>(2);
var riskScore = reader.IsDBNull(2) ? (decimal?)null : reader.GetDecimal(2);
var riskSeverity = reader.IsDBNull(3) ? null : reader.GetString(3);
var riskProfileVersion = reader.IsDBNull(4) ? null : reader.GetString(4);
var riskExplanationId = reader.IsDBNull(5) ? (Guid?)null : reader.GetGuid(5);
var riskEventSequence = reader.IsDBNull(6) ? (long?)null : reader.GetInt64(6);
var labelsJson = reader.GetFieldValue<string>(7);
var labels = JsonNode.Parse(labelsJson)?.AsObject() ?? new JsonObject();
var currentEventId = reader.GetGuid(3);
var explainRef = reader.IsDBNull(4) ? null : reader.GetString(4);
var rationaleJson = reader.IsDBNull(5) ? string.Empty : reader.GetFieldValue<string>(5);
var currentEventId = reader.GetGuid(8);
var explainRef = reader.IsDBNull(9) ? null : reader.GetString(9);
var rationaleJson = reader.IsDBNull(10) ? string.Empty : reader.GetFieldValue<string>(10);
JsonArray rationale;
if (string.IsNullOrWhiteSpace(rationaleJson))
{
@@ -182,8 +207,8 @@ public sealed class PostgresFindingProjectionRepository : IFindingProjectionRepo
{
rationale = JsonNode.Parse(rationaleJson) as JsonArray ?? new JsonArray();
}
var updatedAt = reader.GetFieldValue<DateTimeOffset>(6);
var cycleHash = reader.GetString(7);
var updatedAt = reader.GetFieldValue<DateTimeOffset>(11);
var cycleHash = reader.GetString(12);
return new FindingProjection(
tenantId,
@@ -191,6 +216,11 @@ public sealed class PostgresFindingProjectionRepository : IFindingProjectionRepo
policyVersion,
status,
severity,
riskScore,
riskSeverity,
riskProfileVersion,
riskExplanationId,
riskEventSequence,
labels,
currentEventId,
explainRef,
@@ -203,7 +233,7 @@ public sealed class PostgresFindingProjectionRepository : IFindingProjectionRepo
{
ArgumentNullException.ThrowIfNull(projection);
await using var connection = await _dataSource.OpenConnectionAsync(projection.TenantId, cancellationToken).ConfigureAwait(false);
await using var connection = await _dataSource.OpenConnectionAsync(projection.TenantId, "projector", cancellationToken).ConfigureAwait(false);
await using var command = new NpgsqlCommand(UpsertProjectionSql, connection);
command.CommandTimeout = _dataSource.CommandTimeoutSeconds;
@@ -212,6 +242,11 @@ public sealed class PostgresFindingProjectionRepository : IFindingProjectionRepo
command.Parameters.AddWithValue("policy_version", projection.PolicyVersion);
command.Parameters.AddWithValue("status", projection.Status);
command.Parameters.AddWithValue("severity", projection.Severity.HasValue ? projection.Severity.Value : (object)DBNull.Value);
command.Parameters.AddWithValue("risk_score", projection.RiskScore.HasValue ? projection.RiskScore.Value : (object)DBNull.Value);
command.Parameters.AddWithValue("risk_severity", projection.RiskSeverity ?? (object)DBNull.Value);
command.Parameters.AddWithValue("risk_profile_version", projection.RiskProfileVersion ?? (object)DBNull.Value);
command.Parameters.AddWithValue("risk_explanation_id", projection.RiskExplanationId ?? (object)DBNull.Value);
command.Parameters.AddWithValue("risk_event_sequence", projection.RiskEventSequence.HasValue ? projection.RiskEventSequence.Value : (object)DBNull.Value);
var labelsCanonical = LedgerCanonicalJsonSerializer.Canonicalize(projection.Labels);
var labelsJson = labelsCanonical.ToJsonString();
@@ -233,7 +268,7 @@ public sealed class PostgresFindingProjectionRepository : IFindingProjectionRepo
{
ArgumentNullException.ThrowIfNull(entry);
await using var connection = await _dataSource.OpenConnectionAsync(entry.TenantId, cancellationToken).ConfigureAwait(false);
await using var connection = await _dataSource.OpenConnectionAsync(entry.TenantId, "projector", cancellationToken).ConfigureAwait(false);
await using var command = new NpgsqlCommand(InsertHistorySql, connection);
command.CommandTimeout = _dataSource.CommandTimeoutSeconds;
@@ -254,7 +289,7 @@ public sealed class PostgresFindingProjectionRepository : IFindingProjectionRepo
{
ArgumentNullException.ThrowIfNull(entry);
await using var connection = await _dataSource.OpenConnectionAsync(entry.TenantId, cancellationToken).ConfigureAwait(false);
await using var connection = await _dataSource.OpenConnectionAsync(entry.TenantId, "projector", cancellationToken).ConfigureAwait(false);
await using var command = new NpgsqlCommand(InsertActionSql, connection);
command.CommandTimeout = _dataSource.CommandTimeoutSeconds;
@@ -275,7 +310,7 @@ public sealed class PostgresFindingProjectionRepository : IFindingProjectionRepo
public async Task<ProjectionCheckpoint> GetCheckpointAsync(CancellationToken cancellationToken)
{
await using var connection = await _dataSource.OpenConnectionAsync(string.Empty, cancellationToken).ConfigureAwait(false);
await using var connection = await _dataSource.OpenConnectionAsync(string.Empty, "projector", cancellationToken).ConfigureAwait(false);
await using var command = new NpgsqlCommand(SelectCheckpointSql, connection);
command.CommandTimeout = _dataSource.CommandTimeoutSeconds;
command.Parameters.AddWithValue("worker_id", DefaultWorkerId);
@@ -296,7 +331,7 @@ public sealed class PostgresFindingProjectionRepository : IFindingProjectionRepo
{
ArgumentNullException.ThrowIfNull(checkpoint);
await using var connection = await _dataSource.OpenConnectionAsync(string.Empty, cancellationToken).ConfigureAwait(false);
await using var connection = await _dataSource.OpenConnectionAsync(string.Empty, "projector", cancellationToken).ConfigureAwait(false);
await using var command = new NpgsqlCommand(UpsertCheckpointSql, connection);
command.CommandTimeout = _dataSource.CommandTimeoutSeconds;

View File

@@ -96,7 +96,7 @@ public sealed class PostgresLedgerEventRepository : ILedgerEventRepository
public async Task<LedgerEventRecord?> GetByEventIdAsync(string tenantId, Guid eventId, CancellationToken cancellationToken)
{
await using var connection = await _dataSource.OpenConnectionAsync(tenantId, cancellationToken).ConfigureAwait(false);
await using var connection = await _dataSource.OpenConnectionAsync(tenantId, "writer-read", cancellationToken).ConfigureAwait(false);
await using var command = new NpgsqlCommand(SelectByEventIdSql, connection);
command.CommandTimeout = _dataSource.CommandTimeoutSeconds;
command.Parameters.AddWithValue("tenant_id", tenantId);
@@ -113,7 +113,7 @@ public sealed class PostgresLedgerEventRepository : ILedgerEventRepository
public async Task<LedgerChainHead?> GetChainHeadAsync(string tenantId, Guid chainId, CancellationToken cancellationToken)
{
await using var connection = await _dataSource.OpenConnectionAsync(tenantId, cancellationToken).ConfigureAwait(false);
await using var connection = await _dataSource.OpenConnectionAsync(tenantId, "writer-read", cancellationToken).ConfigureAwait(false);
await using var command = new NpgsqlCommand(SelectChainHeadSql, connection);
command.CommandTimeout = _dataSource.CommandTimeoutSeconds;
command.Parameters.AddWithValue("tenant_id", tenantId);
@@ -133,7 +133,7 @@ public sealed class PostgresLedgerEventRepository : ILedgerEventRepository
public async Task AppendAsync(LedgerEventRecord record, CancellationToken cancellationToken)
{
await using var connection = await _dataSource.OpenConnectionAsync(record.TenantId, cancellationToken).ConfigureAwait(false);
await using var connection = await _dataSource.OpenConnectionAsync(record.TenantId, "writer", cancellationToken).ConfigureAwait(false);
await using var command = new NpgsqlCommand(InsertEventSql, connection);
command.CommandTimeout = _dataSource.CommandTimeoutSeconds;
@@ -236,7 +236,7 @@ public sealed class PostgresLedgerEventRepository : ILedgerEventRepository
ORDER BY recorded_at DESC
""";
await using var connection = await _dataSource.OpenConnectionAsync(tenantId, cancellationToken).ConfigureAwait(false);
await using var connection = await _dataSource.OpenConnectionAsync(tenantId, "writer-read", cancellationToken).ConfigureAwait(false);
await using var command = new NpgsqlCommand(sql, connection);
command.CommandTimeout = _dataSource.CommandTimeoutSeconds;
command.Parameters.AddWithValue("tenant_id", tenantId);

View File

@@ -57,7 +57,7 @@ public sealed class PostgresLedgerEventStream : ILedgerEventStream
var records = new List<LedgerEventRecord>(batchSize);
await using var connection = await _dataSource.OpenConnectionAsync(string.Empty, cancellationToken).ConfigureAwait(false);
await using var connection = await _dataSource.OpenConnectionAsync(string.Empty, "projector", cancellationToken).ConfigureAwait(false);
await using var command = new NpgsqlCommand(ReadEventsSql, connection);
command.CommandTimeout = _dataSource.CommandTimeoutSeconds;
command.Parameters.AddWithValue("last_recorded_at", checkpoint.LastRecordedAt);

View File

@@ -55,7 +55,7 @@ public sealed class PostgresMerkleAnchorRepository : IMerkleAnchorRepository
string? anchorReference,
CancellationToken cancellationToken)
{
await using var connection = await _dataSource.OpenConnectionAsync(tenantId, cancellationToken).ConfigureAwait(false);
await using var connection = await _dataSource.OpenConnectionAsync(tenantId, "anchor", cancellationToken).ConfigureAwait(false);
await using var command = new NpgsqlCommand(InsertAnchorSql, connection);
command.CommandTimeout = _dataSource.CommandTimeoutSeconds;

View File

@@ -0,0 +1,146 @@
using Microsoft.Extensions.Logging;
using Npgsql;
using NpgsqlTypes;
using StellaOps.Findings.Ledger.Infrastructure.Exports;
namespace StellaOps.Findings.Ledger.Infrastructure.Postgres;
public sealed class PostgresOrchestratorExportRepository : IOrchestratorExportRepository
{
private const string UpsertSql = """
INSERT INTO orchestrator_exports (
tenant_id,
run_id,
job_type,
artifact_hash,
policy_hash,
started_at,
completed_at,
status,
manifest_path,
logs_path,
merkle_root,
created_at)
VALUES (
@tenant_id,
@run_id,
@job_type,
@artifact_hash,
@policy_hash,
@started_at,
@completed_at,
@status,
@manifest_path,
@logs_path,
@merkle_root,
@created_at)
ON CONFLICT (tenant_id, run_id)
DO UPDATE SET
job_type = EXCLUDED.job_type,
artifact_hash = EXCLUDED.artifact_hash,
policy_hash = EXCLUDED.policy_hash,
started_at = EXCLUDED.started_at,
completed_at = EXCLUDED.completed_at,
status = EXCLUDED.status,
manifest_path = EXCLUDED.manifest_path,
logs_path = EXCLUDED.logs_path,
merkle_root = EXCLUDED.merkle_root,
created_at = EXCLUDED.created_at;
""";
private const string SelectByArtifactSql = """
SELECT run_id,
job_type,
artifact_hash,
policy_hash,
started_at,
completed_at,
status,
manifest_path,
logs_path,
merkle_root,
created_at
FROM orchestrator_exports
WHERE tenant_id = @tenant_id
AND artifact_hash = @artifact_hash
ORDER BY completed_at DESC NULLS LAST, started_at DESC;
""";
private readonly LedgerDataSource _dataSource;
private readonly ILogger<PostgresOrchestratorExportRepository> _logger;
public PostgresOrchestratorExportRepository(
LedgerDataSource dataSource,
ILogger<PostgresOrchestratorExportRepository> logger)
{
_dataSource = dataSource ?? throw new ArgumentNullException(nameof(dataSource));
_logger = logger ?? throw new ArgumentNullException(nameof(logger));
}
public async Task InsertAsync(OrchestratorExportRecord record, CancellationToken cancellationToken)
{
ArgumentNullException.ThrowIfNull(record);
await using var connection = await _dataSource.OpenConnectionAsync(record.TenantId, "orchestrator-export", cancellationToken).ConfigureAwait(false);
await using var command = new NpgsqlCommand(UpsertSql, connection)
{
CommandTimeout = _dataSource.CommandTimeoutSeconds
};
command.Parameters.Add(new NpgsqlParameter<string>("tenant_id", record.TenantId) { NpgsqlDbType = NpgsqlDbType.Text });
command.Parameters.Add(new NpgsqlParameter<Guid>("run_id", record.RunId) { NpgsqlDbType = NpgsqlDbType.Uuid });
command.Parameters.Add(new NpgsqlParameter<string>("job_type", record.JobType) { NpgsqlDbType = NpgsqlDbType.Text });
command.Parameters.Add(new NpgsqlParameter<string>("artifact_hash", record.ArtifactHash) { NpgsqlDbType = NpgsqlDbType.Text });
command.Parameters.Add(new NpgsqlParameter<string>("policy_hash", record.PolicyHash) { NpgsqlDbType = NpgsqlDbType.Text });
command.Parameters.Add(new NpgsqlParameter<DateTimeOffset>("started_at", record.StartedAt) { NpgsqlDbType = NpgsqlDbType.TimestampTz });
command.Parameters.Add(new NpgsqlParameter<DateTimeOffset?>("completed_at", record.CompletedAt) { NpgsqlDbType = NpgsqlDbType.TimestampTz });
command.Parameters.Add(new NpgsqlParameter<string>("status", record.Status) { NpgsqlDbType = NpgsqlDbType.Text });
command.Parameters.Add(new NpgsqlParameter<string?>("manifest_path", record.ManifestPath) { NpgsqlDbType = NpgsqlDbType.Text });
command.Parameters.Add(new NpgsqlParameter<string?>("logs_path", record.LogsPath) { NpgsqlDbType = NpgsqlDbType.Text });
command.Parameters.Add(new NpgsqlParameter<string>("merkle_root", record.MerkleRoot) { NpgsqlDbType = NpgsqlDbType.Char });
command.Parameters.Add(new NpgsqlParameter<DateTimeOffset>("created_at", record.CreatedAt) { NpgsqlDbType = NpgsqlDbType.TimestampTz });
try
{
await command.ExecuteNonQueryAsync(cancellationToken).ConfigureAwait(false);
}
catch (PostgresException ex)
{
_logger.LogError(ex, "Failed to upsert orchestrator export for tenant {TenantId} run {RunId}.", record.TenantId, record.RunId);
throw;
}
}
public async Task<IReadOnlyList<OrchestratorExportRecord>> GetByArtifactAsync(string tenantId, string artifactHash, CancellationToken cancellationToken)
{
var results = new List<OrchestratorExportRecord>();
await using var connection = await _dataSource.OpenConnectionAsync(tenantId, "orchestrator-export", cancellationToken).ConfigureAwait(false);
await using var command = new NpgsqlCommand(SelectByArtifactSql, connection)
{
CommandTimeout = _dataSource.CommandTimeoutSeconds
};
command.Parameters.Add(new NpgsqlParameter<string>("tenant_id", tenantId) { NpgsqlDbType = NpgsqlDbType.Text });
command.Parameters.Add(new NpgsqlParameter<string>("artifact_hash", artifactHash) { NpgsqlDbType = NpgsqlDbType.Text });
await using var reader = await command.ExecuteReaderAsync(cancellationToken).ConfigureAwait(false);
while (await reader.ReadAsync(cancellationToken).ConfigureAwait(false))
{
results.Add(new OrchestratorExportRecord(
TenantId: tenantId,
RunId: reader.GetGuid(0),
JobType: reader.GetString(1),
ArtifactHash: reader.GetString(2),
PolicyHash: reader.GetString(3),
StartedAt: reader.GetFieldValue<DateTimeOffset>(4),
CompletedAt: reader.IsDBNull(5) ? (DateTimeOffset?)null : reader.GetFieldValue<DateTimeOffset>(5),
Status: reader.GetString(6),
ManifestPath: reader.IsDBNull(7) ? null : reader.GetString(7),
LogsPath: reader.IsDBNull(8) ? null : reader.GetString(8),
MerkleRoot: reader.GetString(9),
CreatedAt: reader.GetFieldValue<DateTimeOffset>(10)));
}
return results;
}
}

View File

@@ -74,6 +74,10 @@ public sealed class LedgerProjectionWorker : BackgroundService
continue;
}
var batchStopwatch = Stopwatch.StartNew();
var batchTenant = batch[0].TenantId;
var batchFailed = false;
foreach (var record in batch)
{
using var scope = _logger.BeginScope(new Dictionary<string, object?>
@@ -86,6 +90,19 @@ public sealed class LedgerProjectionWorker : BackgroundService
});
using var activity = LedgerTelemetry.StartProjectionApply(record);
var applyStopwatch = Stopwatch.StartNew();
if (!LedgerEventConstants.IsFindingEvent(record.EventType))
{
checkpoint = checkpoint with
{
LastRecordedAt = record.RecordedAt,
LastEventId = record.EventId,
UpdatedAt = _timeProvider.GetUtcNow()
};
await _repository.SaveCheckpointAsync(checkpoint, stoppingToken).ConfigureAwait(false);
_logger.LogInformation("Skipped non-finding ledger event {EventId} type {EventType} during projection.", record.EventId, record.EventType);
continue;
}
string? evaluationStatus = null;
try
@@ -131,10 +148,17 @@ public sealed class LedgerProjectionWorker : BackgroundService
{
LedgerTelemetry.MarkError(activity, "projection_failed");
_logger.LogError(ex, "Failed to project ledger event {EventId} for tenant {TenantId}.", record.EventId, record.TenantId);
batchFailed = true;
await DelayAsync(stoppingToken).ConfigureAwait(false);
break;
}
}
batchStopwatch.Stop();
if (!batchFailed)
{
LedgerMetrics.RecordProjectionRebuild(batchStopwatch.Elapsed, batchTenant, "replay");
}
}
}

View File

@@ -1,3 +1,4 @@
using System.Collections.Concurrent;
using System.Diagnostics.Metrics;
namespace StellaOps.Findings.Ledger.Observability;
@@ -6,10 +7,16 @@ internal static class LedgerMetrics
{
private static readonly Meter Meter = new("StellaOps.Findings.Ledger");
private static readonly Histogram<double> WriteDurationSeconds = Meter.CreateHistogram<double>(
"ledger_write_duration_seconds",
unit: "s",
description: "Latency of successful ledger append operations.");
// Compatibility with earlier drafts
private static readonly Histogram<double> WriteLatencySeconds = Meter.CreateHistogram<double>(
"ledger_write_latency_seconds",
unit: "s",
description: "Latency of successful ledger append operations.");
description: "Deprecated alias for ledger_write_duration_seconds.");
private static readonly Counter<long> EventsTotal = Meter.CreateCounter<long>(
"ledger_events_total",
@@ -20,15 +27,40 @@ internal static class LedgerMetrics
unit: "s",
description: "Duration to apply a ledger event to the finding projection.");
private static readonly Histogram<double> ProjectionLagSeconds = Meter.CreateHistogram<double>(
"ledger_projection_lag_seconds",
private static readonly Histogram<double> ProjectionRebuildSeconds = Meter.CreateHistogram<double>(
"ledger_projection_rebuild_seconds",
unit: "s",
description: "Lag between ledger recorded_at and projection application time.");
description: "Duration of projection replay/rebuild batches.");
private static readonly Counter<long> ProjectionEventsTotal = Meter.CreateCounter<long>(
"ledger_projection_events_total",
description: "Number of ledger events applied to projections.");
private static readonly Histogram<double> MerkleAnchorDurationSeconds = Meter.CreateHistogram<double>(
"ledger_merkle_anchor_duration_seconds",
unit: "s",
description: "Duration to persist Merkle anchor batches.");
private static readonly Counter<long> MerkleAnchorFailures = Meter.CreateCounter<long>(
"ledger_merkle_anchor_failures_total",
description: "Count of Merkle anchor failures by reason.");
private static readonly ObservableGauge<double> ProjectionLagGauge =
Meter.CreateObservableGauge("ledger_projection_lag_seconds", ObserveProjectionLag, unit: "s",
description: "Lag between ledger recorded_at and projection application time.");
private static readonly ObservableGauge<long> IngestBacklogGauge =
Meter.CreateObservableGauge("ledger_ingest_backlog_events", ObserveBacklog,
description: "Number of events buffered for ingestion/anchoring.");
private static readonly ObservableGauge<long> DbConnectionsGauge =
Meter.CreateObservableGauge("ledger_db_connections_active", ObserveDbConnections,
description: "Active PostgreSQL connections by role.");
private static readonly ConcurrentDictionary<string, double> ProjectionLagByTenant = new(StringComparer.Ordinal);
private static readonly ConcurrentDictionary<string, long> DbConnectionsByRole = new(StringComparer.OrdinalIgnoreCase);
private static long _ingestBacklog;
public static void RecordWriteSuccess(TimeSpan duration, string? tenantId, string? eventType, string? source)
{
var tags = new KeyValuePair<string, object?>[]
@@ -38,6 +70,7 @@ internal static class LedgerMetrics
new("source", source ?? string.Empty)
};
WriteDurationSeconds.Record(duration.TotalSeconds, tags);
WriteLatencySeconds.Record(duration.TotalSeconds, tags);
EventsTotal.Add(1, tags);
}
@@ -59,7 +92,90 @@ internal static class LedgerMetrics
};
ProjectionApplySeconds.Record(duration.TotalSeconds, tags);
ProjectionLagSeconds.Record(lagSeconds, tags);
ProjectionEventsTotal.Add(1, tags);
UpdateProjectionLag(tenantId, lagSeconds);
}
public static void RecordProjectionRebuild(TimeSpan duration, string? tenantId, string scenario)
{
var tags = new KeyValuePair<string, object?>[]
{
new("tenant", tenantId ?? string.Empty),
new("scenario", scenario)
};
ProjectionRebuildSeconds.Record(duration.TotalSeconds, tags);
}
public static void RecordMerkleAnchorDuration(TimeSpan duration, string tenantId, int leafCount)
{
var tags = new KeyValuePair<string, object?>[]
{
new("tenant", tenantId),
new("leaf_count", leafCount)
};
MerkleAnchorDurationSeconds.Record(duration.TotalSeconds, tags);
}
public static void RecordMerkleAnchorFailure(string tenantId, string reason)
{
var tags = new KeyValuePair<string, object?>[]
{
new("tenant", tenantId),
new("reason", reason)
};
MerkleAnchorFailures.Add(1, tags);
}
public static void IncrementBacklog() => Interlocked.Increment(ref _ingestBacklog);
public static void DecrementBacklog()
{
var value = Interlocked.Decrement(ref _ingestBacklog);
if (value < 0)
{
Interlocked.Exchange(ref _ingestBacklog, 0);
}
}
public static void ConnectionOpened(string role)
{
var normalized = NormalizeRole(role);
DbConnectionsByRole.AddOrUpdate(normalized, _ => 1, (_, current) => current + 1);
}
public static void ConnectionClosed(string role)
{
var normalized = NormalizeRole(role);
DbConnectionsByRole.AddOrUpdate(normalized, _ => 0, (_, current) => Math.Max(0, current - 1));
}
public static void UpdateProjectionLag(string? tenantId, double lagSeconds)
{
var key = string.IsNullOrWhiteSpace(tenantId) ? string.Empty : tenantId;
ProjectionLagByTenant[key] = lagSeconds < 0 ? 0 : lagSeconds;
}
private static IEnumerable<Measurement<double>> ObserveProjectionLag()
{
foreach (var kvp in ProjectionLagByTenant)
{
yield return new Measurement<double>(kvp.Value, new KeyValuePair<string, object?>("tenant", kvp.Key));
}
}
private static IEnumerable<Measurement<long>> ObserveBacklog()
{
yield return new Measurement<long>(Interlocked.Read(ref _ingestBacklog));
}
private static IEnumerable<Measurement<long>> ObserveDbConnections()
{
foreach (var kvp in DbConnectionsByRole)
{
yield return new Measurement<long>(kvp.Value, new KeyValuePair<string, object?>("role", kvp.Key));
}
}
private static string NormalizeRole(string role) => string.IsNullOrWhiteSpace(role) ? "unspecified" : role.ToLowerInvariant();
}

View File

@@ -1,6 +1,7 @@
using System.Diagnostics;
using Microsoft.Extensions.Logging;
using StellaOps.Findings.Ledger.Domain;
using StellaOps.Findings.Ledger.Infrastructure.Exports;
namespace StellaOps.Findings.Ledger.Observability;
@@ -12,6 +13,8 @@ internal static class LedgerTimeline
{
private static readonly EventId LedgerAppended = new(6101, "ledger.event.appended");
private static readonly EventId ProjectionUpdated = new(6201, "ledger.projection.updated");
private static readonly EventId OrchestratorExport = new(6301, "ledger.export.recorded");
private static readonly EventId AirgapImport = new(6401, "ledger.airgap.imported");
public static void EmitLedgerAppended(ILogger logger, LedgerEventRecord record, string? evidenceBundleRef = null)
{
@@ -62,4 +65,38 @@ internal static class LedgerTimeline
traceId,
evidenceBundleRef ?? record.EvidenceBundleReference ?? string.Empty);
}
public static void EmitOrchestratorExport(ILogger logger, OrchestratorExportRecord record)
{
if (logger is null)
{
return;
}
logger.LogInformation(
OrchestratorExport,
"timeline ledger.export.recorded tenant={Tenant} run={RunId} artifact={ArtifactHash} policy={PolicyHash} status={Status} merkle_root={MerkleRoot}",
record.TenantId,
record.RunId,
record.ArtifactHash,
record.PolicyHash,
record.Status,
record.MerkleRoot);
}
public static void EmitAirgapImport(ILogger logger, string tenantId, string bundleId, string merkleRoot, Guid? ledgerEventId)
{
if (logger is null)
{
return;
}
logger.LogInformation(
AirgapImport,
"timeline ledger.airgap.imported tenant={Tenant} bundle={BundleId} merkle_root={MerkleRoot} ledger_event={LedgerEvent}",
tenantId,
bundleId,
merkleRoot,
ledgerEventId?.ToString() ?? string.Empty);
}
}

View File

@@ -0,0 +1,152 @@
using System.Text.Json.Nodes;
using Microsoft.Extensions.Logging;
using StellaOps.Findings.Ledger.Domain;
using StellaOps.Findings.Ledger.Infrastructure;
using StellaOps.Findings.Ledger.Infrastructure.AirGap;
using StellaOps.Findings.Ledger.Observability;
namespace StellaOps.Findings.Ledger.Services;
public sealed record AirgapImportInput(
string TenantId,
string BundleId,
string? MirrorGeneration,
string MerkleRoot,
DateTimeOffset TimeAnchor,
string? Publisher,
string? HashAlgorithm,
IReadOnlyList<string> Contents,
string? ImportOperator);
public sealed record AirgapImportResult(
bool Success,
Guid ChainId,
long? SequenceNumber,
Guid? LedgerEventId,
string? Error);
public sealed class AirgapImportService
{
private readonly ILedgerEventRepository _ledgerEventRepository;
private readonly ILedgerEventWriteService _writeService;
private readonly IAirgapImportRepository _repository;
private readonly TimeProvider _timeProvider;
private readonly ILogger<AirgapImportService> _logger;
public AirgapImportService(
ILedgerEventRepository ledgerEventRepository,
ILedgerEventWriteService writeService,
IAirgapImportRepository repository,
TimeProvider timeProvider,
ILogger<AirgapImportService> logger)
{
_ledgerEventRepository = ledgerEventRepository ?? throw new ArgumentNullException(nameof(ledgerEventRepository));
_writeService = writeService ?? throw new ArgumentNullException(nameof(writeService));
_repository = repository ?? throw new ArgumentNullException(nameof(repository));
_timeProvider = timeProvider ?? throw new ArgumentNullException(nameof(timeProvider));
_logger = logger ?? throw new ArgumentNullException(nameof(logger));
}
public async Task<AirgapImportResult> RecordAsync(AirgapImportInput input, CancellationToken cancellationToken)
{
ArgumentNullException.ThrowIfNull(input);
var chainId = LedgerChainIdGenerator.FromTenantSubject(input.TenantId, $"airgap::{input.BundleId}");
var chainHead = await _ledgerEventRepository.GetChainHeadAsync(input.TenantId, chainId, cancellationToken).ConfigureAwait(false);
var sequence = (chainHead?.SequenceNumber ?? 0) + 1;
var previousHash = chainHead?.EventHash ?? LedgerEventConstants.EmptyHash;
var eventId = Guid.NewGuid();
var recordedAt = _timeProvider.GetUtcNow();
var payload = new JsonObject
{
["airgap"] = new JsonObject
{
["bundleId"] = input.BundleId,
["mirrorGeneration"] = input.MirrorGeneration,
["merkleRoot"] = input.MerkleRoot,
["timeAnchor"] = input.TimeAnchor.ToUniversalTime().ToString("O"),
["publisher"] = input.Publisher,
["hashAlgorithm"] = input.HashAlgorithm,
["contents"] = new JsonArray(input.Contents.Select(c => (JsonNode)c).ToArray())
}
};
var envelope = new JsonObject
{
["event"] = new JsonObject
{
["id"] = eventId.ToString(),
["type"] = LedgerEventConstants.EventAirgapBundleImported,
["tenant"] = input.TenantId,
["chainId"] = chainId.ToString(),
["sequence"] = sequence,
["policyVersion"] = input.MirrorGeneration ?? "airgap-bundle",
["artifactId"] = input.BundleId,
["finding"] = new JsonObject
{
["id"] = input.BundleId,
["artifactId"] = input.BundleId,
["vulnId"] = "airgap-import"
},
["actor"] = new JsonObject
{
["id"] = input.ImportOperator ?? "airgap-operator",
["type"] = "operator"
},
["occurredAt"] = FormatTimestamp(input.TimeAnchor),
["recordedAt"] = FormatTimestamp(recordedAt),
["payload"] = payload.DeepClone()
}
};
var draft = new LedgerEventDraft(
input.TenantId,
chainId,
sequence,
eventId,
LedgerEventConstants.EventAirgapBundleImported,
input.MirrorGeneration ?? "airgap-bundle",
input.BundleId,
input.BundleId,
SourceRunId: null,
ActorId: input.ImportOperator ?? "airgap-operator",
ActorType: "operator",
OccurredAt: input.TimeAnchor.ToUniversalTime(),
RecordedAt: recordedAt,
Payload: payload,
CanonicalEnvelope: envelope,
ProvidedPreviousHash: previousHash);
var writeResult = await _writeService.AppendAsync(draft, cancellationToken).ConfigureAwait(false);
if (writeResult.Status is not (LedgerWriteStatus.Success or LedgerWriteStatus.Idempotent))
{
var error = string.Join(";", writeResult.Errors);
return new AirgapImportResult(false, chainId, sequence, writeResult.Record?.EventId, error);
}
var ledgerEventId = writeResult.Record?.EventId;
var record = new AirgapImportRecord(
input.TenantId,
input.BundleId,
input.MirrorGeneration,
input.MerkleRoot,
input.TimeAnchor.ToUniversalTime(),
input.Publisher,
input.HashAlgorithm,
new JsonArray(input.Contents.Select(c => (JsonNode)c).ToArray()),
recordedAt,
input.ImportOperator,
ledgerEventId);
await _repository.InsertAsync(record, cancellationToken).ConfigureAwait(false);
LedgerTimeline.EmitAirgapImport(_logger, input.TenantId, input.BundleId, input.MerkleRoot, ledgerEventId);
return new AirgapImportResult(true, chainId, sequence, ledgerEventId, null);
}
private static string FormatTimestamp(DateTimeOffset value)
=> value.ToUniversalTime().ToString("yyyy-MM-dd'T'HH:mm:ss.fff'Z'");
}

View File

@@ -22,6 +22,11 @@ public static class LedgerProjectionReducer
var status = evaluation.Status ?? DetermineStatus(record.EventType, payload, current?.Status);
var severity = evaluation.Severity ?? DetermineSeverity(payload, current?.Severity);
var riskScore = evaluation.RiskScore ?? current?.RiskScore;
var riskSeverity = evaluation.RiskSeverity ?? current?.RiskSeverity;
var riskProfileVersion = evaluation.RiskProfileVersion ?? current?.RiskProfileVersion;
var riskExplanationId = evaluation.RiskExplanationId ?? current?.RiskExplanationId;
var riskEventSequence = evaluation.RiskEventSequence ?? current?.RiskEventSequence ?? record.SequenceNumber;
var labels = CloneLabels(evaluation.Labels);
MergeLabels(labels, payload);
@@ -41,6 +46,11 @@ public static class LedgerProjectionReducer
record.PolicyVersion,
status,
severity,
riskScore,
riskSeverity,
riskProfileVersion,
riskExplanationId,
riskEventSequence,
labels,
record.EventId,
explainRef,

View File

@@ -0,0 +1,86 @@
using System.Text.Json.Nodes;
using Microsoft.Extensions.Logging;
using StellaOps.Findings.Ledger.Hashing;
using StellaOps.Findings.Ledger.Infrastructure.Exports;
using StellaOps.Findings.Ledger.Observability;
namespace StellaOps.Findings.Ledger.Services;
public sealed record OrchestratorExportInput(
string TenantId,
Guid RunId,
string JobType,
string ArtifactHash,
string PolicyHash,
DateTimeOffset StartedAt,
DateTimeOffset? CompletedAt,
string Status,
string? ManifestPath,
string? LogsPath);
public sealed class OrchestratorExportService
{
private readonly IOrchestratorExportRepository _repository;
private readonly TimeProvider _timeProvider;
private readonly ILogger<OrchestratorExportService> _logger;
public OrchestratorExportService(
IOrchestratorExportRepository repository,
TimeProvider timeProvider,
ILogger<OrchestratorExportService> logger)
{
_repository = repository ?? throw new ArgumentNullException(nameof(repository));
_timeProvider = timeProvider ?? throw new ArgumentNullException(nameof(timeProvider));
_logger = logger ?? throw new ArgumentNullException(nameof(logger));
}
public async Task<OrchestratorExportRecord> RecordAsync(OrchestratorExportInput input, CancellationToken cancellationToken)
{
ArgumentNullException.ThrowIfNull(input);
var canonical = CreateCanonicalPayload(input);
var merkleRoot = HashUtilities.ComputeSha256Hex(LedgerCanonicalJsonSerializer.Serialize(canonical));
var record = new OrchestratorExportRecord(
input.TenantId,
input.RunId,
input.JobType,
input.ArtifactHash,
input.PolicyHash,
input.StartedAt.ToUniversalTime(),
input.CompletedAt?.ToUniversalTime(),
input.Status,
input.ManifestPath,
input.LogsPath,
merkleRoot,
_timeProvider.GetUtcNow());
await _repository.InsertAsync(record, cancellationToken).ConfigureAwait(false);
LedgerTimeline.EmitOrchestratorExport(_logger, record);
return record;
}
public Task<IReadOnlyList<OrchestratorExportRecord>> GetByArtifactAsync(string tenantId, string artifactHash, CancellationToken cancellationToken)
{
return _repository.GetByArtifactAsync(tenantId, artifactHash, cancellationToken);
}
private static JsonObject CreateCanonicalPayload(OrchestratorExportInput input)
{
var payload = new JsonObject
{
["tenantId"] = input.TenantId,
["runId"] = input.RunId.ToString(),
["jobType"] = input.JobType,
["artifactHash"] = input.ArtifactHash,
["policyHash"] = input.PolicyHash,
["startedAt"] = input.StartedAt.ToUniversalTime().ToString("O"),
["completedAt"] = input.CompletedAt?.ToUniversalTime().ToString("O"),
["status"] = input.Status,
["manifestPath"] = input.ManifestPath,
["logsPath"] = input.LogsPath
};
return LedgerCanonicalJsonSerializer.Canonicalize(payload);
}
}

View File

@@ -0,0 +1,9 @@
# Findings Ledger · Sprint 0120-0000-0001
| Task ID | Status | Notes | Updated (UTC) |
| --- | --- | --- | --- |
| LEDGER-29-008 | DOING | Determinism harness, metrics, replay tests | 2025-11-22 |
| LEDGER-34-101 | TODO | Orchestrator export linkage | 2025-11-22 |
| LEDGER-AIRGAP-56-001 | TODO | Mirror bundle provenance recording | 2025-11-22 |
Status changes must be mirrored in `docs/implplan/SPRINT_0120_0000_0001_policy_reasoning.md`.

View File

@@ -0,0 +1,51 @@
-- 006_orchestrator_airgap.sql
-- Add orchestrator export provenance and air-gap import provenance tables (LEDGER-34-101, LEDGER-AIRGAP-56-001)
BEGIN;
CREATE TABLE IF NOT EXISTS orchestrator_exports
(
tenant_id TEXT NOT NULL,
run_id UUID NOT NULL,
job_type TEXT NOT NULL,
artifact_hash TEXT NOT NULL,
policy_hash TEXT NOT NULL,
started_at TIMESTAMPTZ NOT NULL,
completed_at TIMESTAMPTZ,
status TEXT NOT NULL,
manifest_path TEXT,
logs_path TEXT,
merkle_root CHAR(64) NOT NULL,
created_at TIMESTAMPTZ NOT NULL,
PRIMARY KEY (tenant_id, run_id)
);
CREATE UNIQUE INDEX IF NOT EXISTS ix_orchestrator_exports_artifact_run
ON orchestrator_exports (tenant_id, artifact_hash, run_id);
CREATE INDEX IF NOT EXISTS ix_orchestrator_exports_artifact
ON orchestrator_exports (tenant_id, artifact_hash);
CREATE TABLE IF NOT EXISTS airgap_imports
(
tenant_id TEXT NOT NULL,
bundle_id TEXT NOT NULL,
mirror_generation TEXT,
merkle_root TEXT NOT NULL,
time_anchor TIMESTAMPTZ NOT NULL,
publisher TEXT,
hash_algorithm TEXT,
contents JSONB,
imported_at TIMESTAMPTZ NOT NULL,
import_operator TEXT,
ledger_event_id UUID,
PRIMARY KEY (tenant_id, bundle_id, time_anchor)
);
CREATE INDEX IF NOT EXISTS ix_airgap_imports_bundle
ON airgap_imports (tenant_id, bundle_id);
CREATE INDEX IF NOT EXISTS ix_airgap_imports_event
ON airgap_imports (tenant_id, ledger_event_id);
COMMIT;

View File

@@ -105,7 +105,8 @@ root.SetHandler(async (FileInfo[] fixtures, string connection, string tenant, in
var verification = await VerifyLedgerAsync(scope.ServiceProvider, tenant, eventsWritten, cts.Token).ConfigureAwait(false);
var writeLatencyP95Ms = Percentile(metrics.HistDouble("ledger_write_latency_seconds"), 95) * 1000;
var writeDurations = metrics.HistDouble("ledger_write_duration_seconds").Concat(metrics.HistDouble("ledger_write_latency_seconds"));
var writeLatencyP95Ms = Percentile(writeDurations, 95) * 1000;
var rebuildP95Ms = Percentile(metrics.HistDouble("ledger_projection_rebuild_seconds"), 95) * 1000;
var projectionLagSeconds = metrics.GaugeDouble("ledger_projection_lag_seconds").DefaultIfEmpty(0).Max();
var backlogEvents = metrics.GaugeLong("ledger_ingest_backlog_events").DefaultIfEmpty(0).Max();

View File

@@ -36,6 +36,11 @@ public sealed class InlinePolicyEvaluationServiceTests
"policy-sha",
"affected",
7.1m,
null,
null,
null,
null,
1,
new JsonObject { ["deprecated"] = "true" },
Guid.NewGuid(),
null,
@@ -68,6 +73,11 @@ public sealed class InlinePolicyEvaluationServiceTests
"policy-sha",
"accepted_risk",
3.4m,
null,
null,
null,
null,
1,
new JsonObject { ["runtime"] = "contained" },
Guid.NewGuid(),
"explain://existing",

View File

@@ -32,6 +32,11 @@ public sealed class LedgerProjectionReducerTests
var evaluation = new PolicyEvaluationResult(
"triaged",
6.5m,
null,
null,
null,
null,
1,
(JsonObject)payload["labels"]!.DeepClone(),
payload["explainRef"]!.GetValue<string>(),
new JsonArray(payload["explainRef"]!.GetValue<string>()));
@@ -62,6 +67,11 @@ public sealed class LedgerProjectionReducerTests
"policy-v1",
"affected",
5.0m,
null,
null,
null,
null,
1,
new JsonObject(),
Guid.NewGuid(),
null,
@@ -82,6 +92,11 @@ public sealed class LedgerProjectionReducerTests
var evaluation = new PolicyEvaluationResult(
"accepted_risk",
existing.Severity,
null,
null,
null,
null,
existing.RiskEventSequence,
(JsonObject)existing.Labels.DeepClone(),
null,
new JsonArray());
@@ -110,6 +125,11 @@ public sealed class LedgerProjectionReducerTests
"policy-v1",
"triaged",
7.1m,
null,
null,
null,
null,
1,
labels,
Guid.NewGuid(),
null,
@@ -133,6 +153,11 @@ public sealed class LedgerProjectionReducerTests
var evaluation = new PolicyEvaluationResult(
"triaged",
existing.Severity,
null,
null,
null,
null,
existing.RiskEventSequence,
(JsonObject)payload["labels"]!.DeepClone(),
null,
new JsonArray());