834 lines
33 KiB
C#
834 lines
33 KiB
C#
using Npgsql;
|
|
using StellaOps.Findings.Ledger.Domain;
|
|
using StellaOps.Findings.Ledger.Infrastructure.Snapshot;
|
|
using System.Diagnostics;
|
|
using System.Text;
|
|
using System.Text.Json;
|
|
|
|
namespace StellaOps.Findings.Ledger.Infrastructure.Postgres;
|
|
|
|
|
|
/// <summary>
|
|
/// PostgreSQL implementation of time-travel repository.
|
|
/// </summary>
|
|
public sealed class PostgresTimeTravelRepository : ITimeTravelRepository
{
    // Pooled connection source for the ledger database; commands are created per call.
    private readonly NpgsqlDataSource _dataSource;
    // Used only to resolve a snapshot id into a (timestamp, sequence) query point.
    private readonly ISnapshotRepository _snapshotRepository;
    // Shared serializer settings for payload/label JSON (camelCase, compact).
    private readonly JsonSerializerOptions _jsonOptions;

    /// <summary>
    /// Creates the repository over the given PostgreSQL data source and snapshot store.
    /// </summary>
    /// <param name="dataSource">Data source for the ledger database.</param>
    /// <param name="snapshotRepository">Snapshot store used by point resolution.</param>
    /// <exception cref="ArgumentNullException">When either dependency is null.</exception>
    public PostgresTimeTravelRepository(
        NpgsqlDataSource dataSource,
        ISnapshotRepository snapshotRepository)
    {
        // Fail fast on mis-wired DI instead of a NullReferenceException at first query.
        ArgumentNullException.ThrowIfNull(dataSource);
        ArgumentNullException.ThrowIfNull(snapshotRepository);

        _dataSource = dataSource;
        _snapshotRepository = snapshotRepository;
        _jsonOptions = new JsonSerializerOptions
        {
            PropertyNamingPolicy = JsonNamingPolicy.CamelCase,
            WriteIndented = false
        };
    }
|
|
|
|
/// <summary>
/// Returns the current head of the tenant's ledger: the highest sequence number
/// and the most recent event timestamp. For an empty ledger the COALESCEs yield
/// sequence 0 and the database's NOW().
/// </summary>
/// <param name="tenantId">Tenant whose ledger head is requested.</param>
/// <param name="ct">Cancellation token.</param>
public async Task<QueryPoint> GetCurrentPointAsync(
    string tenantId,
    CancellationToken ct = default)
{
    const string sql = """
        SELECT COALESCE(MAX(sequence_number), 0) as seq,
               COALESCE(MAX(recorded_at), NOW()) as ts
        FROM ledger_events
        WHERE tenant_id = @tenantId
        """;

    await using var command = _dataSource.CreateCommand(sql);
    command.Parameters.AddWithValue("tenantId", tenantId);

    await using var row = await command.ExecuteReaderAsync(ct);
    // An aggregate query with no GROUP BY always produces exactly one row.
    await row.ReadAsync(ct);

    var tsOrdinal = row.GetOrdinal("ts");
    var seqOrdinal = row.GetOrdinal("seq");

    return new QueryPoint(
        Timestamp: row.GetFieldValue<DateTimeOffset>(tsOrdinal),
        SequenceNumber: row.GetInt64(seqOrdinal));
}
|
|
|
|
/// <summary>
/// Resolves the concrete (timestamp, sequence) point a historical query should
/// execute at. Precedence: snapshot id, then explicit sequence, then timestamp;
/// with no constraint the current ledger head is returned.
/// </summary>
/// <param name="tenantId">Tenant whose ledger is queried.</param>
/// <param name="timestamp">Optional as-of timestamp (inclusive).</param>
/// <param name="sequence">Optional exact ledger sequence number.</param>
/// <param name="snapshotId">Optional snapshot to anchor the query to.</param>
/// <param name="ct">Cancellation token.</param>
/// <returns>
/// The resolved point, or null when the referenced snapshot does not exist or the
/// referenced sequence has no event for this tenant.
/// </returns>
public async Task<QueryPoint?> ResolveQueryPointAsync(
    string tenantId,
    DateTimeOffset? timestamp,
    long? sequence,
    Guid? snapshotId,
    CancellationToken ct = default)
{
    // If snapshot ID is provided, get point from snapshot
    if (snapshotId.HasValue)
    {
        var snapshot = await _snapshotRepository.GetByIdAsync(tenantId, snapshotId.Value, ct);
        if (snapshot == null)
            return null;

        return new QueryPoint(
            Timestamp: snapshot.Timestamp,
            SequenceNumber: snapshot.SequenceNumber,
            SnapshotId: snapshotId);
    }

    // If sequence is provided, get timestamp for that sequence
    if (sequence.HasValue)
    {
        const string sql = """
            SELECT recorded_at FROM ledger_events
            WHERE tenant_id = @tenantId AND sequence_number = @seq
            """;

        await using var cmd = _dataSource.CreateCommand(sql);
        cmd.Parameters.AddWithValue("tenantId", tenantId);
        cmd.Parameters.AddWithValue("seq", sequence.Value);

        await using var reader = await cmd.ExecuteReaderAsync(ct);
        // No row means the sequence number does not exist for this tenant.
        if (!await reader.ReadAsync(ct))
            return null;

        return new QueryPoint(
            Timestamp: reader.GetFieldValue<DateTimeOffset>(0),
            SequenceNumber: sequence.Value);
    }

    // If timestamp is provided, find the sequence at that point
    if (timestamp.HasValue)
    {
        // Latest event recorded at or before the requested timestamp.
        const string sql = """
            SELECT sequence_number, recorded_at FROM ledger_events
            WHERE tenant_id = @tenantId AND recorded_at <= @ts
            ORDER BY sequence_number DESC
            LIMIT 1
            """;

        await using var cmd = _dataSource.CreateCommand(sql);
        cmd.Parameters.AddWithValue("tenantId", tenantId);
        cmd.Parameters.AddWithValue("ts", timestamp.Value);

        await using var reader = await cmd.ExecuteReaderAsync(ct);
        if (!await reader.ReadAsync(ct))
        {
            // No events before timestamp, return point at 0
            return new QueryPoint(timestamp.Value, 0);
        }

        // Note: the returned Timestamp is the matched event's recorded_at,
        // not the caller-supplied timestamp.
        return new QueryPoint(
            Timestamp: reader.GetFieldValue<DateTimeOffset>(1),
            SequenceNumber: reader.GetInt64(0));
    }

    // No constraints - return current point
    return await GetCurrentPointAsync(tenantId, ct);
}
|
|
|
|
/// <summary>
/// Returns the state of findings as of the resolved query point by folding the
/// event stream: for each finding, the latest event at or below the sequence
/// point wins (ROW_NUMBER ... DESC, rn = 1). Optional filters and keyset
/// pagination (finding_id ascending) are applied on top.
/// </summary>
/// <param name="request">Tenant, point-in-time selector, filters and paging.</param>
/// <param name="ct">Cancellation token.</param>
/// <returns>One page of findings plus a token when more pages exist.</returns>
public async Task<HistoricalQueryResponse<FindingHistoryItem>> QueryFindingsAsync(
    HistoricalQueryRequest request,
    CancellationToken ct = default)
{
    var queryPoint = await ResolveQueryPointAsync(
        request.TenantId,
        request.AtTimestamp,
        request.AtSequence,
        request.SnapshotId,
        ct);

    if (queryPoint == null)
    {
        // Unresolvable point (unknown snapshot/sequence): empty result at "now".
        return new HistoricalQueryResponse<FindingHistoryItem>(
            new QueryPoint(DateTimeOffset.UtcNow, 0),
            EntityType.Finding,
            Array.Empty<FindingHistoryItem>(),
            null,
            0);
    }

    // Query findings state at the sequence point using event sourcing.
    // Filters below are appended after "WHERE rn = 1" and reference the CTE's
    // projected columns.
    var sql = new StringBuilder("""
        WITH finding_state AS (
            SELECT
                e.finding_id,
                e.artifact_id,
                e.payload->>'vulnId' as vuln_id,
                e.payload->>'status' as status,
                (e.payload->>'severity')::decimal as severity,
                e.policy_version,
                MIN(e.recorded_at) OVER (PARTITION BY e.finding_id) as first_seen,
                e.recorded_at as last_updated,
                e.payload->'labels' as labels,
                ROW_NUMBER() OVER (PARTITION BY e.finding_id ORDER BY e.sequence_number DESC) as rn
            FROM ledger_events e
            WHERE e.tenant_id = @tenantId
              AND e.sequence_number <= @seq
              AND e.finding_id IS NOT NULL
        )
        SELECT finding_id, artifact_id, vuln_id, status, severity,
               policy_version, first_seen, last_updated, labels
        FROM finding_state
        WHERE rn = 1
        """);

    var parameters = new List<NpgsqlParameter>
    {
        new("tenantId", request.TenantId),
        new("seq", queryPoint.SequenceNumber)
    };

    // Apply filters
    if (request.Filters != null)
    {
        if (!string.IsNullOrEmpty(request.Filters.Status))
        {
            sql.Append(" AND status = @status");
            parameters.Add(new NpgsqlParameter("status", request.Filters.Status));
        }

        if (request.Filters.SeverityMin.HasValue)
        {
            sql.Append(" AND severity >= @sevMin");
            parameters.Add(new NpgsqlParameter("sevMin", request.Filters.SeverityMin.Value));
        }

        if (request.Filters.SeverityMax.HasValue)
        {
            sql.Append(" AND severity <= @sevMax");
            parameters.Add(new NpgsqlParameter("sevMax", request.Filters.SeverityMax.Value));
        }

        if (!string.IsNullOrEmpty(request.Filters.ArtifactId))
        {
            sql.Append(" AND artifact_id = @artifactId");
            parameters.Add(new NpgsqlParameter("artifactId", request.Filters.ArtifactId));
        }

        if (!string.IsNullOrEmpty(request.Filters.VulnId))
        {
            sql.Append(" AND vuln_id = @vulnId");
            parameters.Add(new NpgsqlParameter("vulnId", request.Filters.VulnId));
        }
    }

    // Keyset pagination: the page token is the last finding_id of the previous page.
    if (!string.IsNullOrEmpty(request.PageToken))
    {
        sql.Append(" AND finding_id > @lastId");
        parameters.Add(new NpgsqlParameter("lastId", request.PageToken));
    }

    sql.Append(" ORDER BY finding_id LIMIT @limit");
    // Fetch one extra row as a sentinel so we can tell whether another page exists.
    parameters.Add(new NpgsqlParameter("limit", request.PageSize + 1));

    await using var cmd = _dataSource.CreateCommand(sql.ToString());
    cmd.Parameters.AddRange(parameters.ToArray());

    var items = new List<FindingHistoryItem>();
    string? nextPageToken = null;
    await using var reader = await cmd.ExecuteReaderAsync(ct);

    while (await reader.ReadAsync(ct))
    {
        if (items.Count == request.PageSize)
        {
            // Sentinel row reached: more data exists beyond this page.
            // BUG FIX: the previous version consumed this row inside the loop
            // condition and then probed the reader again, so the sentinel was
            // lost and nextPageToken was never set even when more rows existed.
            nextPageToken = items[^1].FindingId;
            break;
        }

        var labelsJson = reader.IsDBNull(reader.GetOrdinal("labels"))
            ? null
            : reader.GetString(reader.GetOrdinal("labels"));

        items.Add(new FindingHistoryItem(
            FindingId: reader.GetString(reader.GetOrdinal("finding_id")),
            ArtifactId: reader.GetString(reader.GetOrdinal("artifact_id")),
            VulnId: reader.GetString(reader.GetOrdinal("vuln_id")),
            Status: reader.GetString(reader.GetOrdinal("status")),
            Severity: reader.IsDBNull(reader.GetOrdinal("severity")) ? null : reader.GetDecimal(reader.GetOrdinal("severity")),
            PolicyVersion: reader.IsDBNull(reader.GetOrdinal("policy_version")) ? null : reader.GetString(reader.GetOrdinal("policy_version")),
            FirstSeen: reader.GetFieldValue<DateTimeOffset>(reader.GetOrdinal("first_seen")),
            LastUpdated: reader.GetFieldValue<DateTimeOffset>(reader.GetOrdinal("last_updated")),
            Labels: string.IsNullOrEmpty(labelsJson)
                ? null
                : JsonSerializer.Deserialize<Dictionary<string, string>>(labelsJson, _jsonOptions)));
    }

    return new HistoricalQueryResponse<FindingHistoryItem>(
        queryPoint,
        EntityType.Finding,
        items,
        nextPageToken,
        items.Count);
}
|
|
|
|
/// <summary>
/// Returns VEX statement state as of the resolved query point: for each
/// statementId, the latest 'vex.*' event at or below the sequence point wins.
/// No keyset pagination here — only a PageSize cap; NextPageToken is always null.
/// </summary>
/// <param name="request">Tenant, point-in-time selector and page size.</param>
/// <param name="ct">Cancellation token.</param>
public async Task<HistoricalQueryResponse<VexHistoryItem>> QueryVexAsync(
    HistoricalQueryRequest request,
    CancellationToken ct = default)
{
    var queryPoint = await ResolveQueryPointAsync(
        request.TenantId,
        request.AtTimestamp,
        request.AtSequence,
        request.SnapshotId,
        ct);

    if (queryPoint == null)
    {
        // Unresolvable point (unknown snapshot/sequence): empty result at "now".
        return new HistoricalQueryResponse<VexHistoryItem>(
            new QueryPoint(DateTimeOffset.UtcNow, 0),
            EntityType.Vex,
            Array.Empty<VexHistoryItem>(),
            null,
            0);
    }

    // Latest event per statementId at or below the sequence point.
    const string sql = """
        WITH vex_state AS (
            SELECT
                e.payload->>'statementId' as statement_id,
                e.payload->>'vulnId' as vuln_id,
                e.payload->>'productId' as product_id,
                e.payload->>'status' as status,
                e.payload->>'justification' as justification,
                (e.payload->>'issuedAt')::timestamptz as issued_at,
                (e.payload->>'expiresAt')::timestamptz as expires_at,
                ROW_NUMBER() OVER (PARTITION BY e.payload->>'statementId' ORDER BY e.sequence_number DESC) as rn
            FROM ledger_events e
            WHERE e.tenant_id = @tenantId
              AND e.sequence_number <= @seq
              AND e.event_type LIKE 'vex.%'
        )
        SELECT statement_id, vuln_id, product_id, status, justification, issued_at, expires_at
        FROM vex_state
        WHERE rn = 1
        ORDER BY statement_id
        LIMIT @limit
        """;

    await using var cmd = _dataSource.CreateCommand(sql);
    cmd.Parameters.AddWithValue("tenantId", request.TenantId);
    cmd.Parameters.AddWithValue("seq", queryPoint.SequenceNumber);
    cmd.Parameters.AddWithValue("limit", request.PageSize);

    var items = new List<VexHistoryItem>();
    await using var reader = await cmd.ExecuteReaderAsync(ct);

    while (await reader.ReadAsync(ct))
    {
        // NOTE(review): statement_id/vuln_id/product_id/status/issued_at are read
        // without null checks — assumes every vex.* payload carries these keys;
        // a malformed event would throw here. TODO confirm against the writer.
        items.Add(new VexHistoryItem(
            StatementId: reader.GetString(reader.GetOrdinal("statement_id")),
            VulnId: reader.GetString(reader.GetOrdinal("vuln_id")),
            ProductId: reader.GetString(reader.GetOrdinal("product_id")),
            Status: reader.GetString(reader.GetOrdinal("status")),
            Justification: reader.IsDBNull(reader.GetOrdinal("justification")) ? null : reader.GetString(reader.GetOrdinal("justification")),
            IssuedAt: reader.GetFieldValue<DateTimeOffset>(reader.GetOrdinal("issued_at")),
            ExpiresAt: reader.IsDBNull(reader.GetOrdinal("expires_at")) ? null : reader.GetFieldValue<DateTimeOffset>(reader.GetOrdinal("expires_at"))));
    }

    return new HistoricalQueryResponse<VexHistoryItem>(
        queryPoint,
        EntityType.Vex,
        items,
        null,
        items.Count);
}
|
|
|
|
/// <summary>
/// Returns advisory state as of the resolved query point: for each advisoryId,
/// the latest 'advisory.*' event at or below the sequence point wins. Like the
/// VEX query, only a PageSize cap is applied; NextPageToken is always null.
/// </summary>
/// <param name="request">Tenant, point-in-time selector and page size.</param>
/// <param name="ct">Cancellation token.</param>
public async Task<HistoricalQueryResponse<AdvisoryHistoryItem>> QueryAdvisoriesAsync(
    HistoricalQueryRequest request,
    CancellationToken ct = default)
{
    var queryPoint = await ResolveQueryPointAsync(
        request.TenantId,
        request.AtTimestamp,
        request.AtSequence,
        request.SnapshotId,
        ct);

    if (queryPoint == null)
    {
        // Unresolvable point (unknown snapshot/sequence): empty result at "now".
        return new HistoricalQueryResponse<AdvisoryHistoryItem>(
            new QueryPoint(DateTimeOffset.UtcNow, 0),
            EntityType.Advisory,
            Array.Empty<AdvisoryHistoryItem>(),
            null,
            0);
    }

    // Latest event per advisoryId at or below the sequence point.
    const string sql = """
        WITH advisory_state AS (
            SELECT
                e.payload->>'advisoryId' as advisory_id,
                e.payload->>'source' as source,
                e.payload->>'title' as title,
                (e.payload->>'cvssScore')::decimal as cvss_score,
                (e.payload->>'publishedAt')::timestamptz as published_at,
                (e.payload->>'modifiedAt')::timestamptz as modified_at,
                ROW_NUMBER() OVER (PARTITION BY e.payload->>'advisoryId' ORDER BY e.sequence_number DESC) as rn
            FROM ledger_events e
            WHERE e.tenant_id = @tenantId
              AND e.sequence_number <= @seq
              AND e.event_type LIKE 'advisory.%'
        )
        SELECT advisory_id, source, title, cvss_score, published_at, modified_at
        FROM advisory_state
        WHERE rn = 1
        ORDER BY advisory_id
        LIMIT @limit
        """;

    await using var cmd = _dataSource.CreateCommand(sql);
    cmd.Parameters.AddWithValue("tenantId", request.TenantId);
    cmd.Parameters.AddWithValue("seq", queryPoint.SequenceNumber);
    cmd.Parameters.AddWithValue("limit", request.PageSize);

    var items = new List<AdvisoryHistoryItem>();
    await using var reader = await cmd.ExecuteReaderAsync(ct);

    while (await reader.ReadAsync(ct))
    {
        // NOTE(review): advisory_id/source/title/published_at are read without
        // null checks — assumes every advisory.* payload carries these keys.
        items.Add(new AdvisoryHistoryItem(
            AdvisoryId: reader.GetString(reader.GetOrdinal("advisory_id")),
            Source: reader.GetString(reader.GetOrdinal("source")),
            Title: reader.GetString(reader.GetOrdinal("title")),
            CvssScore: reader.IsDBNull(reader.GetOrdinal("cvss_score")) ? null : reader.GetDecimal(reader.GetOrdinal("cvss_score")),
            PublishedAt: reader.GetFieldValue<DateTimeOffset>(reader.GetOrdinal("published_at")),
            ModifiedAt: reader.IsDBNull(reader.GetOrdinal("modified_at")) ? null : reader.GetFieldValue<DateTimeOffset>(reader.GetOrdinal("modified_at"))));
    }

    return new HistoricalQueryResponse<AdvisoryHistoryItem>(
        queryPoint,
        EntityType.Advisory,
        items,
        null,
        items.Count);
}
|
|
|
|
/// <summary>
/// Streams raw ledger events for replay, filtered by sequence/timestamp windows,
/// chain ids and event types, ordered by sequence number. Fetches PageSize + 1
/// rows so <c>HasMore</c> can be reported without a second query.
/// </summary>
/// <param name="request">Tenant, window filters, payload inclusion and page size.</param>
/// <param name="ct">Cancellation token.</param>
/// <returns>The page of events and replay metadata (range, count, HasMore, duration).</returns>
public async Task<(IReadOnlyList<ReplayEvent> Events, ReplayMetadata Metadata)> ReplayEventsAsync(
    ReplayRequest request,
    CancellationToken ct = default)
{
    var sw = Stopwatch.StartNew();

    var sql = new StringBuilder("""
        SELECT event_id, sequence_number, chain_id, chain_sequence,
               event_type, occurred_at, recorded_at,
               actor_id, actor_type, artifact_id, finding_id,
               policy_version, event_hash, previous_hash, payload
        FROM ledger_events
        WHERE tenant_id = @tenantId
        """);

    var parameters = new List<NpgsqlParameter>
    {
        new("tenantId", request.TenantId)
    };

    if (request.FromSequence.HasValue)
    {
        sql.Append(" AND sequence_number >= @fromSeq");
        parameters.Add(new NpgsqlParameter("fromSeq", request.FromSequence.Value));
    }

    if (request.ToSequence.HasValue)
    {
        sql.Append(" AND sequence_number <= @toSeq");
        parameters.Add(new NpgsqlParameter("toSeq", request.ToSequence.Value));
    }

    if (request.FromTimestamp.HasValue)
    {
        sql.Append(" AND recorded_at >= @fromTs");
        parameters.Add(new NpgsqlParameter("fromTs", request.FromTimestamp.Value));
    }

    if (request.ToTimestamp.HasValue)
    {
        sql.Append(" AND recorded_at <= @toTs");
        parameters.Add(new NpgsqlParameter("toTs", request.ToTimestamp.Value));
    }

    if (request.ChainIds?.Count > 0)
    {
        sql.Append(" AND chain_id = ANY(@chainIds)");
        parameters.Add(new NpgsqlParameter("chainIds", request.ChainIds.ToArray()));
    }

    if (request.EventTypes?.Count > 0)
    {
        sql.Append(" AND event_type = ANY(@eventTypes)");
        parameters.Add(new NpgsqlParameter("eventTypes", request.EventTypes.ToArray()));
    }

    sql.Append(" ORDER BY sequence_number LIMIT @limit");
    // One extra row as a sentinel for HasMore.
    parameters.Add(new NpgsqlParameter("limit", request.PageSize + 1));

    await using var cmd = _dataSource.CreateCommand(sql.ToString());
    cmd.Parameters.AddRange(parameters.ToArray());

    var events = new List<ReplayEvent>();
    var hasMore = false;
    await using var reader = await cmd.ExecuteReaderAsync(ct);

    while (await reader.ReadAsync(ct))
    {
        if (events.Count == request.PageSize)
        {
            // Sentinel row reached: more events exist beyond this page.
            // BUG FIX: the previous version consumed this row in the loop
            // condition and then called ReadAsync again, which returned false
            // (LIMIT is PageSize + 1), so HasMore was always reported as false.
            hasMore = true;
            break;
        }

        object? payload = null;
        if (request.IncludePayload && !reader.IsDBNull(reader.GetOrdinal("payload")))
        {
            var payloadJson = reader.GetString(reader.GetOrdinal("payload"));
            payload = JsonSerializer.Deserialize<object>(payloadJson, _jsonOptions);
        }

        events.Add(new ReplayEvent(
            EventId: reader.GetGuid(reader.GetOrdinal("event_id")),
            SequenceNumber: reader.GetInt64(reader.GetOrdinal("sequence_number")),
            ChainId: reader.GetGuid(reader.GetOrdinal("chain_id")),
            ChainSequence: reader.GetInt32(reader.GetOrdinal("chain_sequence")),
            EventType: reader.GetString(reader.GetOrdinal("event_type")),
            OccurredAt: reader.GetFieldValue<DateTimeOffset>(reader.GetOrdinal("occurred_at")),
            RecordedAt: reader.GetFieldValue<DateTimeOffset>(reader.GetOrdinal("recorded_at")),
            ActorId: reader.IsDBNull(reader.GetOrdinal("actor_id")) ? null : reader.GetString(reader.GetOrdinal("actor_id")),
            ActorType: reader.IsDBNull(reader.GetOrdinal("actor_type")) ? null : reader.GetString(reader.GetOrdinal("actor_type")),
            ArtifactId: reader.IsDBNull(reader.GetOrdinal("artifact_id")) ? null : reader.GetString(reader.GetOrdinal("artifact_id")),
            FindingId: reader.IsDBNull(reader.GetOrdinal("finding_id")) ? null : reader.GetString(reader.GetOrdinal("finding_id")),
            PolicyVersion: reader.IsDBNull(reader.GetOrdinal("policy_version")) ? null : reader.GetString(reader.GetOrdinal("policy_version")),
            EventHash: reader.GetString(reader.GetOrdinal("event_hash")),
            PreviousHash: reader.GetString(reader.GetOrdinal("previous_hash")),
            Payload: payload));
    }

    sw.Stop();

    // Sequence range actually covered by this page (0/0 for an empty page).
    var fromSeq = events.Count > 0 ? events[0].SequenceNumber : 0;
    var toSeq = events.Count > 0 ? events[^1].SequenceNumber : 0;

    var metadata = new ReplayMetadata(
        FromSequence: fromSeq,
        ToSequence: toSeq,
        EventsCount: events.Count,
        HasMore: hasMore,
        ReplayDurationMs: sw.ElapsedMilliseconds);

    return (events, metadata);
}
|
|
|
|
/// <summary>
/// Computes a diff between two ledger points by counting the events in the
/// half-open sequence window (fromSeq, toSeq]. Entity type is derived from the
/// event_type prefix and change type from its suffix (.created/.deleted/other).
/// Detailed per-entity changes are included unless a summary-only format is asked.
/// </summary>
/// <param name="request">Tenant, the two points, entity-type filter and output format.</param>
/// <param name="ct">Cancellation token.</param>
public async Task<DiffResponse> ComputeDiffAsync(
    DiffRequest request,
    CancellationToken ct = default)
{
    // Unresolvable 'from' falls back to the beginning of time (sequence 0).
    var fromPoint = await ResolveQueryPointAsync(
        request.TenantId,
        request.From.Timestamp,
        request.From.SequenceNumber,
        request.From.SnapshotId,
        ct) ?? new QueryPoint(DateTimeOffset.MinValue, 0);

    // Unresolvable 'to' falls back to the current ledger head.
    var toPoint = await ResolveQueryPointAsync(
        request.TenantId,
        request.To.Timestamp,
        request.To.SequenceNumber,
        request.To.SnapshotId,
        ct) ?? await GetCurrentPointAsync(request.TenantId, ct);

    // Count changes between the two points
    const string countSql = """
        WITH changes AS (
            SELECT
                CASE
                    WHEN e.event_type LIKE 'finding.%' THEN 'Finding'
                    WHEN e.event_type LIKE 'vex.%' THEN 'Vex'
                    WHEN e.event_type LIKE 'advisory.%' THEN 'Advisory'
                    WHEN e.event_type LIKE 'sbom.%' THEN 'Sbom'
                    ELSE 'Evidence'
                END as entity_type,
                CASE
                    WHEN e.event_type LIKE '%.created' THEN 'Added'
                    WHEN e.event_type LIKE '%.deleted' THEN 'Removed'
                    ELSE 'Modified'
                END as change_type
            FROM ledger_events e
            WHERE e.tenant_id = @tenantId
              AND e.sequence_number > @fromSeq
              AND e.sequence_number <= @toSeq
        )
        SELECT entity_type, change_type, COUNT(*) as cnt
        FROM changes
        GROUP BY entity_type, change_type
        """;

    await using var cmd = _dataSource.CreateCommand(countSql);
    cmd.Parameters.AddWithValue("tenantId", request.TenantId);
    cmd.Parameters.AddWithValue("fromSeq", fromPoint.SequenceNumber);
    cmd.Parameters.AddWithValue("toSeq", toPoint.SequenceNumber);

    var byEntityType = new Dictionary<EntityType, DiffCounts>();
    int totalAdded = 0, totalModified = 0, totalRemoved = 0;

    await using var reader = await cmd.ExecuteReaderAsync(ct);
    while (await reader.ReadAsync(ct))
    {
        var entityTypeStr = reader.GetString(0);
        var changeType = reader.GetString(1);
        var count = (int)reader.GetInt64(2);

        // SQL emits the EntityType enum member names, so TryParse should always
        // succeed; rows that do not parse are silently dropped by design.
        if (Enum.TryParse<EntityType>(entityTypeStr, out var entityType))
        {
            if (!byEntityType.TryGetValue(entityType, out var counts))
            {
                counts = new DiffCounts(0, 0, 0);
            }

            // Accumulate into the per-entity record via non-destructive 'with'.
            byEntityType[entityType] = changeType switch
            {
                "Added" => counts with { Added = counts.Added + count },
                "Removed" => counts with { Removed = counts.Removed + count },
                _ => counts with { Modified = counts.Modified + count }
            };

            switch (changeType)
            {
                case "Added": totalAdded += count; break;
                case "Removed": totalRemoved += count; break;
                default: totalModified += count; break;
            }
        }
    }

    // Unchanged is not computed by this implementation (always 0).
    var summary = new DiffSummary(
        Added: totalAdded,
        Modified: totalModified,
        Removed: totalRemoved,
        Unchanged: 0,
        ByEntityType: byEntityType.Count > 0 ? byEntityType : null);

    // For detailed output, include individual changes
    IReadOnlyList<DiffEntry>? changes = null;
    if (request.OutputFormat != DiffOutputFormat.Summary)
    {
        changes = await GetDetailedChangesAsync(
            request.TenantId,
            fromPoint.SequenceNumber,
            toPoint.SequenceNumber,
            request.EntityTypes,
            ct);
    }

    return new DiffResponse(
        FromPoint: fromPoint,
        ToPoint: toPoint,
        Summary: summary,
        Changes: changes,
        NextPageToken: null);
}
|
|
|
|
/// <summary>
/// Loads up to 1000 individual change entries in the half-open sequence window
/// (fromSeq, toSeq], optionally restricted to the given entity types. Each event
/// becomes one DiffEntry; FromState and ChangedFields are not reconstructed.
/// </summary>
/// <param name="tenantId">Tenant whose ledger is queried.</param>
/// <param name="fromSeq">Exclusive lower sequence bound.</param>
/// <param name="toSeq">Inclusive upper sequence bound.</param>
/// <param name="entityTypes">Optional entity-type filter; null/empty means all.</param>
/// <param name="ct">Cancellation token.</param>
private async Task<IReadOnlyList<DiffEntry>> GetDetailedChangesAsync(
    string tenantId,
    long fromSeq,
    long toSeq,
    IReadOnlyList<EntityType>? entityTypes,
    CancellationToken ct)
{
    // Maps an entity type to its event_type LIKE pattern.
    static string ToEventPattern(EntityType et) => et switch
    {
        EntityType.Finding => "finding.%",
        EntityType.Vex => "vex.%",
        EntityType.Advisory => "advisory.%",
        EntityType.Sbom => "sbom.%",
        _ => "evidence.%"
    };

    // Compute the pattern list once; the previous version duplicated this
    // switch verbatim for SQL building and for parameter binding, which
    // invited the two copies drifting apart.
    var patterns = entityTypes?.Count > 0
        ? entityTypes.Select(ToEventPattern).ToList()
        : null;

    var sql = new StringBuilder("""
        SELECT
            e.event_type,
            COALESCE(e.finding_id, e.artifact_id, e.payload->>'entityId') as entity_id,
            e.payload as to_state
        FROM ledger_events e
        WHERE e.tenant_id = @tenantId
          AND e.sequence_number > @fromSeq
          AND e.sequence_number <= @toSeq
        """);

    if (patterns is { Count: > 0 })
    {
        sql.Append(" AND (");
        for (int i = 0; i < patterns.Count; i++)
        {
            if (i > 0) sql.Append(" OR ");
            sql.Append($"e.event_type LIKE @pattern{i}");
        }
        sql.Append(")");
    }

    sql.Append(" ORDER BY e.sequence_number LIMIT 1000");

    await using var cmd = _dataSource.CreateCommand(sql.ToString());
    cmd.Parameters.AddWithValue("tenantId", tenantId);
    cmd.Parameters.AddWithValue("fromSeq", fromSeq);
    cmd.Parameters.AddWithValue("toSeq", toSeq);

    if (patterns is { Count: > 0 })
    {
        for (int i = 0; i < patterns.Count; i++)
        {
            cmd.Parameters.AddWithValue($"pattern{i}", patterns[i]);
        }
    }

    var entries = new List<DiffEntry>();
    await using var reader = await cmd.ExecuteReaderAsync(ct);

    while (await reader.ReadAsync(ct))
    {
        var eventType = reader.GetString(0);
        var entityId = reader.IsDBNull(1) ? "unknown" : reader.GetString(1);
        var toStateJson = reader.IsDBNull(2) ? null : reader.GetString(2);

        // Entity type from the event_type prefix.
        var entityType = eventType switch
        {
            var et when et.StartsWith("finding.") => EntityType.Finding,
            var et when et.StartsWith("vex.") => EntityType.Vex,
            var et when et.StartsWith("advisory.") => EntityType.Advisory,
            var et when et.StartsWith("sbom.") => EntityType.Sbom,
            _ => EntityType.Evidence
        };

        // Change type from the event_type suffix; anything else counts as Modified.
        var changeType = eventType switch
        {
            var et when et.EndsWith(".created") => DiffChangeType.Added,
            var et when et.EndsWith(".deleted") => DiffChangeType.Removed,
            _ => DiffChangeType.Modified
        };

        object? toState = null;
        if (!string.IsNullOrEmpty(toStateJson))
        {
            toState = JsonSerializer.Deserialize<object>(toStateJson, _jsonOptions);
        }

        entries.Add(new DiffEntry(
            EntityType: entityType,
            EntityId: entityId,
            ChangeType: changeType,
            FromState: null,
            ToState: toState,
            ChangedFields: null));
    }

    return entries;
}
|
|
|
|
/// <summary>
/// Returns the most recent change-log entries for a single entity, newest first.
/// The entity is matched by finding_id, artifact_id, or the payload's entityId;
/// events are restricted to the entity type's event-type prefix.
/// </summary>
/// <param name="tenantId">Tenant whose ledger is queried.</param>
/// <param name="entityType">Kind of entity the id refers to.</param>
/// <param name="entityId">Identifier of the entity.</param>
/// <param name="limit">Maximum number of entries to return.</param>
/// <param name="ct">Cancellation token.</param>
public async Task<IReadOnlyList<ChangeLogEntry>> GetChangelogAsync(
    string tenantId,
    EntityType entityType,
    string entityId,
    int limit = 100,
    CancellationToken ct = default)
{
    var prefix = entityType switch
    {
        EntityType.Finding => "finding.",
        EntityType.Vex => "vex.",
        EntityType.Advisory => "advisory.",
        EntityType.Sbom => "sbom.",
        _ => "evidence."
    };

    const string sql = """
        SELECT sequence_number, recorded_at, event_type, event_hash, actor_id,
               COALESCE(payload->>'summary', event_type) as summary
        FROM ledger_events
        WHERE tenant_id = @tenantId
          AND event_type LIKE @eventTypePrefix
          AND (finding_id = @entityId OR artifact_id = @entityId OR payload->>'entityId' = @entityId)
        ORDER BY sequence_number DESC
        LIMIT @limit
        """;

    await using var command = _dataSource.CreateCommand(sql);
    command.Parameters.AddWithValue("tenantId", tenantId);
    command.Parameters.AddWithValue("eventTypePrefix", prefix + "%");
    command.Parameters.AddWithValue("entityId", entityId);
    command.Parameters.AddWithValue("limit", limit);

    var log = new List<ChangeLogEntry>();
    await using var row = await command.ExecuteReaderAsync(ct);

    // Resolve column ordinals once instead of per row.
    var seqOrd = row.GetOrdinal("sequence_number");
    var tsOrd = row.GetOrdinal("recorded_at");
    var typeOrd = row.GetOrdinal("event_type");
    var hashOrd = row.GetOrdinal("event_hash");
    var actorOrd = row.GetOrdinal("actor_id");
    var summaryOrd = row.GetOrdinal("summary");

    while (await row.ReadAsync(ct))
    {
        log.Add(new ChangeLogEntry(
            SequenceNumber: row.GetInt64(seqOrd),
            Timestamp: row.GetFieldValue<DateTimeOffset>(tsOrd),
            EntityType: entityType,
            EntityId: entityId,
            EventType: row.GetString(typeOrd),
            EventHash: row.IsDBNull(hashOrd) ? null : row.GetString(hashOrd),
            ActorId: row.IsDBNull(actorOrd) ? null : row.GetString(actorOrd),
            Summary: row.IsDBNull(summaryOrd) ? null : row.GetString(summaryOrd)));
    }

    return log;
}
|
|
|
|
/// <summary>
/// Checks whether the tenant's ledger is stale: compares the last recorded event
/// (overall and per entity type: Finding/Vex/Advisory) against the given
/// threshold. A ledger with no events at all is reported as not stale.
/// </summary>
/// <param name="tenantId">Tenant whose ledger is checked.</param>
/// <param name="threshold">Maximum tolerated age of the most recent event.</param>
/// <param name="ct">Cancellation token.</param>
public async Task<StalenessResult> CheckStalenessAsync(
    string tenantId,
    TimeSpan threshold,
    CancellationToken ct = default)
{
    var checkedAt = DateTimeOffset.UtcNow;

    // One scan: overall max plus conditional maxima per entity-type prefix.
    const string sql = """
        SELECT
            MAX(recorded_at) as last_event,
            MAX(CASE WHEN event_type LIKE 'finding.%' THEN recorded_at END) as finding_last,
            MAX(CASE WHEN event_type LIKE 'vex.%' THEN recorded_at END) as vex_last,
            MAX(CASE WHEN event_type LIKE 'advisory.%' THEN recorded_at END) as advisory_last
        FROM ledger_events
        WHERE tenant_id = @tenantId
        """;

    await using var cmd = _dataSource.CreateCommand(sql);
    cmd.Parameters.AddWithValue("tenantId", tenantId);

    await using var reader = await cmd.ExecuteReaderAsync(ct);
    // Aggregate query: always exactly one row; columns are NULL when no events match.
    await reader.ReadAsync(ct);

    var lastEventAt = reader.IsDBNull(0) ? (DateTimeOffset?)null : reader.GetFieldValue<DateTimeOffset>(0);
    var findingLast = reader.IsDBNull(1) ? (DateTimeOffset?)null : reader.GetFieldValue<DateTimeOffset>(1);
    var vexLast = reader.IsDBNull(2) ? (DateTimeOffset?)null : reader.GetFieldValue<DateTimeOffset>(2);
    var advisoryLast = reader.IsDBNull(3) ? (DateTimeOffset?)null : reader.GetFieldValue<DateTimeOffset>(3);

    // An entity class with no events at all is treated as not stale.
    var isStale = lastEventAt.HasValue && (checkedAt - lastEventAt.Value) > threshold;
    var stalenessDuration = lastEventAt.HasValue ? checkedAt - lastEventAt.Value : (TimeSpan?)null;

    // Per-type breakdown; the trailing 0 fills EntityStaleness's last numeric
    // field — NOTE(review): its meaning is declared elsewhere, presumably an
    // event count that this implementation does not compute. Confirm in Domain.
    var byEntityType = new Dictionary<EntityType, EntityStaleness>
    {
        [EntityType.Finding] = new EntityStaleness(
            findingLast.HasValue && (checkedAt - findingLast.Value) > threshold,
            findingLast,
            0),
        [EntityType.Vex] = new EntityStaleness(
            vexLast.HasValue && (checkedAt - vexLast.Value) > threshold,
            vexLast,
            0),
        [EntityType.Advisory] = new EntityStaleness(
            advisoryLast.HasValue && (checkedAt - advisoryLast.Value) > threshold,
            advisoryLast,
            0)
    };

    return new StalenessResult(
        IsStale: isStale,
        CheckedAt: checkedAt,
        LastEventAt: lastEventAt,
        StalenessThreshold: threshold,
        StalenessDuration: stalenessDuration,
        ByEntityType: byEntityType);
}
}
|