prep docs and service updates
Some checks failed
Docs CI / lint-and-preview (push) Has been cancelled

This commit is contained in:
master
2025-11-21 06:56:36 +00:00
parent ca35db9ef4
commit d519782a8f
242 changed files with 17293 additions and 13367 deletions

View File

@@ -0,0 +1,65 @@
using Microsoft.Extensions.Logging.Abstractions;
using StellaOps.Findings.Ledger.WebService.Contracts;
using StellaOps.Findings.Ledger.WebService.Services;
using Xunit;
namespace StellaOps.Findings.Ledger.Tests.Exports;
public class AttestationQueryServiceTests
{
    // Baseline request against tenant "t1"; tests adjust fields via `with` or the optional args.
    private static AttestationQueryRequest BuildRequest(
        string? findingId = null,
        string? attestationId = null,
        int limit = 100)
    {
        return new AttestationQueryRequest(
            TenantId: "t1",
            ArtifactId: "sha256:a",
            FindingId: findingId,
            AttestationId: attestationId,
            Status: "verified",
            SinceRecordedAt: DateTimeOffset.Parse("2024-01-01T00:00:00Z"),
            UntilRecordedAt: DateTimeOffset.Parse("2024-01-02T00:00:00Z"),
            Limit: limit,
            FiltersHash: string.Empty,
            PagingKey: null);
    }

    // The helper-only constructor is sufficient for hash/token tests (no data source needed).
    private static AttestationQueryService CreateService()
        => new(NullLogger<AttestationQueryService>.Instance);

    [Fact]
    public void ComputeFiltersHash_IsDeterministic()
    {
        var service = CreateService();
        var original = BuildRequest();
        var withDifferentHash = original with { FiltersHash = "anything" };

        // The request's own FiltersHash field must not feed back into the computed hash.
        Assert.Equal(
            service.ComputeFiltersHash(original),
            service.ComputeFiltersHash(withDifferentHash));
    }

    [Fact]
    public void PageToken_RoundTrips()
    {
        var service = CreateService();
        var request = BuildRequest(findingId: "f1", attestationId: "att-1", limit: 50);
        var filtersHash = service.ComputeFiltersHash(request);
        var originalKey = new AttestationPagingKey(DateTimeOffset.Parse("2024-01-01T12:00:00Z"), "att-9");

        var token = service.CreatePageToken(originalKey, filtersHash);
        var parsedOk = service.TryParsePageToken(token, filtersHash, out var roundTripped, out var error);

        // Create -> parse must yield the same paging key with no error.
        Assert.True(parsedOk);
        Assert.Null(error);
        Assert.NotNull(roundTripped);
        Assert.Equal(originalKey.RecordedAt, roundTripped!.RecordedAt);
        Assert.Equal(originalKey.AttestationId, roundTripped.AttestationId);
    }
}

View File

@@ -0,0 +1,54 @@
using System.Text.Json.Nodes;
using StellaOps.Findings.Ledger.Domain;
using StellaOps.Findings.Ledger.Hashing;
using Xunit;
namespace StellaOps.Findings.Ledger.Tests;
public sealed class ProjectionHashingTests
{
    [Fact]
    public void ComputeCycleHash_IncludesRiskFields()
    {
        var baseline = CreateProjection(riskScore: 5.5m, riskSeverity: "high");
        var mutated = baseline with { RiskScore = 4.0m };

        // Changing only the risk score must change the cycle hash.
        Assert.NotEqual(
            ProjectionHashing.ComputeCycleHash(baseline),
            ProjectionHashing.ComputeCycleHash(mutated));
    }

    [Fact]
    public void ComputeCycleHash_ChangesWhenRiskExplanationChanges()
    {
        var baseline = CreateProjection(riskExplanationId: Guid.NewGuid());
        var mutated = baseline with { RiskExplanationId = Guid.NewGuid() };

        // A different explanation id must also be reflected in the hash.
        Assert.NotEqual(
            ProjectionHashing.ComputeCycleHash(baseline),
            ProjectionHashing.ComputeCycleHash(mutated));
    }

    // Builds a minimal projection; optional args let each test vary the risk fields under test.
    private static FindingProjection CreateProjection(decimal? riskScore = null, string? riskSeverity = null, Guid? riskExplanationId = null)
    {
        return new FindingProjection(
            TenantId: "t1",
            FindingId: "f1",
            PolicyVersion: "v1",
            Status: "affected",
            Severity: 7.5m,
            RiskScore: riskScore,
            RiskSeverity: riskSeverity,
            RiskProfileVersion: "profile-1",
            RiskExplanationId: riskExplanationId,
            RiskEventSequence: 1,
            Labels: new JsonObject { ["k"] = "v" },
            CurrentEventId: Guid.NewGuid(),
            ExplainRef: "ref",
            PolicyRationale: new JsonArray("r1"),
            UpdatedAt: DateTimeOffset.UtcNow,
            CycleHash: string.Empty);
    }
}

View File

@@ -1,26 +0,0 @@
<!-- Test project (being removed per the surrounding diff: @@ -1,26 +0,0 @@). -->
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<TargetFramework>net10.0</TargetFramework>
<Nullable>enable</Nullable>
<ImplicitUsings>enable</ImplicitUsings>
</PropertyGroup>
<ItemGroup>
<PackageReference Include="Microsoft.NET.Test.Sdk" Version="17.11.1" />
<PackageReference Include="xunit" Version="2.8.1" />
<PackageReference Include="xunit.runner.visualstudio" Version="2.8.1">
<PrivateAssets>all</PrivateAssets>
</PackageReference>
<PackageReference Include="FluentAssertions" Version="6.12.0" />
<FrameworkReference Include="Microsoft.AspNetCore.App" />
</ItemGroup>
<ItemGroup>
<!-- NOTE(review): raw DLL reference into another project's bin\Release output.
     This breaks clean/Debug builds and skips dependency flow; a <ProjectReference>
     (as used by the replacement project file) is the conventional form. -->
<Reference Include="StellaOps.Findings.Ledger">
<HintPath>..\StellaOps.Findings.Ledger\bin\Release\net10.0\StellaOps.Findings.Ledger.dll</HintPath>
<Private>true</Private>
</Reference>
</ItemGroup>
<ItemGroup>
<!-- All sources are excluded, then a single test file is whitelisted back in. -->
<Compile Remove="**/*.cs" />
<Compile Include="Exports/ExportPagingTests.cs" />
</ItemGroup>
</Project>

View File

@@ -1,24 +1,20 @@
<Project Sdk="Microsoft.NET.Sdk">
  <PropertyGroup>
    <TargetFramework>net10.0</TargetFramework>
    <Nullable>enable</Nullable>
    <ImplicitUsings>enable</ImplicitUsings>
    <DefaultItemExcludes>$(DefaultItemExcludes);**/tools/**/*</DefaultItemExcludes>
    <DisableTransitiveProjectReferences>true</DisableTransitiveProjectReferences>
  </PropertyGroup>
  <ItemGroup>
    <!-- Declared exactly once: the file previously listed this reference twice
         (with mixed path separators), which NuGet flags as a duplicate item. -->
    <ProjectReference Include="..\StellaOps.Findings.Ledger\StellaOps.Findings.Ledger.csproj" />
  </ItemGroup>
  <ItemGroup>
    <!-- Test packages, one entry per package with a single consistent version.
         The previous content duplicated Microsoft.NET.Test.Sdk, xunit (2.8.1 vs 2.5.4),
         xunit.runner.visualstudio, and coverlet.collector, making restore nondeterministic. -->
    <PackageReference Include="Microsoft.NET.Test.Sdk" Version="17.11.1" />
    <PackageReference Include="xunit" Version="2.8.1" />
    <PackageReference Include="xunit.runner.visualstudio" Version="2.8.1">
      <PrivateAssets>all</PrivateAssets>
    </PackageReference>
    <PackageReference Include="FluentAssertions" Version="6.12.0" />
    <PackageReference Include="coverlet.collector" Version="6.0.0">
      <PrivateAssets>all</PrivateAssets>
    </PackageReference>
  </ItemGroup>
  <ItemGroup>
    <!-- Compile only the single test file this project targets. -->
    <Compile Remove="**/*.cs" />
    <Compile Include="ProjectionHashingTests.cs" />
  </ItemGroup>
</Project>

View File

@@ -0,0 +1,266 @@
using System.Globalization;
using System.Text;
using System.Text.Json;
using Microsoft.Extensions.Logging;
using Npgsql;
using NpgsqlTypes;
using StellaOps.Findings.Ledger.Infrastructure.Exports;
using StellaOps.Findings.Ledger.Infrastructure.Postgres;
using StellaOps.Findings.Ledger.WebService.Contracts;
namespace StellaOps.Findings.Ledger.WebService.Services;
/// <summary>
/// Provides deterministic paging helpers and SQL-backed queries for attestation verifications.
/// </summary>
public sealed class AttestationQueryService
{
    private const int DefaultLimit = 200;
    private const int MaxLimit = 1000;

    private readonly LedgerDataSource? _dataSource;
    private readonly ILogger<AttestationQueryService> _logger;

    /// <summary>
    /// Creates an instance without a data source. Only the pure helpers
    /// (<see cref="ComputeFiltersHash"/>, <see cref="CreatePageToken"/>,
    /// <see cref="TryParsePageToken"/>, <see cref="ClampLimit"/>) are usable;
    /// <see cref="GetAttestationsAsync"/> throws on this instance.
    /// </summary>
    public AttestationQueryService(ILogger<AttestationQueryService> logger)
    {
        _logger = logger ?? throw new ArgumentNullException(nameof(logger));
    }

    /// <summary>Creates a fully functional instance backed by the ledger database.</summary>
    public AttestationQueryService(LedgerDataSource dataSource, ILogger<AttestationQueryService> logger)
    {
        _dataSource = dataSource ?? throw new ArgumentNullException(nameof(dataSource));
        _logger = logger ?? throw new ArgumentNullException(nameof(logger));
    }

    /// <summary>
    /// Normalizes a caller-supplied page size: missing or non-positive values fall back
    /// to <see cref="DefaultLimit"/>; anything larger than <see cref="MaxLimit"/> is capped.
    /// </summary>
    public int ClampLimit(int? requested)
    {
        if (!requested.HasValue || requested.Value <= 0)
        {
            return DefaultLimit;
        }

        return Math.Min(requested.Value, MaxLimit);
    }

    /// <summary>
    /// Computes a deterministic hash over the request's filter fields. The request's own
    /// <c>FiltersHash</c> and <c>PagingKey</c> are deliberately excluded so the hash stays
    /// stable across pages of the same logical query.
    /// </summary>
    public string ComputeFiltersHash(AttestationQueryRequest request)
    {
        var filters = new Dictionary<string, string?>
        {
            ["artifact_id"] = request.ArtifactId,
            ["finding_id"] = request.FindingId,
            ["attestation_id"] = request.AttestationId,
            ["status"] = request.Status,
            // "O" (round-trip) format is culture-invariant and lexically sortable.
            ["since_recorded_at"] = request.SinceRecordedAt?.ToString("O"),
            ["until_recorded_at"] = request.UntilRecordedAt?.ToString("O"),
            // Invariant culture so the hash cannot vary with the server locale.
            ["limit"] = request.Limit.ToString(CultureInfo.InvariantCulture)
        };

        return ExportPaging.ComputeFiltersHash(filters);
    }

    /// <summary>
    /// Parses a base64url page token produced by <see cref="CreatePageToken"/>. Returns
    /// false with a machine-readable <paramref name="error"/> code on any failure,
    /// including a filters hash that does not match the current query's filters.
    /// </summary>
    public bool TryParsePageToken(string token, string expectedFiltersHash, out AttestationPagingKey? key, out string? error)
    {
        key = null;
        error = null;

        // Undo the base64url transform (CreatePageToken strips '=' padding and swaps +/ for -_).
        var base64 = token.Replace('-', '+').Replace('_', '/');
        while (base64.Length % 4 != 0)
        {
            base64 += '=';
        }

        byte[] decodedBytes;
        try
        {
            decodedBytes = Convert.FromBase64String(base64);
        }
        catch (FormatException)
        {
            error = "invalid_page_token_encoding";
            return false;
        }

        AttestationPageToken? payload;
        try
        {
            payload = JsonSerializer.Deserialize<AttestationPageToken>(decodedBytes);
        }
        catch (JsonException)
        {
            error = "invalid_page_token_payload";
            return false;
        }

        if (payload is null || payload.Last is null)
        {
            error = "invalid_page_token_payload";
            return false;
        }

        // Reject tokens minted for a different set of filters (ordinal: hashes are opaque ids).
        if (!string.Equals(payload.FiltersHash, expectedFiltersHash, StringComparison.Ordinal))
        {
            error = "page_token_filters_mismatch";
            return false;
        }

        // Tokens are written with the round-trip ("O") format; parse with InvariantCulture and
        // RoundtripKind so the result does not depend on the server's current culture.
        if (!DateTimeOffset.TryParse(payload.Last.RecordedAt, CultureInfo.InvariantCulture, DateTimeStyles.RoundtripKind, out var recordedAt))
        {
            error = "invalid_page_token_payload";
            return false;
        }

        key = new AttestationPagingKey(recordedAt, payload.Last.AttestationId);
        return true;
    }

    /// <summary>
    /// Serializes a paging key together with the filters hash into an opaque base64url token.
    /// </summary>
    public string CreatePageToken(AttestationPagingKey key, string filtersHash)
    {
        var payload = new AttestationPageToken
        {
            FiltersHash = filtersHash,
            Last = new AttestationPageKey
            {
                RecordedAt = key.RecordedAt.ToString("O"),
                AttestationId = key.AttestationId
            }
        };

        var json = JsonSerializer.Serialize(payload);

        // base64url: strip padding and swap the two URL-unsafe characters.
        return Convert.ToBase64String(Encoding.UTF8.GetBytes(json))
            .TrimEnd('=')
            .Replace('+', '-')
            .Replace('/', '_');
    }

    /// <summary>
    /// Runs a keyset-paginated query over <c>ledger_attestations</c> for the request's tenant.
    /// Fetches <c>Limit + 1</c> rows so it can detect whether a further page exists.
    /// </summary>
    /// <exception cref="InvalidOperationException">
    /// Thrown when the service has no data source, or when the request's filters hash does not
    /// match its filter fields.
    /// </exception>
    public async Task<ExportPage<AttestationExportItem>> GetAttestationsAsync(AttestationQueryRequest request, CancellationToken cancellationToken)
    {
        ArgumentNullException.ThrowIfNull(request);

        if (_dataSource is null)
        {
            throw new InvalidOperationException("data_source_unavailable");
        }

        if (!string.Equals(request.FiltersHash, ComputeFiltersHash(request), StringComparison.Ordinal))
        {
            throw new InvalidOperationException("filters_hash_mismatch");
        }

        const string baseSql = """
        SELECT attestation_id,
               artifact_id,
               finding_id,
               verification_status,
               verification_time,
               dsse_digest,
               rekor_entry_id,
               evidence_bundle_ref,
               ledger_event_id,
               recorded_at,
               merkle_leaf_hash,
               root_hash
        FROM ledger_attestations
        WHERE tenant_id = @tenant_id
        """;

        var sqlBuilder = new StringBuilder(baseSql);
        var parameters = new List<NpgsqlParameter>
        {
            new("tenant_id", request.TenantId) { NpgsqlDbType = NpgsqlDbType.Text }
        };

        // Filters are appended as parameterized predicates only; user input never enters the SQL text.
        if (!string.IsNullOrWhiteSpace(request.ArtifactId))
        {
            sqlBuilder.Append(" AND artifact_id = @artifact_id");
            parameters.Add(new NpgsqlParameter<string>("artifact_id", request.ArtifactId) { NpgsqlDbType = NpgsqlDbType.Text });
        }

        if (!string.IsNullOrWhiteSpace(request.FindingId))
        {
            sqlBuilder.Append(" AND finding_id = @finding_id");
            parameters.Add(new NpgsqlParameter<string>("finding_id", request.FindingId) { NpgsqlDbType = NpgsqlDbType.Text });
        }

        if (!string.IsNullOrWhiteSpace(request.AttestationId))
        {
            sqlBuilder.Append(" AND attestation_id = @attestation_id");
            parameters.Add(new NpgsqlParameter<string>("attestation_id", request.AttestationId) { NpgsqlDbType = NpgsqlDbType.Text });
        }

        if (!string.IsNullOrWhiteSpace(request.Status))
        {
            sqlBuilder.Append(" AND verification_status = @status");
            parameters.Add(new NpgsqlParameter<string>("status", request.Status) { NpgsqlDbType = NpgsqlDbType.Text });
        }

        if (request.SinceRecordedAt.HasValue)
        {
            sqlBuilder.Append(" AND recorded_at >= @since_recorded_at");
            parameters.Add(new NpgsqlParameter<DateTimeOffset>("since_recorded_at", request.SinceRecordedAt.Value) { NpgsqlDbType = NpgsqlDbType.TimestampTz });
        }

        if (request.UntilRecordedAt.HasValue)
        {
            sqlBuilder.Append(" AND recorded_at <= @until_recorded_at");
            parameters.Add(new NpgsqlParameter<DateTimeOffset>("until_recorded_at", request.UntilRecordedAt.Value) { NpgsqlDbType = NpgsqlDbType.TimestampTz });
        }

        // Keyset resume: strictly after the cursor row in (recorded_at, attestation_id) order.
        if (request.PagingKey is not null)
        {
            sqlBuilder.Append(" AND (recorded_at > @cursor_recorded_at OR (recorded_at = @cursor_recorded_at AND attestation_id > @cursor_attestation_id))");
            parameters.Add(new NpgsqlParameter<DateTimeOffset>("cursor_recorded_at", request.PagingKey.RecordedAt) { NpgsqlDbType = NpgsqlDbType.TimestampTz });
            parameters.Add(new NpgsqlParameter<string>("cursor_attestation_id", request.PagingKey.AttestationId) { NpgsqlDbType = NpgsqlDbType.Text });
        }

        sqlBuilder.Append(" ORDER BY recorded_at ASC, attestation_id ASC");
        sqlBuilder.Append(" LIMIT @take");
        // Fetch one extra row purely to learn whether another page exists.
        parameters.Add(new NpgsqlParameter<int>("take", request.Limit + 1) { NpgsqlDbType = NpgsqlDbType.Integer });

        await using var connection = await _dataSource.OpenConnectionAsync(request.TenantId, cancellationToken).ConfigureAwait(false);
        await using var command = new NpgsqlCommand(sqlBuilder.ToString(), connection)
        {
            CommandTimeout = _dataSource.CommandTimeoutSeconds
        };
        command.Parameters.AddRange(parameters.ToArray());

        await using var reader = await command.ExecuteReaderAsync(cancellationToken).ConfigureAwait(false);
        var items = new List<AttestationExportItem>();
        while (await reader.ReadAsync(cancellationToken).ConfigureAwait(false))
        {
            items.Add(new AttestationExportItem(
                AttestationId: reader.GetString(0),
                ArtifactId: reader.GetString(1),
                FindingId: reader.IsDBNull(2) ? null : reader.GetString(2),
                VerificationStatus: reader.GetString(3),
                VerificationTime: reader.GetFieldValue<DateTimeOffset>(4),
                DsseDigest: reader.GetString(5),
                RekorEntryId: reader.IsDBNull(6) ? null : reader.GetString(6),
                EvidenceBundleRef: reader.IsDBNull(7) ? null : reader.GetString(7),
                LedgerEventId: reader.GetGuid(8).ToString(),
                RecordedAt: reader.GetFieldValue<DateTimeOffset>(9),
                MerkleLeafHash: reader.GetString(10),
                RootHash: reader.GetString(11)));
        }

        string? nextPageToken = null;
        if (items.Count > request.Limit)
        {
            // A surplus row means another page exists. Trim to the requested limit, then
            // anchor the cursor on the LAST RETURNED row: the resume predicate above is
            // strictly greater-than, so anchoring on the surplus row (as the previous
            // implementation did) would silently drop that row from the next page.
            items = items.Take(request.Limit).ToList();
            var lastReturned = items[^1];
            var key = new AttestationPagingKey(lastReturned.RecordedAt, lastReturned.AttestationId);
            nextPageToken = CreatePageToken(key, request.FiltersHash);
        }

        return new ExportPage<AttestationExportItem>(items, nextPageToken);
    }

    // JSON shape of the opaque page token: the filters hash plus the last-seen row key.
    private sealed class AttestationPageToken
    {
        public string FiltersHash { get; set; } = string.Empty;
        public AttestationPageKey? Last { get; set; }
    }

    // Last-seen row key, with the timestamp carried as a round-trip ("O") formatted string.
    private sealed class AttestationPageKey
    {
        public string RecordedAt { get; set; } = string.Empty;
        public string AttestationId { get; set; } = string.Empty;
    }
}

View File

@@ -10,6 +10,11 @@ public static class ProjectionHashing
private const string PolicyVersionProperty = nameof(FindingProjection.PolicyVersion);
private const string StatusProperty = nameof(FindingProjection.Status);
private const string SeverityProperty = nameof(FindingProjection.Severity);
private const string RiskScoreProperty = nameof(FindingProjection.RiskScore);
private const string RiskSeverityProperty = nameof(FindingProjection.RiskSeverity);
private const string RiskProfileVersionProperty = nameof(FindingProjection.RiskProfileVersion);
private const string RiskExplanationIdProperty = nameof(FindingProjection.RiskExplanationId);
private const string RiskEventSequenceProperty = nameof(FindingProjection.RiskEventSequence);
private const string LabelsProperty = nameof(FindingProjection.Labels);
private const string CurrentEventIdProperty = nameof(FindingProjection.CurrentEventId);
private const string ExplainRefProperty = nameof(FindingProjection.ExplainRef);
@@ -27,6 +32,11 @@ public static class ProjectionHashing
[PolicyVersionProperty] = projection.PolicyVersion,
[StatusProperty] = projection.Status,
[SeverityProperty] = projection.Severity,
[RiskScoreProperty] = projection.RiskScore,
[RiskSeverityProperty] = projection.RiskSeverity,
[RiskProfileVersionProperty] = projection.RiskProfileVersion,
[RiskExplanationIdProperty] = projection.RiskExplanationId?.ToString(),
[RiskEventSequenceProperty] = projection.RiskEventSequence,
[LabelsProperty] = projection.Labels.DeepClone(),
[CurrentEventIdProperty] = projection.CurrentEventId.ToString(),
[ExplainRefProperty] = projection.ExplainRef,

View File

@@ -14,6 +14,11 @@ public interface IPolicyEvaluationService
/// <summary>
/// Outcome of a policy evaluation for a single finding, including the optional
/// risk-scoring fields. All risk fields are nullable; callers visible elsewhere in this
/// commit pass nulls or carry values forward from an existing projection.
/// </summary>
/// <param name="Status">Evaluated finding status, or null when not determined.</param>
/// <param name="Severity">Evaluated severity score, or null.</param>
/// <param name="RiskScore">Risk score, or null when risk scoring did not run.</param>
/// <param name="RiskSeverity">Risk severity label, or null.</param>
/// <param name="RiskProfileVersion">Version of the risk profile used, or null.</param>
/// <param name="RiskExplanationId">Identifier of the risk explanation record, or null.</param>
/// <param name="RiskEventSequence">Sequence number of the risk event, or null.</param>
/// <param name="Labels">Labels attached to the finding (JSON object).</param>
/// <param name="ExplainRef">Reference to the policy explanation, or null.</param>
/// <param name="Rationale">Policy rationale entries (JSON array).</param>
public sealed record PolicyEvaluationResult(
string? Status,
decimal? Severity,
decimal? RiskScore,
string? RiskSeverity,
string? RiskProfileVersion,
Guid? RiskExplanationId,
long? RiskEventSequence,
JsonObject Labels,
string? ExplainRef,
JsonArray Rationale);

View File

@@ -42,6 +42,11 @@ public sealed class InlinePolicyEvaluationService : IPolicyEvaluationService
var result = new PolicyEvaluationResult(
status,
severity,
null,
null,
null,
null,
existingProjection?.RiskEventSequence,
labels,
explainRef,
rationale);
@@ -62,6 +67,11 @@ public sealed class InlinePolicyEvaluationService : IPolicyEvaluationService
return new PolicyEvaluationResult(
existingProjection?.Status,
existingProjection?.Severity,
existingProjection?.RiskScore,
existingProjection?.RiskSeverity,
existingProjection?.RiskProfileVersion,
existingProjection?.RiskExplanationId,
existingProjection?.RiskEventSequence,
labels,
existingProjection?.ExplainRef,
rationale);

View File

@@ -129,6 +129,10 @@ internal sealed class PolicyEngineEvaluationService : IPolicyEvaluationService
{
["status"] = existingProjection.Status,
["severity"] = existingProjection.Severity,
["riskScore"] = existingProjection.RiskScore,
["riskSeverity"] = existingProjection.RiskSeverity,
["riskProfileVersion"] = existingProjection.RiskProfileVersion,
["riskExplanationId"] = existingProjection.RiskExplanationId?.ToString(),
["labels"] = existingProjection.Labels.DeepClone(),
["explainRef"] = existingProjection.ExplainRef,
["rationale"] = existingProjection.PolicyRationale.DeepClone()
@@ -168,6 +172,22 @@ internal sealed class PolicyEngineEvaluationService : IPolicyEvaluationService
severity = decimalSeverity;
}
decimal? riskScore = null;
var riskScoreElement = item.GetPropertyOrDefault("riskScore");
if (riskScoreElement.HasValue && riskScoreElement.Value.ValueKind == JsonValueKind.Number && riskScoreElement.Value.TryGetDecimal(out var decimalRiskScore))
{
riskScore = decimalRiskScore;
}
var riskSeverity = item.GetPropertyOrDefault("riskSeverity")?.GetString();
var riskProfileVersion = item.GetPropertyOrDefault("riskProfileVersion")?.GetString();
Guid? riskExplanationId = null;
var riskExplanationElement = item.GetPropertyOrDefault("riskExplanationId");
if (riskExplanationElement.HasValue && riskExplanationElement.Value.ValueKind == JsonValueKind.String &&
Guid.TryParse(riskExplanationElement.Value.GetString(), out var parsedExplanation))
{
riskExplanationId = parsedExplanation;
}
var labelsNode = new JsonObject();
var labelsElement = item.GetPropertyOrDefault("labels");
if (labelsElement.HasValue && labelsElement.Value.ValueKind == JsonValueKind.Object)
@@ -175,6 +195,12 @@ internal sealed class PolicyEngineEvaluationService : IPolicyEvaluationService
labelsNode = (JsonObject)labelsElement.Value.ToJsonNode()!;
}
var explainRef = item.GetPropertyOrDefault("explainRef")?.GetString();
long? riskEventSequence = null;
var riskEventSequenceElement = item.GetPropertyOrDefault("riskEventSequence");
if (riskEventSequenceElement.HasValue && riskEventSequenceElement.Value.ValueKind == JsonValueKind.Number)
{
riskEventSequence = riskEventSequenceElement.Value.GetInt64();
}
JsonArray rationale;
var rationaleElement = item.GetPropertyOrDefault("rationale");
@@ -191,7 +217,17 @@ internal sealed class PolicyEngineEvaluationService : IPolicyEvaluationService
rationale = (JsonArray)rationaleElement.Value.ToJsonNode()!;
}
return new PolicyEvaluationResult(status, severity, labelsNode, explainRef, rationale);
return new PolicyEvaluationResult(
status,
severity,
riskScore,
riskSeverity,
riskProfileVersion,
riskExplanationId,
riskEventSequence ?? record.SequenceNumber,
labelsNode,
explainRef,
rationale);
}
throw new InvalidOperationException("Policy engine response did not include evaluation for requested finding.");

View File

@@ -66,6 +66,11 @@ internal sealed class PolicyEvaluationCache : IDisposable
return new PolicyEvaluationResult(
result.Status,
result.Severity,
result.RiskScore,
result.RiskSeverity,
result.RiskProfileVersion,
result.RiskExplanationId,
result.RiskEventSequence,
labelsClone,
result.ExplainRef,
rationaleClone);

View File

@@ -0,0 +1,40 @@
-- 004_ledger_attestations.sql
-- LEDGER-OBS-54-001: storage for attestation verification exports
BEGIN;

-- Attestation verification records, one row per (tenant, attestation).
-- The primary key is declared inline so re-running this migration is idempotent;
-- the previous separate "ALTER TABLE ... ADD CONSTRAINT" had no IF NOT EXISTS
-- equivalent and failed on every run after the first.
CREATE TABLE IF NOT EXISTS ledger_attestations (
tenant_id text NOT NULL,
-- NOTE(review): the web service reads attestation_id with GetString and binds text
-- cursor parameters against it; confirm uuid (rather than text) is the intended type.
attestation_id uuid NOT NULL,
artifact_id text NOT NULL,
finding_id text NULL,
verification_status text NOT NULL,
verification_time timestamptz NOT NULL,
dsse_digest text NOT NULL,
rekor_entry_id text NULL,
evidence_bundle_ref text NULL,
ledger_event_id uuid NOT NULL,
recorded_at timestamptz NOT NULL,
merkle_leaf_hash text NOT NULL,
root_hash text NOT NULL,
cycle_hash text NOT NULL,
projection_version text NOT NULL,
CONSTRAINT pk_ledger_attestations PRIMARY KEY (tenant_id, attestation_id)
);

-- Keyset-pagination order used by the export query.
CREATE INDEX IF NOT EXISTS ix_ledger_attestations_recorded
ON ledger_attestations (tenant_id, recorded_at, attestation_id);

-- Lookup by artifact, newest first.
CREATE INDEX IF NOT EXISTS ix_ledger_attestations_artifact
ON ledger_attestations (tenant_id, artifact_id, recorded_at DESC);

-- Lookup by finding; partial index since finding_id is nullable.
CREATE INDEX IF NOT EXISTS ix_ledger_attestations_finding
ON ledger_attestations (tenant_id, finding_id, recorded_at DESC)
WHERE finding_id IS NOT NULL;

-- Lookup by verification status, newest first.
CREATE INDEX IF NOT EXISTS ix_ledger_attestations_status
ON ledger_attestations (tenant_id, verification_status, recorded_at DESC);

COMMIT;

View File

@@ -0,0 +1,15 @@
-- 004_risk_fields.sql
-- Add risk scoring fields to findings_projection (LEDGER-RISK-66-001/002)
BEGIN;
ALTER TABLE findings_projection
ADD COLUMN IF NOT EXISTS risk_score NUMERIC(6,3),
ADD COLUMN IF NOT EXISTS risk_severity TEXT,
ADD COLUMN IF NOT EXISTS risk_profile_version TEXT,
ADD COLUMN IF NOT EXISTS risk_explanation_id UUID,
ADD COLUMN IF NOT EXISTS risk_event_sequence BIGINT;
CREATE INDEX IF NOT EXISTS ix_projection_risk ON findings_projection (tenant_id, risk_severity, risk_score DESC);
COMMIT;

View File

@@ -0,0 +1,16 @@
-- 005_risk_fields.sql
-- LEDGER-RISK-66-001: add risk scoring fields to findings projection
BEGIN;
ALTER TABLE findings_projection
ADD COLUMN IF NOT EXISTS risk_score numeric(6,2) NULL,
ADD COLUMN IF NOT EXISTS risk_severity text NULL,
ADD COLUMN IF NOT EXISTS risk_profile_version text NULL,
ADD COLUMN IF NOT EXISTS risk_explanation_id text NULL,
ADD COLUMN IF NOT EXISTS risk_event_sequence bigint NULL;
CREATE INDEX IF NOT EXISTS ix_findings_projection_risk
ON findings_projection (tenant_id, risk_severity, risk_score DESC, recorded_at DESC);
COMMIT;

View File

@@ -460,6 +460,10 @@ internal sealed class NoOpPolicyEvaluationService : IPolicyEvaluationService
return Task.FromResult(new PolicyEvaluationResult(
Status: current?.Status ?? "new",
Severity: current?.Severity,
RiskScore: current?.RiskScore,
RiskSeverity: current?.RiskSeverity,
RiskProfileVersion: current?.RiskProfileVersion,
RiskExplanationId: current?.RiskExplanationId,
Labels: labels,
ExplainRef: null,
Rationale: new JsonArray()));