Refactor code structure for improved readability and maintainability

This commit is contained in:
StellaOps Bot
2025-12-06 10:23:40 +02:00
parent 6beb9d7c4e
commit 37304cf819
78 changed files with 5471 additions and 104 deletions

View File

@@ -10,7 +10,8 @@ public sealed record AdvisoryObservationQueryResponse(
ImmutableArray<AdvisoryObservation> Observations,
AdvisoryObservationLinksetAggregateResponse Linkset,
string? NextCursor,
bool HasMore);
bool HasMore,
DataFreshnessInfo? Freshness = null);
public sealed record AdvisoryObservationLinksetAggregateResponse(
ImmutableArray<string> Aliases,

View File

@@ -0,0 +1,217 @@
using System.Text.Json.Serialization;
namespace StellaOps.Concelier.WebService.Contracts;
/// <summary>
/// Staleness metadata for air-gapped deployments.
/// Per CONCELIER-WEB-AIRGAP-56-002.
/// </summary>
public sealed record StalenessMetadata
{
/// <summary>
/// When the data was last refreshed from its source.
/// </summary>
[JsonPropertyName("lastRefreshedAt")]
public DateTimeOffset? LastRefreshedAt { get; init; }
/// <summary>
/// Age of the data in seconds since last refresh.
/// </summary>
[JsonPropertyName("ageSeconds")]
public long? AgeSeconds { get; init; }
/// <summary>
/// Whether the data is considered stale based on configured thresholds.
/// </summary>
[JsonPropertyName("isStale")]
public bool IsStale { get; init; }
/// <summary>
/// Staleness threshold in seconds (data older than this is stale).
/// </summary>
[JsonPropertyName("thresholdSeconds")]
public long? ThresholdSeconds { get; init; }
/// <summary>
/// Human-readable staleness status.
/// </summary>
[JsonPropertyName("status")]
public string Status { get; init; } = "unknown";
/// <summary>
/// Creates a fresh staleness metadata.
/// </summary>
public static StalenessMetadata Fresh(DateTimeOffset refreshedAt, long thresholdSeconds = 86400)
{
return new StalenessMetadata
{
LastRefreshedAt = refreshedAt,
AgeSeconds = 0,
IsStale = false,
ThresholdSeconds = thresholdSeconds,
Status = "fresh"
};
}
/// <summary>
/// Creates staleness metadata based on refresh time and threshold.
/// </summary>
public static StalenessMetadata Compute(
DateTimeOffset? lastRefreshedAt,
DateTimeOffset now,
long thresholdSeconds = 86400)
{
if (!lastRefreshedAt.HasValue)
{
return new StalenessMetadata
{
LastRefreshedAt = null,
AgeSeconds = null,
IsStale = true,
ThresholdSeconds = thresholdSeconds,
Status = "unknown"
};
}
var age = (long)(now - lastRefreshedAt.Value).TotalSeconds;
var isStale = age > thresholdSeconds;
return new StalenessMetadata
{
LastRefreshedAt = lastRefreshedAt,
AgeSeconds = age,
IsStale = isStale,
ThresholdSeconds = thresholdSeconds,
Status = isStale ? "stale" : "fresh"
};
}
}
/// <summary>
/// Bundle provenance metadata for air-gapped deployments.
/// Per CONCELIER-WEB-AIRGAP-56-002.
/// </summary>
public sealed record BundleProvenanceMetadata
{
/// <summary>
/// Bundle identifier the data originated from.
/// </summary>
[JsonPropertyName("bundleId")]
public string? BundleId { get; init; }
/// <summary>
/// Bundle version.
/// </summary>
[JsonPropertyName("bundleVersion")]
public string? BundleVersion { get; init; }
/// <summary>
/// Source that provided the bundle.
/// </summary>
[JsonPropertyName("sourceId")]
public string? SourceId { get; init; }
/// <summary>
/// When the bundle was imported.
/// </summary>
[JsonPropertyName("importedAt")]
public DateTimeOffset? ImportedAt { get; init; }
/// <summary>
/// Content hash for integrity verification.
/// </summary>
[JsonPropertyName("contentHash")]
public string? ContentHash { get; init; }
/// <summary>
/// Signature status (verified, unverified, unsigned).
/// </summary>
[JsonPropertyName("signatureStatus")]
public string? SignatureStatus { get; init; }
/// <summary>
/// Key ID used for signing.
/// </summary>
[JsonPropertyName("signatureKeyId")]
public string? SignatureKeyId { get; init; }
/// <summary>
/// Whether this data came from an air-gapped bundle (vs direct ingestion).
/// </summary>
[JsonPropertyName("isAirGapped")]
public bool IsAirGapped { get; init; }
}
/// <summary>
/// Combined data freshness information for API responses.
/// Per CONCELIER-WEB-AIRGAP-56-002.
/// </summary>
public sealed record DataFreshnessInfo
{
/// <summary>
/// Staleness metadata.
/// </summary>
[JsonPropertyName("staleness")]
public StalenessMetadata? Staleness { get; init; }
/// <summary>
/// Bundle provenance if data came from an air-gap bundle.
/// </summary>
[JsonPropertyName("bundleProvenance")]
public BundleProvenanceMetadata? BundleProvenance { get; init; }
/// <summary>
/// Whether data is from an air-gapped source.
/// </summary>
[JsonPropertyName("isAirGapped")]
public bool IsAirGapped => BundleProvenance?.IsAirGapped ?? false;
/// <summary>
/// Computed at timestamp.
/// </summary>
[JsonPropertyName("computedAt")]
public DateTimeOffset ComputedAt { get; init; }
/// <summary>
/// Creates freshness info for online (non-air-gapped) data.
/// </summary>
public static DataFreshnessInfo Online(DateTimeOffset now, DateTimeOffset? lastRefreshedAt = null)
{
return new DataFreshnessInfo
{
Staleness = StalenessMetadata.Compute(lastRefreshedAt ?? now, now),
BundleProvenance = null,
ComputedAt = now
};
}
/// <summary>
/// Creates freshness info for air-gapped data.
/// </summary>
public static DataFreshnessInfo AirGapped(
DateTimeOffset now,
string bundleId,
string? bundleVersion,
string sourceId,
DateTimeOffset importedAt,
string? contentHash = null,
string? signatureStatus = null,
long stalenessThresholdSeconds = 86400)
{
return new DataFreshnessInfo
{
Staleness = StalenessMetadata.Compute(importedAt, now, stalenessThresholdSeconds),
BundleProvenance = new BundleProvenanceMetadata
{
BundleId = bundleId,
BundleVersion = bundleVersion,
SourceId = sourceId,
ImportedAt = importedAt,
ContentHash = contentHash,
SignatureStatus = signatureStatus,
IsAirGapped = true
},
ComputedAt = now
};
}
}

View File

@@ -21,7 +21,8 @@ public sealed record LnmLinksetResponse(
[property: JsonPropertyName("normalized")] LnmLinksetNormalized? Normalized,
[property: JsonPropertyName("cached")] bool Cached,
[property: JsonPropertyName("remarks")] IReadOnlyList<string> Remarks,
[property: JsonPropertyName("observations")] IReadOnlyList<string> Observations);
[property: JsonPropertyName("observations")] IReadOnlyList<string> Observations,
[property: JsonPropertyName("freshness")] DataFreshnessInfo? Freshness = null);
public sealed record LnmLinksetPage(
[property: JsonPropertyName("items")] IReadOnlyList<LnmLinksetResponse> Items,

View File

@@ -62,6 +62,9 @@ public static class ErrorCodes
/// <summary>Bundle source not found.</summary>
public const string BundleSourceNotFound = "BUNDLE_SOURCE_NOT_FOUND";
/// <summary>Bundle not found in catalog.</summary>
public const string BundleNotFound = "BUNDLE_NOT_FOUND";
// ─────────────────────────────────────────────────────────────────────────
// AOC (Aggregation-Only Contract) Errors
// ─────────────────────────────────────────────────────────────────────────

View File

@@ -1,3 +1,4 @@
using System.Diagnostics;
using Microsoft.AspNetCore.Http;
using Microsoft.AspNetCore.Mvc;
using Microsoft.Extensions.Options;
@@ -161,5 +162,118 @@ internal static class AirGapEndpointExtensions
var status = sealedModeEnforcer.GetStatus();
return Results.Ok(status);
});
// POST /api/v1/concelier/airgap/bundles/{bundleId}/import - Import a bundle with timeline event
// Per CONCELIER-WEB-AIRGAP-58-001
group.MapPost("/bundles/{bundleId}/import", async (
HttpContext context,
IBundleCatalogService catalogService,
IBundleTimelineEmitter timelineEmitter,
IOptionsMonitor<ConcelierOptions> optionsMonitor,
string bundleId,
[FromBody] BundleImportRequestDto requestDto,
CancellationToken cancellationToken) =>
{
var airGapOptions = optionsMonitor.CurrentValue.AirGap;
if (!airGapOptions.Enabled)
{
return ConcelierProblemResultFactory.AirGapDisabled(context);
}
if (string.IsNullOrWhiteSpace(requestDto.TenantId))
{
return ConcelierProblemResultFactory.RequiredFieldMissing(context, "tenantId");
}
// Find the bundle in the catalog
var catalog = await catalogService.GetCatalogAsync(null, 1000, cancellationToken)
.ConfigureAwait(false);
var bundle = catalog.Entries.FirstOrDefault(e => e.BundleId == bundleId);
if (bundle is null)
{
return ConcelierProblemResultFactory.BundleNotFound(context, bundleId);
}
// Create actor from request or default
var actor = new BundleImportActor
{
Id = requestDto.ActorId ?? context.User?.Identity?.Name ?? "anonymous",
Type = requestDto.ActorType ?? "user",
DisplayName = requestDto.ActorDisplayName
};
// Create import request
var importRequest = new BundleImportRequest
{
TenantId = requestDto.TenantId,
Bundle = bundle,
Scope = Enum.TryParse<BundleImportScope>(requestDto.Scope, true, out var scope)
? scope
: BundleImportScope.Delta,
Actor = actor,
TraceId = Activity.Current?.TraceId.ToString()
};
// Simulate import (actual import would happen via ingestion pipeline)
var sw = Stopwatch.StartNew();
// TODO: Wire actual bundle import logic here
var importStats = new BundleImportStats
{
TotalItems = bundle.ItemCount,
ItemsAdded = bundle.ItemCount,
ItemsUpdated = 0,
ItemsRemoved = 0,
ItemsSkipped = 0,
DurationMs = sw.ElapsedMilliseconds,
SizeBytes = bundle.SizeBytes
};
var importResult = new BundleImportResult
{
Success = true,
Stats = importStats,
EvidenceBundleRef = requestDto.EvidenceBundleRef
};
// Emit timeline event
var timelineEvent = await timelineEmitter.EmitImportAsync(importRequest, importResult, cancellationToken)
.ConfigureAwait(false);
return Results.Ok(new BundleImportResponseDto
{
EventId = timelineEvent.EventId,
BundleId = bundleId,
TenantId = requestDto.TenantId,
Stats = importStats,
OccurredAt = timelineEvent.OccurredAt
});
});
}
}
/// <summary>
/// Request DTO for bundle import.
/// </summary>
public sealed record BundleImportRequestDto
{
public required string TenantId { get; init; }
public string? Scope { get; init; }
public string? ActorId { get; init; }
public string? ActorType { get; init; }
public string? ActorDisplayName { get; init; }
public string? EvidenceBundleRef { get; init; }
}
/// <summary>
/// Response DTO for bundle import.
/// </summary>
public sealed record BundleImportResponseDto
{
public Guid EventId { get; init; }
public required string BundleId { get; init; }
public required string TenantId { get; init; }
public required BundleImportStats Stats { get; init; }
public DateTimeOffset OccurredAt { get; init; }
}

View File

@@ -2552,7 +2552,8 @@ LnmLinksetResponse ToLnmResponse(
bool includeConflicts,
bool includeTimeline,
bool includeObservations,
LinksetObservationSummary summary)
LinksetObservationSummary summary,
DataFreshnessInfo? freshness = null)
{
var normalized = linkset.Normalized;
var severity = summary.Severity ?? (normalized?.Severities?.FirstOrDefault() is { } severityDict
@@ -2607,7 +2608,8 @@ LnmLinksetResponse ToLnmResponse(
normalizedDto,
Cached: false,
Remarks: Array.Empty<string>(),
Observations: includeObservations ? linkset.ObservationIds : Array.Empty<string>());
Observations: includeObservations ? linkset.ObservationIds : Array.Empty<string>(),
Freshness: freshness);
}
string? ExtractSeverity(IReadOnlyDictionary<string, object?> severityDict)

View File

@@ -199,6 +199,14 @@ public static class ConcelierProblemResultFactory
return NotFound(context, ErrorCodes.BundleSourceNotFound, "Bundle source", sourceId);
}
/// <summary>
/// Creates a 404 Not Found response for bundle not found.
/// </summary>
public static IResult BundleNotFound(HttpContext context, string? bundleId = null)
{
return NotFound(context, ErrorCodes.BundleNotFound, "Bundle", bundleId);
}
/// <summary>
/// Creates a generic 404 Not Found response.
/// </summary>
@@ -316,6 +324,64 @@ public static class ConcelierProblemResultFactory
new Dictionary<string, object?> { ["destination"] = destination });
}
/// <summary>
/// Creates a 403 Forbidden response for egress blocked with full payload and remediation.
/// Per CONCELIER-WEB-AIRGAP-57-001.
/// </summary>
public static IResult EgressBlocked(
HttpContext context,
StellaOps.Concelier.Core.AirGap.SealedModeViolationException exception)
{
var payload = exception.Payload;
var envelope = new ErrorEnvelope
{
Type = "https://stellaops.org/problems/airgap-egress-blocked",
Title = "Egress blocked by sealed mode",
Status = StatusCodes.Status403Forbidden,
Detail = payload.Reason,
Instance = context.Request.Path,
TraceId = Activity.Current?.TraceId.ToString() ?? context.TraceIdentifier,
Error = new ErrorDetail
{
Code = StellaOps.Concelier.Core.AirGap.Models.AirGapEgressBlockedPayload.ErrorCode,
Message = payload.Reason,
Target = payload.SourceName,
Metadata = new Dictionary<string, object?>
{
["sourceName"] = payload.SourceName,
["destination"] = payload.Destination,
["destinationHost"] = payload.DestinationHost,
["occurredAt"] = payload.OccurredAt,
["wasBlocked"] = payload.WasBlocked,
["remediation"] = new
{
summary = payload.Remediation.Summary,
steps = payload.Remediation.Steps.Select(s => new
{
order = s.Order,
action = s.Action,
description = s.Description
}).ToArray(),
configurationHints = payload.Remediation.ConfigurationHints.Select(h => new
{
key = h.Key,
description = h.Description,
example = h.Example
}).ToArray(),
documentationLinks = payload.Remediation.DocumentationLinks.Select(l => new
{
title = l.Title,
url = l.Url
}).ToArray()
}
},
HelpUrl = "https://docs.stellaops.org/concelier/airgap/sealed-mode"
}
};
return Microsoft.AspNetCore.Http.Results.Json(envelope, statusCode: StatusCodes.Status403Forbidden);
}
// ─────────────────────────────────────────────────────────────────────────
// Rate Limiting (429)
// ─────────────────────────────────────────────────────────────────────────

View File

@@ -1,8 +1,25 @@
openapi: 3.1.0
info:
title: StellaOps Concelier Link-Not-Merge Policy APIs
version: "0.1.0"
description: Fact-only advisory/linkset retrieval for Policy Engine consumers.
version: "1.0.0"
description: |
Fact-only advisory/linkset retrieval for Policy Engine consumers.
## Philosophy
Link-Not-Merge (LNM) provides raw advisory data with full provenance:
- **Link**: Observations from multiple sources are linked via shared identifiers.
- **Not Merge**: Conflicting data is preserved rather than collapsed.
- **Surface, Don't Resolve**: Conflicts are clearly marked for consumers.
## Authentication
All endpoints require the `X-Stella-Tenant` header for multi-tenant isolation.
## Pagination
List endpoints support cursor-based pagination with `page` and `pageSize` parameters.
Maximum page size is 200 items.
## Documentation
See `/docs/modules/concelier/api/` for detailed examples and conflict resolution strategies.
servers:
- url: /
description: Relative base path (API Gateway rewrites in production).
@@ -44,6 +61,65 @@ paths:
application/json:
schema:
$ref: '#/components/schemas/PagedLinksets'
examples:
single-linkset:
summary: Single linkset result
value:
items:
- advisoryId: "CVE-2021-23337"
source: "nvd"
purl: ["pkg:npm/lodash@4.17.20"]
cpe: ["cpe:2.3:a:lodash:lodash:4.17.20:*:*:*:*:node.js:*:*"]
summary: "Lodash Command Injection vulnerability"
publishedAt: "2021-02-15T13:15:00Z"
modifiedAt: "2024-08-04T19:16:00Z"
severity: "high"
provenance:
ingestedAt: "2025-11-20T10:30:00Z"
connectorId: "nvd-osv-connector"
evidenceHash: "sha256:a1b2c3d4e5f6"
conflicts: []
cached: false
page: 1
pageSize: 50
total: 1
with-conflicts:
summary: Linkset with severity conflict
value:
items:
- advisoryId: "CVE-2024-1234"
source: "aggregated"
purl: ["pkg:npm/example@1.0.0"]
cpe: []
severity: "high"
provenance:
ingestedAt: "2025-11-20T10:30:00Z"
connectorId: "multi-source"
conflicts:
- field: "severity"
reason: "severity-mismatch"
observedValue: "critical"
observedAt: "2025-11-18T08:00:00Z"
evidenceHash: "sha256:conflict-hash"
cached: false
page: 1
pageSize: 50
total: 1
"400":
description: Invalid request parameters
content:
application/json:
schema:
$ref: '#/components/schemas/ErrorEnvelope'
example:
type: "https://stellaops.io/errors/validation-failed"
title: "Validation Failed"
status: 400
detail: "The 'pageSize' parameter exceeds the maximum allowed value."
error:
code: "ERR_PAGE_SIZE_EXCEEDED"
message: "Page size must be between 1 and 200."
target: "pageSize"
/v1/lnm/linksets/{advisoryId}:
get:
summary: Get linkset by advisory ID
@@ -275,3 +351,63 @@ components:
event: { type: string }
at: { type: string, format: date-time }
evidenceHash: { type: string }
ErrorEnvelope:
type: object
description: RFC 7807 Problem Details with StellaOps extensions
properties:
type:
type: string
format: uri
description: URI identifying the problem type
title:
type: string
description: Short, human-readable summary
status:
type: integer
description: HTTP status code
detail:
type: string
description: Specific explanation of the problem
instance:
type: string
format: uri
description: URI of the specific occurrence
traceId:
type: string
description: Distributed trace identifier
error:
$ref: '#/components/schemas/ErrorDetail'
ErrorDetail:
type: object
description: Machine-readable error information
properties:
code:
type: string
description: Machine-readable error code (e.g., ERR_VALIDATION_FAILED)
message:
type: string
description: Human-readable error message
target:
type: string
description: Field or resource that caused the error
metadata:
type: object
additionalProperties: true
description: Additional contextual data
innerErrors:
type: array
items:
$ref: '#/components/schemas/ValidationError'
description: Nested validation errors
ValidationError:
type: object
properties:
field:
type: string
description: Field path (e.g., "data.severity")
code:
type: string
description: Error code for this field
message:
type: string
description: Human-readable message

View File

@@ -46,6 +46,10 @@ public static class AirGapServiceCollectionExtensions
timeProvider: timeProvider);
});
// Register timeline emitter (CONCELIER-WEB-AIRGAP-58-001)
services.TryAddSingleton<IBundleTimelineEmitter, BundleTimelineEmitter>();
services.AddSingleton<IBundleTimelineEventSink, LoggingBundleTimelineEventSink>();
return services;
}
}

View File

@@ -0,0 +1,183 @@
using System.Diagnostics;
using Microsoft.Extensions.Logging;
using StellaOps.Concelier.Core.AirGap.Models;
namespace StellaOps.Concelier.Core.AirGap;
/// <summary>
/// Default implementation of bundle timeline event emission.
/// Per CONCELIER-WEB-AIRGAP-58-001.
/// </summary>
public sealed class BundleTimelineEmitter : IBundleTimelineEmitter
{
private readonly TimeProvider _timeProvider;
private readonly ILogger<BundleTimelineEmitter> _logger;
private readonly List<IBundleTimelineEventSink> _sinks;
public BundleTimelineEmitter(
TimeProvider timeProvider,
IEnumerable<IBundleTimelineEventSink> sinks,
ILogger<BundleTimelineEmitter> logger)
{
_timeProvider = timeProvider ?? throw new ArgumentNullException(nameof(timeProvider));
_sinks = sinks?.ToList() ?? [];
_logger = logger ?? throw new ArgumentNullException(nameof(logger));
}
public async Task EmitImportAsync(
BundleImportTimelineEvent timelineEvent,
CancellationToken cancellationToken = default)
{
ArgumentNullException.ThrowIfNull(timelineEvent);
_logger.LogInformation(
"Emitting bundle import timeline event: TenantId={TenantId}, BundleId={BundleId}, SourceId={SourceId}, Type={BundleType}, Scope={Scope}, ItemsAdded={ItemsAdded}, ItemsUpdated={ItemsUpdated}",
timelineEvent.TenantId,
timelineEvent.BundleId,
timelineEvent.SourceId,
timelineEvent.BundleType,
timelineEvent.Scope,
timelineEvent.Stats.ItemsAdded,
timelineEvent.Stats.ItemsUpdated);
// Emit to all registered sinks
var tasks = _sinks.Select(sink => EmitToSinkAsync(sink, timelineEvent, cancellationToken));
await Task.WhenAll(tasks).ConfigureAwait(false);
}
public async Task<BundleImportTimelineEvent> EmitImportAsync(
BundleImportRequest request,
BundleImportResult result,
CancellationToken cancellationToken = default)
{
ArgumentNullException.ThrowIfNull(request);
ArgumentNullException.ThrowIfNull(result);
var timelineEvent = new BundleImportTimelineEvent
{
EventId = Guid.NewGuid(),
TenantId = request.TenantId,
BundleId = request.Bundle.BundleId,
SourceId = request.Bundle.SourceId,
BundleType = request.Bundle.Type,
Scope = request.Scope,
Actor = request.Actor,
Stats = result.Stats,
EvidenceBundleRef = result.EvidenceBundleRef,
ContentHash = request.Bundle.ContentHash,
OccurredAt = _timeProvider.GetUtcNow(),
TraceId = request.TraceId ?? Activity.Current?.TraceId.ToString()
};
await EmitImportAsync(timelineEvent, cancellationToken).ConfigureAwait(false);
return timelineEvent;
}
private async Task EmitToSinkAsync(
IBundleTimelineEventSink sink,
BundleImportTimelineEvent timelineEvent,
CancellationToken cancellationToken)
{
try
{
await sink.WriteAsync(timelineEvent, cancellationToken).ConfigureAwait(false);
}
catch (Exception ex)
{
_logger.LogError(ex,
"Failed to emit timeline event to sink {SinkType}: EventId={EventId}, BundleId={BundleId}",
sink.GetType().Name,
timelineEvent.EventId,
timelineEvent.BundleId);
// Swallow exception to allow other sinks to process
}
}
}
/// <summary>
/// Sink for writing bundle timeline events to a destination.
/// </summary>
public interface IBundleTimelineEventSink
{
/// <summary>
/// Writes a timeline event.
/// </summary>
Task WriteAsync(BundleImportTimelineEvent timelineEvent, CancellationToken cancellationToken);
}
/// <summary>
/// In-memory sink for testing or local buffering.
/// </summary>
public sealed class InMemoryBundleTimelineEventSink : IBundleTimelineEventSink
{
private readonly List<BundleImportTimelineEvent> _events = [];
private readonly object _lock = new();
public IReadOnlyList<BundleImportTimelineEvent> Events
{
get
{
lock (_lock)
{
return _events.ToList();
}
}
}
public Task WriteAsync(BundleImportTimelineEvent timelineEvent, CancellationToken cancellationToken)
{
lock (_lock)
{
_events.Add(timelineEvent);
}
return Task.CompletedTask;
}
public void Clear()
{
lock (_lock)
{
_events.Clear();
}
}
}
/// <summary>
/// Logging sink that writes timeline events to structured logs.
/// </summary>
public sealed class LoggingBundleTimelineEventSink : IBundleTimelineEventSink
{
private readonly ILogger<LoggingBundleTimelineEventSink> _logger;
public LoggingBundleTimelineEventSink(ILogger<LoggingBundleTimelineEventSink> logger)
{
_logger = logger ?? throw new ArgumentNullException(nameof(logger));
}
public Task WriteAsync(BundleImportTimelineEvent timelineEvent, CancellationToken cancellationToken)
{
_logger.LogInformation(
"TIMELINE_EVENT: Type={Type}, EventId={EventId}, TenantId={TenantId}, BundleId={BundleId}, " +
"SourceId={SourceId}, BundleType={BundleType}, Scope={Scope}, ActorId={ActorId}, " +
"ItemsAdded={ItemsAdded}, ItemsUpdated={ItemsUpdated}, ItemsRemoved={ItemsRemoved}, " +
"DurationMs={DurationMs}, ContentHash={ContentHash}, TraceId={TraceId}, OccurredAt={OccurredAt}",
timelineEvent.Type,
timelineEvent.EventId,
timelineEvent.TenantId,
timelineEvent.BundleId,
timelineEvent.SourceId,
timelineEvent.BundleType,
timelineEvent.Scope,
timelineEvent.Actor.Id,
timelineEvent.Stats.ItemsAdded,
timelineEvent.Stats.ItemsUpdated,
timelineEvent.Stats.ItemsRemoved,
timelineEvent.Stats.DurationMs,
timelineEvent.ContentHash,
timelineEvent.TraceId,
timelineEvent.OccurredAt.ToString("O"));
return Task.CompletedTask;
}
}

View File

@@ -0,0 +1,82 @@
using StellaOps.Concelier.Core.AirGap.Models;
namespace StellaOps.Concelier.Core.AirGap;
/// <summary>
/// Service for emitting timeline events for bundle operations.
/// Per CONCELIER-WEB-AIRGAP-58-001.
/// </summary>
public interface IBundleTimelineEmitter
{
/// <summary>
/// Emits a timeline event for a bundle import.
/// </summary>
Task EmitImportAsync(
BundleImportTimelineEvent timelineEvent,
CancellationToken cancellationToken = default);
/// <summary>
/// Creates and emits a timeline event for a bundle import.
/// </summary>
Task<BundleImportTimelineEvent> EmitImportAsync(
BundleImportRequest request,
BundleImportResult result,
CancellationToken cancellationToken = default);
}
/// <summary>
/// Request for a bundle import operation.
/// </summary>
public sealed record BundleImportRequest
{
/// <summary>
/// Tenant performing the import.
/// </summary>
public required string TenantId { get; init; }
/// <summary>
/// Bundle to import.
/// </summary>
public required BundleCatalogEntry Bundle { get; init; }
/// <summary>
/// Scope of the import.
/// </summary>
public BundleImportScope Scope { get; init; } = BundleImportScope.Delta;
/// <summary>
/// Actor performing the import.
/// </summary>
public required BundleImportActor Actor { get; init; }
/// <summary>
/// Optional trace ID for correlation.
/// </summary>
public string? TraceId { get; init; }
}
/// <summary>
/// Result of a bundle import operation.
/// </summary>
public sealed record BundleImportResult
{
/// <summary>
/// Whether the import succeeded.
/// </summary>
public bool Success { get; init; }
/// <summary>
/// Import statistics.
/// </summary>
public required BundleImportStats Stats { get; init; }
/// <summary>
/// Evidence bundle reference if generated.
/// </summary>
public string? EvidenceBundleRef { get; init; }
/// <summary>
/// Error message if failed.
/// </summary>
public string? ErrorMessage { get; init; }
}

View File

@@ -2,6 +2,8 @@ using StellaOps.Concelier.Core.AirGap.Models;
namespace StellaOps.Concelier.Core.AirGap;
// Per CONCELIER-WEB-AIRGAP-57-001: Egress blocking with remediation guidance
/// <summary>
/// Enforces sealed mode by blocking direct internet feeds.
/// Per CONCELIER-WEB-AIRGAP-56-001.
@@ -37,16 +39,41 @@ public interface ISealedModeEnforcer
/// <summary>
/// Exception thrown when a sealed mode violation occurs.
/// Per CONCELIER-WEB-AIRGAP-57-001.
/// </summary>
public sealed class SealedModeViolationException : Exception
{
public SealedModeViolationException(string sourceName, Uri destination)
: this(sourceName, destination, DateTimeOffset.UtcNow)
{
}
public SealedModeViolationException(string sourceName, Uri destination, DateTimeOffset occurredAt)
: base($"Sealed mode violation: source '{sourceName}' attempted to access '{destination}'")
{
SourceName = sourceName;
Destination = destination;
OccurredAt = occurredAt;
Payload = AirGapEgressBlockedPayload.FromViolation(sourceName, destination, occurredAt, wasBlocked: true);
}
/// <summary>
/// Source name that attempted the egress.
/// </summary>
public string SourceName { get; }
/// <summary>
/// Destination URI that was blocked.
/// </summary>
public Uri Destination { get; }
/// <summary>
/// When the violation occurred.
/// </summary>
public DateTimeOffset OccurredAt { get; }
/// <summary>
/// Structured payload with remediation guidance.
/// </summary>
public AirGapEgressBlockedPayload Payload { get; }
}

View File

@@ -0,0 +1,164 @@
using System.Collections.Immutable;
namespace StellaOps.Concelier.Core.AirGap.Models;
/// <summary>
/// Structured payload for AIRGAP_EGRESS_BLOCKED events.
/// Per CONCELIER-WEB-AIRGAP-57-001.
/// </summary>
public sealed record AirGapEgressBlockedPayload
{
/// <summary>
/// Error code for this violation type.
/// </summary>
public const string ErrorCode = "AIRGAP_EGRESS_BLOCKED";
/// <summary>
/// Source name that attempted the egress.
/// </summary>
public required string SourceName { get; init; }
/// <summary>
/// Destination URI that was blocked.
/// </summary>
public required string Destination { get; init; }
/// <summary>
/// Host portion of the destination.
/// </summary>
public required string DestinationHost { get; init; }
/// <summary>
/// Reason for blocking.
/// </summary>
public required string Reason { get; init; }
/// <summary>
/// Timestamp when the violation occurred.
/// </summary>
public required DateTimeOffset OccurredAt { get; init; }
/// <summary>
/// Whether this was actually blocked (vs. warn-only mode).
/// </summary>
public required bool WasBlocked { get; init; }
/// <summary>
/// Remediation guidance for the operator.
/// </summary>
public required AirGapRemediationGuidance Remediation { get; init; }
/// <summary>
/// Creates a payload from a violation exception.
/// </summary>
public static AirGapEgressBlockedPayload FromViolation(
string sourceName,
Uri destination,
DateTimeOffset occurredAt,
bool wasBlocked)
{
return new AirGapEgressBlockedPayload
{
SourceName = sourceName,
Destination = destination.ToString(),
DestinationHost = destination.Host,
Reason = $"Source '{sourceName}' is not in the allowed sources list and host '{destination.Host}' is not in the allowed hosts list.",
OccurredAt = occurredAt,
WasBlocked = wasBlocked,
Remediation = AirGapRemediationGuidance.ForEgressBlocked(sourceName, destination.Host)
};
}
}
/// <summary>
/// Remediation guidance for air-gap violations.
/// Per CONCELIER-WEB-AIRGAP-57-001.
/// </summary>
public sealed record AirGapRemediationGuidance
{
/// <summary>
/// Short summary of what to do.
/// </summary>
public required string Summary { get; init; }
/// <summary>
/// Detailed steps to remediate the issue.
/// </summary>
public required ImmutableArray<RemediationStep> Steps { get; init; }
/// <summary>
/// Configuration keys that can be modified to allow this access.
/// </summary>
public required ImmutableArray<ConfigurationHint> ConfigurationHints { get; init; }
/// <summary>
/// Links to relevant documentation.
/// </summary>
public required ImmutableArray<DocumentationLink> DocumentationLinks { get; init; }
/// <summary>
/// Creates remediation guidance for an egress blocked violation.
/// </summary>
public static AirGapRemediationGuidance ForEgressBlocked(string sourceName, string host)
{
return new AirGapRemediationGuidance
{
Summary = $"Add '{sourceName}' to allowed sources or '{host}' to allowed hosts to permit this access.",
Steps = ImmutableArray.Create(
new RemediationStep(
Order: 1,
Action: "Review the blocked access",
Description: $"Verify that '{sourceName}' should be allowed to access '{host}' based on your security policy."),
new RemediationStep(
Order: 2,
Action: "Update configuration",
Description: "Add the source or host to the appropriate allowlist in your configuration."),
new RemediationStep(
Order: 3,
Action: "Restart or reload",
Description: "Restart the service or trigger a configuration reload for changes to take effect.")
),
ConfigurationHints = ImmutableArray.Create(
new ConfigurationHint(
Key: "Concelier:AirGap:SealedMode:AllowedSources",
Description: $"Add '{sourceName}' to this list to allow the source.",
Example: $"[\"{sourceName}\"]"),
new ConfigurationHint(
Key: "Concelier:AirGap:SealedMode:AllowedHosts",
Description: $"Add '{host}' to this list to allow the destination host.",
Example: $"[\"{host}\"]")
),
DocumentationLinks = ImmutableArray.Create(
new DocumentationLink(
Title: "Air-Gap Configuration Guide",
Url: "https://docs.stellaops.org/concelier/airgap/configuration"),
new DocumentationLink(
Title: "Sealed Mode Reference",
Url: "https://docs.stellaops.org/concelier/airgap/sealed-mode")
)
};
}
}
/// <summary>
/// A remediation step.
/// </summary>
public sealed record RemediationStep(
int Order,
string Action,
string Description);
/// <summary>
/// A configuration hint for remediation.
/// </summary>
public sealed record ConfigurationHint(
string Key,
string Description,
string Example);
/// <summary>
/// A link to documentation.
/// </summary>
public sealed record DocumentationLink(
string Title,
string Url);

View File

@@ -0,0 +1,161 @@
namespace StellaOps.Concelier.Core.AirGap.Models;
/// <summary>
/// Timeline event emitted when a bundle is imported.
/// Per CONCELIER-WEB-AIRGAP-58-001.
/// </summary>
public sealed record BundleImportTimelineEvent
{
/// <summary>
/// Event type identifier.
/// </summary>
public const string EventType = "airgap.bundle.imported";
/// <summary>
/// Unique event identifier.
/// </summary>
public required Guid EventId { get; init; }
/// <summary>
/// Type of the event (always "airgap.bundle.imported").
/// </summary>
public string Type => EventType;
/// <summary>
/// Tenant that owns this import.
/// </summary>
public required string TenantId { get; init; }
/// <summary>
/// Bundle identifier.
/// </summary>
public required string BundleId { get; init; }
/// <summary>
/// Source that provided the bundle.
/// </summary>
public required string SourceId { get; init; }
/// <summary>
/// Bundle type (advisory, vex, sbom, etc.).
/// </summary>
public required string BundleType { get; init; }
/// <summary>
/// Scope of the import (full, delta, patch).
/// </summary>
public required BundleImportScope Scope { get; init; }
/// <summary>
/// Actor who performed the import.
/// </summary>
public required BundleImportActor Actor { get; init; }
/// <summary>
/// Import statistics.
/// </summary>
public required BundleImportStats Stats { get; init; }
/// <summary>
/// Evidence bundle reference if applicable.
/// </summary>
public string? EvidenceBundleRef { get; init; }
/// <summary>
/// Content hash of the imported bundle.
/// </summary>
public required string ContentHash { get; init; }
/// <summary>
/// When the import occurred.
/// </summary>
public required DateTimeOffset OccurredAt { get; init; }
/// <summary>
/// Correlation trace ID for distributed tracing.
/// </summary>
public string? TraceId { get; init; }
}
/// <summary>
/// Scope of the bundle import.
/// </summary>
public enum BundleImportScope
{
/// <summary>
/// Full import replacing all existing data.
/// </summary>
Full,
/// <summary>
/// Delta import with only changes.
/// </summary>
Delta,
/// <summary>
/// Patch import for specific corrections.
/// </summary>
Patch
}
/// <summary>
/// Actor information for the import.
/// </summary>
public sealed record BundleImportActor
{
/// <summary>
/// Actor identifier.
/// </summary>
public required string Id { get; init; }
/// <summary>
/// Actor type (user, service, system).
/// </summary>
public required string Type { get; init; }
/// <summary>
/// Actor display name.
/// </summary>
public string? DisplayName { get; init; }
}
/// <summary>
/// Statistics for the bundle import.
/// </summary>
public sealed record BundleImportStats
{
/// <summary>
/// Total items in the bundle.
/// </summary>
public int TotalItems { get; init; }
/// <summary>
/// Items added during import.
/// </summary>
public int ItemsAdded { get; init; }
/// <summary>
/// Items updated during import.
/// </summary>
public int ItemsUpdated { get; init; }
/// <summary>
/// Items removed during import.
/// </summary>
public int ItemsRemoved { get; init; }
/// <summary>
/// Items skipped (unchanged).
/// </summary>
public int ItemsSkipped { get; init; }
/// <summary>
/// Duration of the import in milliseconds.
/// </summary>
public long DurationMs { get; init; }
/// <summary>
/// Bundle size in bytes.
/// </summary>
public long SizeBytes { get; init; }
}

View File

@@ -72,7 +72,7 @@ public sealed class SealedModeEnforcer : ISealedModeEnforcer
"Sealed mode violation blocked: source '{SourceName}' attempted to access '{Destination}'",
sourceName, destination);
throw new SealedModeViolationException(sourceName, destination);
throw new SealedModeViolationException(sourceName, destination, _timeProvider.GetUtcNow());
}
/// <inheritdoc />

View File

@@ -0,0 +1,355 @@
using System.Text.Json;
using Microsoft.Extensions.Options;
using StellaOps.Aoc;
using StellaOps.Concelier.Core.Aoc;
namespace StellaOps.Concelier.WebService.Tests.Aoc;
/// <summary>
/// Regression tests ensuring AOC verify consistently emits ERR_AOC_001 and maintains
/// mapper/guard parity across all violation scenarios.
/// Per CONCELIER-WEB-AOC-19-007.
/// </summary>
public sealed class AocVerifyRegressionTests
{
private static readonly AocGuardOptions GuardOptions = AocGuardOptions.Default;
[Fact]
public void Verify_ForbiddenField_EmitsErrAoc001()
{
var guard = new AocWriteGuard();
var json = CreateJsonWithForbiddenField("severity", "high");
var result = guard.Validate(json.RootElement, GuardOptions);
Assert.False(result.IsValid);
var violation = Assert.Single(result.Violations.Where(v => v.Path == "/severity"));
Assert.Equal("ERR_AOC_001", violation.ErrorCode);
Assert.Equal(AocViolationCode.ForbiddenField, violation.Code);
}
[Theory]
[InlineData("severity")]
[InlineData("cvss")]
[InlineData("cvss_vector")]
[InlineData("merged_from")]
[InlineData("consensus_provider")]
[InlineData("reachability")]
[InlineData("asset_criticality")]
[InlineData("risk_score")]
public void Verify_AllForbiddenFields_EmitErrAoc001(string forbiddenField)
{
var guard = new AocWriteGuard();
var json = CreateJsonWithForbiddenField(forbiddenField, "forbidden_value");
var result = guard.Validate(json.RootElement, GuardOptions);
Assert.False(result.IsValid);
var violation = result.Violations.FirstOrDefault(v => v.Path == $"/{forbiddenField}");
Assert.NotNull(violation);
Assert.Equal("ERR_AOC_001", violation.ErrorCode);
Assert.Equal(AocViolationCode.ForbiddenField, violation.Code);
}
[Fact]
public void Verify_DerivedField_EmitsErrAoc006()
{
var guard = new AocWriteGuard();
var json = CreateJsonWithDerivedField("effective_status", "affected");
var result = guard.Validate(json.RootElement, GuardOptions);
Assert.False(result.IsValid);
var violation = result.Violations.FirstOrDefault(v =>
v.Path == "/effective_status" && v.ErrorCode == "ERR_AOC_006");
Assert.NotNull(violation);
Assert.Equal(AocViolationCode.DerivedFindingDetected, violation.Code);
}
[Theory]
[InlineData("effective_status")]
[InlineData("effective_range")]
[InlineData("effective_severity")]
[InlineData("effective_cvss")]
public void Verify_AllDerivedFields_EmitErrAoc006(string derivedField)
{
var guard = new AocWriteGuard();
var json = CreateJsonWithDerivedField(derivedField, "derived_value");
var result = guard.Validate(json.RootElement, GuardOptions);
Assert.False(result.IsValid);
var violation = result.Violations.FirstOrDefault(v =>
v.Path == $"/{derivedField}" && v.ErrorCode == "ERR_AOC_006");
Assert.NotNull(violation);
Assert.Equal(AocViolationCode.DerivedFindingDetected, violation.Code);
}
[Fact]
public void Verify_UnknownField_EmitsErrAoc007()
{
var guard = new AocWriteGuard();
var json = CreateJsonWithUnknownField("completely_unknown_field", "some_value");
var result = guard.Validate(json.RootElement, GuardOptions);
Assert.False(result.IsValid);
var violation = Assert.Single(result.Violations.Where(v =>
v.Path == "/completely_unknown_field" && v.ErrorCode == "ERR_AOC_007"));
Assert.Equal(AocViolationCode.UnknownField, violation.Code);
}
[Fact]
public void Verify_MergeAttempt_EmitsErrAoc002()
{
var guard = new AocWriteGuard();
var json = CreateJsonWithMergedFrom(["obs-1", "obs-2"]);
var result = guard.Validate(json.RootElement, GuardOptions);
Assert.False(result.IsValid);
// merged_from triggers ERR_AOC_001 (forbidden field)
var violation = result.Violations.FirstOrDefault(v => v.Path == "/merged_from");
Assert.NotNull(violation);
Assert.Equal("ERR_AOC_001", violation.ErrorCode);
}
[Fact]
public void Verify_MultipleViolations_EmitsAllErrorCodes()
{
var guard = new AocWriteGuard();
var json = CreateJsonWithMultipleViolations();
var result = guard.Validate(json.RootElement, GuardOptions);
Assert.False(result.IsValid);
// Should have ERR_AOC_001 for forbidden field
Assert.Contains(result.Violations, v => v.ErrorCode == "ERR_AOC_001");
// Should have ERR_AOC_006 for derived field
Assert.Contains(result.Violations, v => v.ErrorCode == "ERR_AOC_006");
// Should have ERR_AOC_007 for unknown field
Assert.Contains(result.Violations, v => v.ErrorCode == "ERR_AOC_007");
}
[Fact]
public void Verify_ValidDocument_NoViolations()
{
var guard = new AocWriteGuard();
var json = CreateValidJson();
var result = guard.Validate(json.RootElement, GuardOptions);
Assert.True(result.IsValid);
Assert.Empty(result.Violations);
}
[Fact]
public void Verify_ErrorCodeConsistency_AcrossMultipleRuns()
{
var guard = new AocWriteGuard();
var json = CreateJsonWithForbiddenField("severity", "critical");
// Run validation multiple times
var results = Enumerable.Range(0, 10)
.Select(_ => guard.Validate(json.RootElement, GuardOptions))
.ToList();
// All should produce same error code
var allErrorCodes = results
.SelectMany(r => r.Violations)
.Select(v => v.ErrorCode)
.Distinct()
.ToList();
Assert.Single(allErrorCodes);
Assert.Equal("ERR_AOC_001", allErrorCodes[0]);
}
[Fact]
public void Verify_PathConsistency_AcrossMultipleRuns()
{
var guard = new AocWriteGuard();
var json = CreateJsonWithForbiddenField("cvss", "9.8");
// Run validation multiple times
var results = Enumerable.Range(0, 10)
.Select(_ => guard.Validate(json.RootElement, GuardOptions))
.ToList();
// All should produce same path
var allPaths = results
.SelectMany(r => r.Violations)
.Select(v => v.Path)
.Distinct()
.ToList();
Assert.Single(allPaths);
Assert.Equal("/cvss", allPaths[0]);
}
[Fact]
public void Verify_MapperGuardParity_ValidationResultsMatch()
{
var guard = new AocWriteGuard();
var validator = new AdvisorySchemaValidator(guard, Options.Create(GuardOptions));
// Create document with forbidden field
var json = CreateJsonWithForbiddenField("severity", "high");
// Validate with guard directly
var guardResult = guard.Validate(json.RootElement, GuardOptions);
// Both should detect the violation
Assert.False(guardResult.IsValid);
Assert.Contains(guardResult.Violations, v =>
v.ErrorCode == "ERR_AOC_001" && v.Path == "/severity");
}
[Fact]
public void Verify_ViolationMessage_ContainsMeaningfulDetails()
{
var guard = new AocWriteGuard();
var json = CreateJsonWithForbiddenField("severity", "high");
var result = guard.Validate(json.RootElement, GuardOptions);
var violation = result.Violations.First(v => v.ErrorCode == "ERR_AOC_001");
// Message should not be empty
Assert.False(string.IsNullOrWhiteSpace(violation.Message));
// Path should be correct
Assert.Equal("/severity", violation.Path);
}
private static JsonDocument CreateJsonWithForbiddenField(string field, string value)
{
return JsonDocument.Parse($$"""
{
"tenant": "test",
"{{field}}": "{{value}}",
"source": {"vendor": "test", "connector": "test", "version": "1.0"},
"upstream": {
"upstream_id": "CVE-2024-0001",
"content_hash": "sha256:abc",
"retrieved_at": "2024-01-01T00:00:00Z",
"signature": {"present": false},
"provenance": {}
},
"content": {"format": "OSV", "raw": {}},
"identifiers": {"aliases": [], "primary": "CVE-2024-0001"},
"linkset": {}
}
""");
}
private static JsonDocument CreateJsonWithDerivedField(string field, string value)
{
return JsonDocument.Parse($$"""
{
"tenant": "test",
"{{field}}": "{{value}}",
"source": {"vendor": "test", "connector": "test", "version": "1.0"},
"upstream": {
"upstream_id": "CVE-2024-0001",
"content_hash": "sha256:abc",
"retrieved_at": "2024-01-01T00:00:00Z",
"signature": {"present": false},
"provenance": {}
},
"content": {"format": "OSV", "raw": {}},
"identifiers": {"aliases": [], "primary": "CVE-2024-0001"},
"linkset": {}
}
""");
}
private static JsonDocument CreateJsonWithUnknownField(string field, string value)
{
return JsonDocument.Parse($$"""
{
"tenant": "test",
"{{field}}": "{{value}}",
"source": {"vendor": "test", "connector": "test", "version": "1.0"},
"upstream": {
"upstream_id": "CVE-2024-0001",
"content_hash": "sha256:abc",
"retrieved_at": "2024-01-01T00:00:00Z",
"signature": {"present": false},
"provenance": {}
},
"content": {"format": "OSV", "raw": {}},
"identifiers": {"aliases": [], "primary": "CVE-2024-0001"},
"linkset": {}
}
""");
}
private static JsonDocument CreateJsonWithMergedFrom(string[] mergedFrom)
{
var mergedArray = string.Join(", ", mergedFrom.Select(m => $"\"{m}\""));
return JsonDocument.Parse($$"""
{
"tenant": "test",
"merged_from": [{{mergedArray}}],
"source": {"vendor": "test", "connector": "test", "version": "1.0"},
"upstream": {
"upstream_id": "CVE-2024-0001",
"content_hash": "sha256:abc",
"retrieved_at": "2024-01-01T00:00:00Z",
"signature": {"present": false},
"provenance": {}
},
"content": {"format": "OSV", "raw": {}},
"identifiers": {"aliases": [], "primary": "CVE-2024-0001"},
"linkset": {}
}
""");
}
private static JsonDocument CreateJsonWithMultipleViolations()
{
return JsonDocument.Parse("""
{
"tenant": "test",
"severity": "high",
"effective_status": "affected",
"unknown_custom_field": "value",
"source": {"vendor": "test", "connector": "test", "version": "1.0"},
"upstream": {
"upstream_id": "CVE-2024-0001",
"content_hash": "sha256:abc",
"retrieved_at": "2024-01-01T00:00:00Z",
"signature": {"present": false},
"provenance": {}
},
"content": {"format": "OSV", "raw": {}},
"identifiers": {"aliases": [], "primary": "CVE-2024-0001"},
"linkset": {}
}
""");
}
private static JsonDocument CreateValidJson()
{
return JsonDocument.Parse("""
{
"tenant": "test",
"source": {"vendor": "test", "connector": "test", "version": "1.0"},
"upstream": {
"upstream_id": "CVE-2024-0001",
"content_hash": "sha256:abc",
"retrieved_at": "2024-01-01T00:00:00Z",
"signature": {"present": false},
"provenance": {}
},
"content": {"format": "OSV", "raw": {}},
"identifiers": {"aliases": [], "primary": "CVE-2024-0001"},
"linkset": {}
}
""");
}
}

View File

@@ -0,0 +1,315 @@
using System.Collections.Immutable;
using System.Text.Json;
using Microsoft.Extensions.Options;
using StellaOps.Aoc;
using StellaOps.Concelier.Core.Aoc;
using StellaOps.Concelier.RawModels;
namespace StellaOps.Concelier.WebService.Tests.Aoc;
/// <summary>
/// Integration tests for large-batch ingest reproducibility.
/// Per CONCELIER-WEB-AOC-19-004.
/// </summary>
public sealed class LargeBatchIngestTests
{
private static readonly AocGuardOptions GuardOptions = AocGuardOptions.Default;
[Fact]
public void LargeBatch_ValidDocuments_AllPassValidation()
{
var validator = CreateValidator();
var documents = GenerateValidDocuments(1000);
var results = documents.Select(validator.ValidateSchema).ToList();
Assert.All(results, r => Assert.True(r.IsValid));
}
[Fact]
public void LargeBatch_MixedDocuments_DetectsViolationsReproducibly()
{
var validator = CreateValidator();
var (validDocs, invalidDocs) = GenerateMixedBatch(500, 500);
var allDocs = validDocs.Concat(invalidDocs).ToList();
// First pass
var results1 = allDocs.Select(validator.ValidateSchema).ToList();
// Second pass (same order)
var results2 = allDocs.Select(validator.ValidateSchema).ToList();
// Results should be identical (reproducible)
for (int i = 0; i < results1.Count; i++)
{
Assert.Equal(results1[i].IsValid, results2[i].IsValid);
Assert.Equal(results1[i].Violations.Count, results2[i].Violations.Count);
}
}
[Fact]
public void LargeBatch_DeterministicViolationOrdering()
{
var validator = CreateValidator();
var documents = GenerateDocumentsWithMultipleViolations(100);
// Run validation twice
var results1 = documents.Select(validator.ValidateSchema).ToList();
var results2 = documents.Select(validator.ValidateSchema).ToList();
// Violations should be in same order
for (int i = 0; i < results1.Count; i++)
{
var violations1 = results1[i].Violations;
var violations2 = results2[i].Violations;
Assert.Equal(violations1.Count, violations2.Count);
for (int j = 0; j < violations1.Count; j++)
{
Assert.Equal(violations1[j].ErrorCode, violations2[j].ErrorCode);
Assert.Equal(violations1[j].Path, violations2[j].Path);
}
}
}
[Fact]
public void LargeBatch_ParallelValidation_Reproducible()
{
var validator = CreateValidator();
var documents = GenerateValidDocuments(1000);
// Sequential validation
var sequentialResults = documents.Select(validator.ValidateSchema).ToList();
// Parallel validation
var parallelResults = documents.AsParallel()
.AsOrdered()
.Select(validator.ValidateSchema)
.ToList();
// Results should be identical
Assert.Equal(sequentialResults.Count, parallelResults.Count);
for (int i = 0; i < sequentialResults.Count; i++)
{
Assert.Equal(sequentialResults[i].IsValid, parallelResults[i].IsValid);
}
}
[Fact]
public void LargeBatch_ContentHashConsistency()
{
var documents = GenerateValidDocuments(100);
var hashes1 = documents.Select(ComputeDocumentHash).ToList();
var hashes2 = documents.Select(ComputeDocumentHash).ToList();
// Hashes should be identical for same documents
for (int i = 0; i < hashes1.Count; i++)
{
Assert.Equal(hashes1[i], hashes2[i]);
}
}
[Theory]
[InlineData(100)]
[InlineData(500)]
[InlineData(1000)]
public void LargeBatch_ScalesLinearly(int batchSize)
{
var validator = CreateValidator();
var documents = GenerateValidDocuments(batchSize);
var sw = System.Diagnostics.Stopwatch.StartNew();
var results = documents.Select(validator.ValidateSchema).ToList();
sw.Stop();
// All should pass
Assert.Equal(batchSize, results.Count);
Assert.All(results, r => Assert.True(r.IsValid));
// Should complete in reasonable time (less than 100ms per 100 docs)
var expectedMaxMs = batchSize;
Assert.True(sw.ElapsedMilliseconds < expectedMaxMs,
$"Validation took {sw.ElapsedMilliseconds}ms for {batchSize} docs (expected < {expectedMaxMs}ms)");
}
[Fact]
public void LargeBatch_ViolationCounts_Deterministic()
{
var validator = CreateValidator();
// Generate same batch twice
var batch1 = GenerateMixedBatch(250, 250);
var batch2 = GenerateMixedBatch(250, 250);
var allDocs1 = batch1.Valid.Concat(batch1.Invalid).ToList();
var allDocs2 = batch2.Valid.Concat(batch2.Invalid).ToList();
var results1 = allDocs1.Select(validator.ValidateSchema).ToList();
var results2 = allDocs2.Select(validator.ValidateSchema).ToList();
// Same generation should produce same violation counts
var validCount1 = results1.Count(r => r.IsValid);
var validCount2 = results2.Count(r => r.IsValid);
var violationCount1 = results1.Sum(r => r.Violations.Count);
var violationCount2 = results2.Sum(r => r.Violations.Count);
Assert.Equal(validCount1, validCount2);
Assert.Equal(violationCount1, violationCount2);
}
private static AdvisorySchemaValidator CreateValidator()
=> new(new AocWriteGuard(), Options.Create(GuardOptions));
private static List<AdvisoryRawDocument> GenerateValidDocuments(int count)
{
var documents = new List<AdvisoryRawDocument>(count);
for (int i = 0; i < count; i++)
{
documents.Add(CreateValidDocument($"tenant-{i % 10}", $"GHSA-{i:0000}"));
}
return documents;
}
private static (List<AdvisoryRawDocument> Valid, List<AdvisoryRawDocument> Invalid) GenerateMixedBatch(
int validCount, int invalidCount)
{
var valid = GenerateValidDocuments(validCount);
var invalid = GenerateInvalidDocuments(invalidCount);
return (valid, invalid);
}
private static List<AdvisoryRawDocument> GenerateInvalidDocuments(int count)
{
var documents = new List<AdvisoryRawDocument>(count);
for (int i = 0; i < count; i++)
{
documents.Add(CreateDocumentWithForbiddenField($"tenant-{i % 10}", $"CVE-{i:0000}"));
}
return documents;
}
private static List<AdvisoryRawDocument> GenerateDocumentsWithMultipleViolations(int count)
{
var documents = new List<AdvisoryRawDocument>(count);
for (int i = 0; i < count; i++)
{
documents.Add(CreateDocumentWithMultipleViolations($"tenant-{i % 10}", $"CVE-MULTI-{i:0000}"));
}
return documents;
}
private static AdvisoryRawDocument CreateValidDocument(string tenant, string advisoryId)
{
using var rawDocument = JsonDocument.Parse($$"""{"id":"{{advisoryId}}"}""");
return new AdvisoryRawDocument(
Tenant: tenant,
Source: new RawSourceMetadata("vendor-x", "connector-y", "1.0.0"),
Upstream: new RawUpstreamMetadata(
UpstreamId: advisoryId,
DocumentVersion: "1",
RetrievedAt: DateTimeOffset.UtcNow,
ContentHash: $"sha256:{advisoryId}",
Signature: new RawSignatureMetadata(false),
Provenance: ImmutableDictionary<string, string>.Empty),
Content: new RawContent(
Format: "OSV",
SpecVersion: "1.0",
Raw: rawDocument.RootElement.Clone()),
Identifiers: new RawIdentifiers(
Aliases: ImmutableArray.Create(advisoryId),
PrimaryId: advisoryId),
Linkset: new RawLinkset
{
Aliases = ImmutableArray<string>.Empty,
PackageUrls = ImmutableArray<string>.Empty,
Cpes = ImmutableArray<string>.Empty,
References = ImmutableArray<RawReference>.Empty,
ReconciledFrom = ImmutableArray<string>.Empty,
Notes = ImmutableDictionary<string, string>.Empty
},
Links: ImmutableArray<RawLink>.Empty);
}
private static AdvisoryRawDocument CreateDocumentWithForbiddenField(string tenant, string advisoryId)
{
// Create document with forbidden "severity" field
using var rawDocument = JsonDocument.Parse($$"""{"id":"{{advisoryId}}","severity":"high"}""");
return new AdvisoryRawDocument(
Tenant: tenant,
Source: new RawSourceMetadata("vendor-x", "connector-y", "1.0.0"),
Upstream: new RawUpstreamMetadata(
UpstreamId: advisoryId,
DocumentVersion: "1",
RetrievedAt: DateTimeOffset.UtcNow,
ContentHash: $"sha256:{advisoryId}",
Signature: new RawSignatureMetadata(false),
Provenance: ImmutableDictionary<string, string>.Empty),
Content: new RawContent(
Format: "OSV",
SpecVersion: "1.0",
Raw: rawDocument.RootElement.Clone()),
Identifiers: new RawIdentifiers(
Aliases: ImmutableArray.Create(advisoryId),
PrimaryId: advisoryId),
Linkset: new RawLinkset
{
Aliases = ImmutableArray<string>.Empty,
PackageUrls = ImmutableArray<string>.Empty,
Cpes = ImmutableArray<string>.Empty,
References = ImmutableArray<RawReference>.Empty,
ReconciledFrom = ImmutableArray<string>.Empty,
Notes = ImmutableDictionary<string, string>.Empty
},
Links: ImmutableArray<RawLink>.Empty);
}
private static AdvisoryRawDocument CreateDocumentWithMultipleViolations(string tenant, string advisoryId)
{
// Create document with multiple violations: forbidden, derived, and unknown fields
using var rawDocument = JsonDocument.Parse($$"""
{
"id": "{{advisoryId}}",
"severity": "high",
"effective_status": "affected",
"unknown_field": "value"
}
""");
return new AdvisoryRawDocument(
Tenant: tenant,
Source: new RawSourceMetadata("vendor-x", "connector-y", "1.0.0"),
Upstream: new RawUpstreamMetadata(
UpstreamId: advisoryId,
DocumentVersion: "1",
RetrievedAt: DateTimeOffset.UtcNow,
ContentHash: $"sha256:{advisoryId}",
Signature: new RawSignatureMetadata(false),
Provenance: ImmutableDictionary<string, string>.Empty),
Content: new RawContent(
Format: "OSV",
SpecVersion: "1.0",
Raw: rawDocument.RootElement.Clone()),
Identifiers: new RawIdentifiers(
Aliases: ImmutableArray.Create(advisoryId),
PrimaryId: advisoryId),
Linkset: new RawLinkset
{
Aliases = ImmutableArray<string>.Empty,
PackageUrls = ImmutableArray<string>.Empty,
Cpes = ImmutableArray<string>.Empty,
References = ImmutableArray<RawReference>.Empty,
ReconciledFrom = ImmutableArray<string>.Empty,
Notes = ImmutableDictionary<string, string>.Empty
},
Links: ImmutableArray<RawLink>.Empty);
}
private static string ComputeDocumentHash(AdvisoryRawDocument doc)
{
// Simple hash combining key fields
var data = $"{doc.Tenant}|{doc.Upstream.UpstreamId}|{doc.Upstream.ContentHash}";
using var sha = System.Security.Cryptography.SHA256.Create();
var bytes = System.Text.Encoding.UTF8.GetBytes(data);
var hash = sha.ComputeHash(bytes);
return Convert.ToHexStringLower(hash);
}
}

View File

@@ -0,0 +1,125 @@
using StellaOps.Concelier.WebService.Tests.Fixtures;
namespace StellaOps.Concelier.WebService.Tests.Aoc;
/// <summary>
/// Tests for tenant allowlist enforcement.
/// Per CONCELIER-WEB-AOC-19-006.
/// </summary>
public sealed class TenantAllowlistTests
{
[Theory]
[InlineData("test-tenant")]
[InlineData("dev-tenant")]
[InlineData("tenant-123")]
[InlineData("a")]
[InlineData("tenant-with-dashes-in-name")]
public void ValidateTenantId_ValidTenant_ReturnsValid(string tenantId)
{
var (isValid, error) = AuthTenantTestFixtures.ValidateTenantId(tenantId);
Assert.True(isValid);
Assert.Null(error);
}
[Theory]
[InlineData("", "cannot be null or empty")]
[InlineData("Test-Tenant", "invalid character 'T'")] // Uppercase
[InlineData("test_tenant", "invalid character '_'")] // Underscore
[InlineData("test.tenant", "invalid character '.'")] // Dot
[InlineData("test tenant", "invalid character ' '")] // Space
[InlineData("test@tenant", "invalid character '@'")] // Special char
public void ValidateTenantId_InvalidTenant_ReturnsError(string tenantId, string expectedErrorPart)
{
var (isValid, error) = AuthTenantTestFixtures.ValidateTenantId(tenantId);
Assert.False(isValid);
Assert.NotNull(error);
Assert.Contains(expectedErrorPart, error, StringComparison.OrdinalIgnoreCase);
}
[Fact]
public void ValidateTenantId_TooLong_ReturnsError()
{
var longTenant = new string('a', 65); // 65 chars, max is 64
var (isValid, error) = AuthTenantTestFixtures.ValidateTenantId(longTenant);
Assert.False(isValid);
Assert.Contains("exceeds maximum length", error);
}
[Fact]
public void ValidateTenantId_MaxLength_ReturnsValid()
{
var maxTenant = new string('a', 64); // Exactly 64 chars
var (isValid, error) = AuthTenantTestFixtures.ValidateTenantId(maxTenant);
Assert.True(isValid);
Assert.Null(error);
}
[Fact]
public void CreateDefaultAuthorityConfig_ContainsAllTestTenants()
{
var config = AuthTenantTestFixtures.CreateDefaultAuthorityConfig();
Assert.NotEmpty(config.RequiredTenants);
Assert.Contains(AuthTenantTestFixtures.ValidTenants.TestTenant, config.RequiredTenants);
Assert.Contains(AuthTenantTestFixtures.ValidTenants.ChunkTestTenant, config.RequiredTenants);
Assert.Contains(AuthTenantTestFixtures.ValidTenants.AocTestTenant, config.RequiredTenants);
}
[Fact]
public void CreateSingleTenantConfig_ContainsOnlySpecifiedTenant()
{
var tenant = "single-test";
var config = AuthTenantTestFixtures.CreateSingleTenantConfig(tenant);
Assert.Single(config.RequiredTenants);
Assert.Equal(tenant, config.RequiredTenants[0]);
}
[Fact]
public void AllValidTenants_PassValidation()
{
foreach (var tenant in AuthTenantTestFixtures.ValidTenants.AllTestTenants)
{
var (isValid, error) = AuthTenantTestFixtures.ValidateTenantId(tenant);
Assert.True(isValid, $"Tenant '{tenant}' should be valid but got error: {error}");
}
}
[Fact]
public void AllInvalidTenants_FailValidation()
{
foreach (var tenant in AuthTenantTestFixtures.InvalidTenants.AllInvalidTenants)
{
var (isValid, _) = AuthTenantTestFixtures.ValidateTenantId(tenant);
Assert.False(isValid, $"Tenant '{tenant}' should be invalid");
}
}
[Fact]
public void AuthorityTestConfiguration_DefaultValuesAreSet()
{
var config = AuthTenantTestFixtures.CreateAuthorityConfig("test");
Assert.True(config.Enabled);
Assert.Equal("concelier-api", config.Audience);
Assert.Equal("https://test-authority.stellaops.local", config.Issuer);
}
[Fact]
public void SeedDataFixtures_UseTenantsThatPassValidation()
{
// Verify that seed data fixtures use valid tenant IDs
var chunkSeedTenant = AdvisoryChunkSeedData.DefaultTenant;
var (isValid, error) = AuthTenantTestFixtures.ValidateTenantId(chunkSeedTenant);
Assert.True(isValid, $"Chunk seed tenant '{chunkSeedTenant}' should be valid but got error: {error}");
}
}

View File

@@ -0,0 +1,411 @@
using System.Collections.Immutable;
using System.Text.Json;
using MongoDB.Bson.Serialization.Attributes;
using StellaOps.Concelier.RawModels;
namespace StellaOps.Concelier.WebService.Tests.Fixtures;
/// <summary>
/// Seed data fixtures for /advisories/{key}/chunks endpoint tests.
/// Per CONCELIER-WEB-AOC-19-005.
/// </summary>
public static class AdvisoryChunkSeedData
{
public const string DefaultTenant = "chunk-test-tenant";
/// <summary>
/// Creates a complete set of seed documents for testing the chunks endpoint.
/// </summary>
public static AdvisoryChunkSeedSet CreateSeedSet(string tenant = DefaultTenant)
{
var advisories = CreateAdvisories(tenant);
var observations = CreateObservations(tenant);
var aliases = CreateAliases(tenant);
var rawDocuments = CreateRawDocuments(tenant);
return new AdvisoryChunkSeedSet(advisories, observations, aliases, rawDocuments);
}
/// <summary>
/// Advisory documents for seed data.
/// </summary>
public static IReadOnlyList<AdvisorySeedDocument> CreateAdvisories(string tenant = DefaultTenant)
{
return new[]
{
new AdvisorySeedDocument
{
TenantId = tenant,
AdvisoryKey = "CVE-2024-0001",
Source = "nvd",
Severity = "critical",
Title = "Remote Code Execution in Example Package",
Description = "A critical vulnerability allows remote attackers to execute arbitrary code.",
Published = new DateTime(2024, 1, 15, 0, 0, 0, DateTimeKind.Utc),
Modified = new DateTime(2024, 1, 20, 0, 0, 0, DateTimeKind.Utc),
Fingerprint = ComputeFingerprint("CVE-2024-0001", "nvd")
},
new AdvisorySeedDocument
{
TenantId = tenant,
AdvisoryKey = "CVE-2024-0002",
Source = "github",
Severity = "high",
Title = "SQL Injection in Database Layer",
Description = "SQL injection vulnerability in the database abstraction layer.",
Published = new DateTime(2024, 2, 1, 0, 0, 0, DateTimeKind.Utc),
Modified = new DateTime(2024, 2, 5, 0, 0, 0, DateTimeKind.Utc),
Fingerprint = ComputeFingerprint("CVE-2024-0002", "github")
},
new AdvisorySeedDocument
{
TenantId = tenant,
AdvisoryKey = "GHSA-xxxx-yyyy-zzzz",
Source = "github",
Severity = "medium",
Title = "Cross-Site Scripting in Frontend",
Description = "Stored XSS vulnerability in user profile fields.",
Published = new DateTime(2024, 3, 10, 0, 0, 0, DateTimeKind.Utc),
Modified = new DateTime(2024, 3, 15, 0, 0, 0, DateTimeKind.Utc),
Fingerprint = ComputeFingerprint("GHSA-xxxx-yyyy-zzzz", "github")
}
};
}
/// <summary>
/// Observation documents for seed data.
/// </summary>
public static IReadOnlyList<ObservationSeedDocument> CreateObservations(string tenant = DefaultTenant)
{
return new[]
{
// CVE-2024-0001 observations
new ObservationSeedDocument
{
TenantId = tenant,
ObservationId = "obs-001-nvd",
AdvisoryKey = "CVE-2024-0001",
Source = "nvd",
Format = "OSV",
RawContent = CreateRawContent("CVE-2024-0001", "nvd", "critical"),
CreatedAt = new DateTime(2024, 1, 15, 10, 0, 0, DateTimeKind.Utc)
},
new ObservationSeedDocument
{
TenantId = tenant,
ObservationId = "obs-001-github",
AdvisoryKey = "CVE-2024-0001",
Source = "github",
Format = "OSV",
RawContent = CreateRawContent("CVE-2024-0001", "github", "critical"),
CreatedAt = new DateTime(2024, 1, 16, 10, 0, 0, DateTimeKind.Utc)
},
// CVE-2024-0002 observations
new ObservationSeedDocument
{
TenantId = tenant,
ObservationId = "obs-002-github",
AdvisoryKey = "CVE-2024-0002",
Source = "github",
Format = "OSV",
RawContent = CreateRawContent("CVE-2024-0002", "github", "high"),
CreatedAt = new DateTime(2024, 2, 1, 10, 0, 0, DateTimeKind.Utc)
},
// GHSA observations
new ObservationSeedDocument
{
TenantId = tenant,
ObservationId = "obs-ghsa-001",
AdvisoryKey = "GHSA-xxxx-yyyy-zzzz",
Source = "github",
Format = "GHSA",
RawContent = CreateGhsaRawContent("GHSA-xxxx-yyyy-zzzz", "medium"),
CreatedAt = new DateTime(2024, 3, 10, 10, 0, 0, DateTimeKind.Utc)
}
};
}
/// <summary>
/// Alias documents for seed data.
/// </summary>
public static IReadOnlyList<AliasSeedDocument> CreateAliases(string tenant = DefaultTenant)
{
return new[]
{
new AliasSeedDocument
{
TenantId = tenant,
Alias = "CVE-2024-0001",
CanonicalId = "CVE-2024-0001",
Aliases = new[] { "CVE-2024-0001", "GHSA-aaaa-bbbb-cccc" }
},
new AliasSeedDocument
{
TenantId = tenant,
Alias = "GHSA-aaaa-bbbb-cccc",
CanonicalId = "CVE-2024-0001",
Aliases = new[] { "CVE-2024-0001", "GHSA-aaaa-bbbb-cccc" }
},
new AliasSeedDocument
{
TenantId = tenant,
Alias = "CVE-2024-0002",
CanonicalId = "CVE-2024-0002",
Aliases = new[] { "CVE-2024-0002" }
},
new AliasSeedDocument
{
TenantId = tenant,
Alias = "GHSA-xxxx-yyyy-zzzz",
CanonicalId = "GHSA-xxxx-yyyy-zzzz",
Aliases = new[] { "GHSA-xxxx-yyyy-zzzz" }
}
};
}
/// <summary>
/// Raw documents for seed data (these resolve to chunks).
/// </summary>
public static IReadOnlyList<AdvisoryRawDocument> CreateRawDocuments(string tenant = DefaultTenant)
{
var documents = new List<AdvisoryRawDocument>();
foreach (var obs in CreateObservations(tenant))
{
documents.Add(CreateRawDocumentFromObservation(obs, tenant));
}
return documents;
}
private static AdvisoryRawDocument CreateRawDocumentFromObservation(
ObservationSeedDocument obs,
string tenant)
{
using var jsonDoc = JsonDocument.Parse(obs.RawContent);
return new AdvisoryRawDocument(
Tenant: tenant,
Source: new RawSourceMetadata(obs.Source, "connector", "1.0.0"),
Upstream: new RawUpstreamMetadata(
UpstreamId: obs.AdvisoryKey,
DocumentVersion: "1",
RetrievedAt: obs.CreatedAt,
ContentHash: $"sha256:{ComputeHash(obs.RawContent)}",
Signature: new RawSignatureMetadata(false),
Provenance: ImmutableDictionary<string, string>.Empty),
Content: new RawContent(
Format: obs.Format,
SpecVersion: "1.0",
Raw: jsonDoc.RootElement.Clone()),
Identifiers: new RawIdentifiers(
Aliases: ImmutableArray.Create(obs.AdvisoryKey),
PrimaryId: obs.AdvisoryKey),
Linkset: new RawLinkset
{
Aliases = ImmutableArray<string>.Empty,
PackageUrls = ImmutableArray<string>.Empty,
Cpes = ImmutableArray<string>.Empty,
References = ImmutableArray<RawReference>.Empty,
ReconciledFrom = ImmutableArray<string>.Empty,
Notes = ImmutableDictionary<string, string>.Empty
},
Links: ImmutableArray<RawLink>.Empty);
}
private static string CreateRawContent(string advisoryId, string source, string severity)
{
return $$"""
{
"id": "{{advisoryId}}",
"modified": "2024-01-20T00:00:00Z",
"published": "2024-01-15T00:00:00Z",
"aliases": ["{{advisoryId}}"],
"summary": "Test vulnerability summary for {{advisoryId}}",
"details": "Detailed description of the vulnerability. This provides comprehensive information about the security issue, affected components, and potential impact. The vulnerability was discovered by security researchers and affects multiple versions of the software.",
"severity": [
{
"type": "CVSS_V3",
"score": "{{severity == "critical" ? "CVSS:3.1/AV:N/AC:L/PR:N/UI:N/S:U/C:H/I:H/A:H" : severity == "high" ? "CVSS:3.1/AV:N/AC:L/PR:N/UI:N/S:U/C:H/I:H/A:N" : "CVSS:3.1/AV:N/AC:L/PR:N/UI:R/S:C/C:L/I:L/A:N"}}"
}
],
"affected": [
{
"package": {
"ecosystem": "npm",
"name": "example-package"
},
"ranges": [
{
"type": "SEMVER",
"events": [
{"introduced": "0"},
{"fixed": "2.0.0"}
]
}
]
}
],
"references": [
{
"type": "ADVISORY",
"url": "https://nvd.nist.gov/vuln/detail/{{advisoryId}}"
}
],
"database_specific": {
"source": "{{source}}"
}
}
""";
}
private static string CreateGhsaRawContent(string ghsaId, string severity)
{
return $$"""
{
"id": "{{ghsaId}}",
"modified": "2024-03-15T00:00:00Z",
"published": "2024-03-10T00:00:00Z",
"aliases": ["{{ghsaId}}"],
"summary": "XSS vulnerability in frontend components",
"details": "A cross-site scripting (XSS) vulnerability exists in the frontend user interface. An attacker can inject malicious scripts through user profile fields that are not properly sanitized before rendering. This can lead to session hijacking, data theft, or defacement.",
"severity": [
{
"type": "CVSS_V3",
"score": "CVSS:3.1/AV:N/AC:L/PR:N/UI:R/S:C/C:L/I:L/A:N"
}
],
"affected": [
{
"package": {
"ecosystem": "npm",
"name": "@example/frontend"
},
"ranges": [
{
"type": "SEMVER",
"events": [
{"introduced": "1.0.0"},
{"fixed": "1.5.3"}
]
}
]
}
],
"references": [
{
"type": "ADVISORY",
"url": "https://github.com/advisories/{{ghsaId}}"
}
],
"database_specific": {
"github_reviewed": true,
"github_reviewed_at": "2024-03-10T10:00:00Z",
"nvd_published_at": null
}
}
""";
}
private static string ComputeFingerprint(string advisoryKey, string source)
{
using var sha = System.Security.Cryptography.SHA256.Create();
var data = System.Text.Encoding.UTF8.GetBytes($"{advisoryKey}:{source}");
var hash = sha.ComputeHash(data);
return Convert.ToHexStringLower(hash)[..16];
}
private static string ComputeHash(string content)
{
using var sha = System.Security.Cryptography.SHA256.Create();
var data = System.Text.Encoding.UTF8.GetBytes(content);
var hash = sha.ComputeHash(data);
return Convert.ToHexStringLower(hash);
}
}
/// <summary>
/// Complete seed data set for chunks endpoint tests.
/// </summary>
public sealed record AdvisoryChunkSeedSet(
IReadOnlyList<AdvisorySeedDocument> Advisories,
IReadOnlyList<ObservationSeedDocument> Observations,
IReadOnlyList<AliasSeedDocument> Aliases,
IReadOnlyList<AdvisoryRawDocument> RawDocuments);
/// <summary>
/// Advisory document for seeding.
/// </summary>
public sealed class AdvisorySeedDocument
{
[BsonElement("tenantId")]
public string TenantId { get; init; } = string.Empty;
[BsonElement("advisoryKey")]
public string AdvisoryKey { get; init; } = string.Empty;
[BsonElement("source")]
public string Source { get; init; } = string.Empty;
[BsonElement("severity")]
public string Severity { get; init; } = string.Empty;
[BsonElement("title")]
public string Title { get; init; } = string.Empty;
[BsonElement("description")]
public string Description { get; init; } = string.Empty;
[BsonElement("published")]
public DateTime Published { get; init; }
[BsonElement("modified")]
public DateTime Modified { get; init; }
[BsonElement("fingerprint")]
public string Fingerprint { get; init; } = string.Empty;
}
/// <summary>
/// Observation document for seeding.
/// </summary>
public sealed class ObservationSeedDocument
{
[BsonElement("tenantId")]
public string TenantId { get; init; } = string.Empty;
[BsonElement("observationId")]
public string ObservationId { get; init; } = string.Empty;
[BsonElement("advisoryKey")]
public string AdvisoryKey { get; init; } = string.Empty;
[BsonElement("source")]
public string Source { get; init; } = string.Empty;
[BsonElement("format")]
public string Format { get; init; } = string.Empty;
[BsonElement("rawContent")]
public string RawContent { get; init; } = string.Empty;
[BsonElement("createdAt")]
public DateTime CreatedAt { get; init; }
}
/// <summary>
/// Alias document for seeding.
/// </summary>
public sealed class AliasSeedDocument
{
[BsonElement("tenantId")]
public string TenantId { get; init; } = string.Empty;
[BsonElement("alias")]
public string Alias { get; init; } = string.Empty;
[BsonElement("canonicalId")]
public string CanonicalId { get; init; } = string.Empty;
[BsonElement("aliases")]
public IReadOnlyList<string> Aliases { get; init; } = Array.Empty<string>();
}

View File

@@ -0,0 +1,124 @@
namespace StellaOps.Concelier.WebService.Tests.Fixtures;
/// <summary>
/// Test fixtures for auth/tenant configuration alignment.
/// Per CONCELIER-WEB-AOC-19-006.
/// </summary>
public static class AuthTenantTestFixtures
{
/// <summary>
/// Valid tenant identifiers that pass validation.
/// Use these in test configurations.
/// </summary>
public static class ValidTenants
{
public const string TestTenant = "test-tenant";
public const string DevTenant = "dev-tenant";
public const string StagingTenant = "staging-tenant";
public const string ProdTenant = "prod-tenant";
public const string ChunkTestTenant = "chunk-test-tenant";
public const string AocTestTenant = "aoc-test-tenant";
public const string IntegrationTenant = "integration-tenant";
public static readonly string[] AllTestTenants =
[
TestTenant,
DevTenant,
StagingTenant,
ChunkTestTenant,
AocTestTenant,
IntegrationTenant
];
}
/// <summary>
/// Invalid tenant identifiers for negative tests.
/// </summary>
public static class InvalidTenants
{
public const string EmptyTenant = "";
public const string WhitespaceTenant = " ";
public const string UppercaseTenant = "Test-Tenant"; // Uppercase not allowed
public const string SpecialCharTenant = "test_tenant"; // Underscore not allowed
public const string DotTenant = "test.tenant"; // Dot not allowed
public const string SpaceTenant = "test tenant"; // Space not allowed
public const string LongTenant = "this-tenant-identifier-is-way-too-long-and-exceeds-the-maximum-allowed-length";
public static readonly string[] AllInvalidTenants =
[
EmptyTenant,
WhitespaceTenant,
UppercaseTenant,
SpecialCharTenant,
DotTenant,
SpaceTenant,
LongTenant
];
}
/// <summary>
/// Creates an authority configuration with the given required tenants.
/// </summary>
public static AuthorityTestConfiguration CreateAuthorityConfig(params string[] requiredTenants)
{
return new AuthorityTestConfiguration
{
RequiredTenants = requiredTenants.ToList()
};
}
/// <summary>
/// Creates a default test authority configuration.
/// </summary>
public static AuthorityTestConfiguration CreateDefaultAuthorityConfig()
{
return CreateAuthorityConfig(ValidTenants.AllTestTenants);
}
/// <summary>
/// Creates a minimal authority configuration for single-tenant tests.
/// </summary>
public static AuthorityTestConfiguration CreateSingleTenantConfig(string tenant = ValidTenants.TestTenant)
{
return CreateAuthorityConfig(tenant);
}
/// <summary>
/// Validates that a tenant ID meets the allowlist requirements.
/// </summary>
public static (bool IsValid, string? Error) ValidateTenantId(string tenantId)
{
if (string.IsNullOrEmpty(tenantId))
{
return (false, "Tenant ID cannot be null or empty");
}
if (tenantId.Length > 64)
{
return (false, "Tenant ID exceeds maximum length of 64 characters");
}
foreach (var ch in tenantId)
{
var isAlpha = ch is >= 'a' and <= 'z';
var isDigit = ch is >= '0' and <= '9';
if (!isAlpha && !isDigit && ch != '-')
{
return (false, $"Tenant ID contains invalid character '{ch}'. Use lowercase letters, digits, or '-'");
}
}
return (true, null);
}
}
/// <summary>
/// Test authority configuration.
/// </summary>
public sealed class AuthorityTestConfiguration
{
public IList<string> RequiredTenants { get; init; } = [];
public bool Enabled { get; init; } = true;
public string? Audience { get; init; } = "concelier-api";
public string? Issuer { get; init; } = "https://test-authority.stellaops.local";
}

View File

@@ -1,4 +1,5 @@
using StellaOps.Findings.Ledger.Domain;
using StellaOps.Findings.Ledger.Services;
namespace StellaOps.Findings.Ledger.Infrastructure;
@@ -34,4 +35,35 @@ public interface IFindingProjectionRepository
string tenantId,
DateTimeOffset since,
CancellationToken cancellationToken);
/// <summary>
/// Queries scored findings with filtering and pagination.
/// </summary>
Task<(IReadOnlyList<FindingProjection> Projections, int TotalCount)> QueryScoredAsync(
ScoredFindingsQuery query,
CancellationToken cancellationToken);
/// <summary>
/// Gets the severity distribution for a tenant.
/// </summary>
Task<SeverityDistribution> GetSeverityDistributionAsync(
string tenantId,
string? policyVersion,
CancellationToken cancellationToken);
/// <summary>
/// Gets the score distribution for a tenant.
/// </summary>
Task<ScoreDistribution> GetScoreDistributionAsync(
string tenantId,
string? policyVersion,
CancellationToken cancellationToken);
/// <summary>
/// Gets aggregate risk statistics for a tenant.
/// </summary>
Task<(int Total, int Scored, decimal AvgScore, decimal MaxScore)> GetRiskAggregatesAsync(
string tenantId,
string? policyVersion,
CancellationToken cancellationToken);
}

View File

@@ -38,6 +38,40 @@ public sealed class LedgerDataSource : IAsyncDisposable
public Task<NpgsqlConnection> OpenConnectionAsync(string tenantId, string role, CancellationToken cancellationToken)
=> OpenConnectionInternalAsync(tenantId, role, cancellationToken);
/// <summary>
/// Opens a system connection without tenant context. For migrations and admin operations only.
/// RLS policies will block queries on tenant-scoped tables unless using BYPASSRLS role.
/// </summary>
public async Task<NpgsqlConnection> OpenSystemConnectionAsync(CancellationToken cancellationToken)
{
var connection = await _dataSource.OpenConnectionAsync(cancellationToken).ConfigureAwait(false);
try
{
await using var command = new NpgsqlCommand("SET TIME ZONE 'UTC';", connection);
command.CommandTimeout = _options.CommandTimeoutSeconds;
await command.ExecuteNonQueryAsync(cancellationToken).ConfigureAwait(false);
LedgerMetrics.ConnectionOpened("system");
connection.StateChange += (_, args) =>
{
if (args.CurrentState == ConnectionState.Closed)
{
LedgerMetrics.ConnectionClosed("system");
}
};
_logger.LogDebug("Opened system connection without tenant context (for migrations/admin)");
}
catch
{
await connection.DisposeAsync().ConfigureAwait(false);
throw;
}
return connection;
}
private async Task<NpgsqlConnection> OpenConnectionInternalAsync(string tenantId, string role, CancellationToken cancellationToken)
{
var connection = await _dataSource.OpenConnectionAsync(cancellationToken).ConfigureAwait(false);

View File

@@ -4,6 +4,7 @@ using Npgsql;
using NpgsqlTypes;
using StellaOps.Findings.Ledger.Domain;
using StellaOps.Findings.Ledger.Hashing;
using StellaOps.Findings.Ledger.Services;
namespace StellaOps.Findings.Ledger.Infrastructure.Postgres;
@@ -395,4 +396,264 @@ public sealed class PostgresFindingProjectionRepository : IFindingProjectionRepo
return new FindingStatsResult(0, 0, 0, 0, 0, 0);
}
public async Task<(IReadOnlyList<FindingProjection> Projections, int TotalCount)> QueryScoredAsync(
ScoredFindingsQuery query,
CancellationToken cancellationToken)
{
ArgumentNullException.ThrowIfNull(query);
ArgumentException.ThrowIfNullOrWhiteSpace(query.TenantId);
await using var connection = await _dataSource.OpenConnectionAsync(query.TenantId, "projector", cancellationToken).ConfigureAwait(false);
// Build dynamic query
var whereConditions = new List<string> { "tenant_id = @tenant_id" };
var parameters = new List<NpgsqlParameter>
{
new NpgsqlParameter<string>("tenant_id", query.TenantId) { NpgsqlDbType = NpgsqlDbType.Text }
};
if (!string.IsNullOrWhiteSpace(query.PolicyVersion))
{
whereConditions.Add("policy_version = @policy_version");
parameters.Add(new NpgsqlParameter<string>("policy_version", query.PolicyVersion) { NpgsqlDbType = NpgsqlDbType.Text });
}
if (query.MinScore.HasValue)
{
whereConditions.Add("risk_score >= @min_score");
parameters.Add(new NpgsqlParameter<decimal>("min_score", query.MinScore.Value) { NpgsqlDbType = NpgsqlDbType.Numeric });
}
if (query.MaxScore.HasValue)
{
whereConditions.Add("risk_score <= @max_score");
parameters.Add(new NpgsqlParameter<decimal>("max_score", query.MaxScore.Value) { NpgsqlDbType = NpgsqlDbType.Numeric });
}
if (query.Severities is { Count: > 0 })
{
whereConditions.Add("risk_severity = ANY(@severities)");
parameters.Add(new NpgsqlParameter("severities", query.Severities.ToArray()) { NpgsqlDbType = NpgsqlDbType.Array | NpgsqlDbType.Text });
}
if (query.Statuses is { Count: > 0 })
{
whereConditions.Add("status = ANY(@statuses)");
parameters.Add(new NpgsqlParameter("statuses", query.Statuses.ToArray()) { NpgsqlDbType = NpgsqlDbType.Array | NpgsqlDbType.Text });
}
var whereClause = string.Join(" AND ", whereConditions);
var orderColumn = query.SortBy switch
{
ScoredFindingsSortField.RiskScore => "risk_score",
ScoredFindingsSortField.RiskSeverity => "risk_severity",
ScoredFindingsSortField.UpdatedAt => "updated_at",
ScoredFindingsSortField.FindingId => "finding_id",
_ => "risk_score"
};
var orderDirection = query.Descending ? "DESC NULLS LAST" : "ASC NULLS FIRST";
// Count query
var countSql = $"SELECT COUNT(*) FROM findings_projection WHERE {whereClause}";
await using var countCommand = new NpgsqlCommand(countSql, connection);
countCommand.CommandTimeout = _dataSource.CommandTimeoutSeconds;
foreach (var p in parameters) countCommand.Parameters.Add(p.Clone());
var totalCount = Convert.ToInt32(await countCommand.ExecuteScalarAsync(cancellationToken).ConfigureAwait(false));
// Data query
var dataSql = $@"
SELECT
tenant_id, finding_id, policy_version, status, severity, risk_score, risk_severity,
risk_profile_version, risk_explanation_id, risk_event_sequence, labels, current_event_id,
explain_ref, policy_rationale, updated_at, cycle_hash
FROM findings_projection
WHERE {whereClause}
ORDER BY {orderColumn} {orderDirection}
LIMIT @limit";
parameters.Add(new NpgsqlParameter<int>("limit", query.Limit) { NpgsqlDbType = NpgsqlDbType.Integer });
await using var dataCommand = new NpgsqlCommand(dataSql, connection);
dataCommand.CommandTimeout = _dataSource.CommandTimeoutSeconds;
foreach (var p in parameters) dataCommand.Parameters.Add(p.Clone());
var results = new List<FindingProjection>();
await using var reader = await dataCommand.ExecuteReaderAsync(cancellationToken).ConfigureAwait(false);
while (await reader.ReadAsync(cancellationToken).ConfigureAwait(false))
{
results.Add(MapProjection(reader));
}
return (results, totalCount);
}
public async Task<SeverityDistribution> GetSeverityDistributionAsync(
string tenantId,
string? policyVersion,
CancellationToken cancellationToken)
{
ArgumentException.ThrowIfNullOrWhiteSpace(tenantId);
var sql = @"
SELECT
COALESCE(SUM(CASE WHEN risk_severity = 'critical' THEN 1 ELSE 0 END), 0) as critical,
COALESCE(SUM(CASE WHEN risk_severity = 'high' THEN 1 ELSE 0 END), 0) as high,
COALESCE(SUM(CASE WHEN risk_severity = 'medium' THEN 1 ELSE 0 END), 0) as medium,
COALESCE(SUM(CASE WHEN risk_severity = 'low' THEN 1 ELSE 0 END), 0) as low,
COALESCE(SUM(CASE WHEN risk_severity = 'informational' THEN 1 ELSE 0 END), 0) as informational,
COALESCE(SUM(CASE WHEN risk_severity IS NULL THEN 1 ELSE 0 END), 0) as unscored
FROM findings_projection
WHERE tenant_id = @tenant_id";
if (!string.IsNullOrWhiteSpace(policyVersion))
{
sql += " AND policy_version = @policy_version";
}
await using var connection = await _dataSource.OpenConnectionAsync(tenantId, "projector", cancellationToken).ConfigureAwait(false);
await using var command = new NpgsqlCommand(sql, connection);
command.CommandTimeout = _dataSource.CommandTimeoutSeconds;
command.Parameters.AddWithValue("tenant_id", tenantId);
if (!string.IsNullOrWhiteSpace(policyVersion))
{
command.Parameters.AddWithValue("policy_version", policyVersion);
}
await using var reader = await command.ExecuteReaderAsync(cancellationToken).ConfigureAwait(false);
if (await reader.ReadAsync(cancellationToken).ConfigureAwait(false))
{
return new SeverityDistribution
{
Critical = reader.GetInt32(0),
High = reader.GetInt32(1),
Medium = reader.GetInt32(2),
Low = reader.GetInt32(3),
Informational = reader.GetInt32(4),
Unscored = reader.GetInt32(5)
};
}
return new SeverityDistribution();
}
public async Task<ScoreDistribution> GetScoreDistributionAsync(
string tenantId,
string? policyVersion,
CancellationToken cancellationToken)
{
ArgumentException.ThrowIfNullOrWhiteSpace(tenantId);
var sql = @"
SELECT
COALESCE(SUM(CASE WHEN risk_score >= 0 AND risk_score < 0.2 THEN 1 ELSE 0 END), 0) as score_0_20,
COALESCE(SUM(CASE WHEN risk_score >= 0.2 AND risk_score < 0.4 THEN 1 ELSE 0 END), 0) as score_20_40,
COALESCE(SUM(CASE WHEN risk_score >= 0.4 AND risk_score < 0.6 THEN 1 ELSE 0 END), 0) as score_40_60,
COALESCE(SUM(CASE WHEN risk_score >= 0.6 AND risk_score < 0.8 THEN 1 ELSE 0 END), 0) as score_60_80,
COALESCE(SUM(CASE WHEN risk_score >= 0.8 THEN 1 ELSE 0 END), 0) as score_80_100
FROM findings_projection
WHERE tenant_id = @tenant_id AND risk_score IS NOT NULL";
if (!string.IsNullOrWhiteSpace(policyVersion))
{
sql += " AND policy_version = @policy_version";
}
await using var connection = await _dataSource.OpenConnectionAsync(tenantId, "projector", cancellationToken).ConfigureAwait(false);
await using var command = new NpgsqlCommand(sql, connection);
command.CommandTimeout = _dataSource.CommandTimeoutSeconds;
command.Parameters.AddWithValue("tenant_id", tenantId);
if (!string.IsNullOrWhiteSpace(policyVersion))
{
command.Parameters.AddWithValue("policy_version", policyVersion);
}
await using var reader = await command.ExecuteReaderAsync(cancellationToken).ConfigureAwait(false);
if (await reader.ReadAsync(cancellationToken).ConfigureAwait(false))
{
return new ScoreDistribution
{
Score0To20 = reader.GetInt32(0),
Score20To40 = reader.GetInt32(1),
Score40To60 = reader.GetInt32(2),
Score60To80 = reader.GetInt32(3),
Score80To100 = reader.GetInt32(4)
};
}
return new ScoreDistribution();
}
public async Task<(int Total, int Scored, decimal AvgScore, decimal MaxScore)> GetRiskAggregatesAsync(
string tenantId,
string? policyVersion,
CancellationToken cancellationToken)
{
ArgumentException.ThrowIfNullOrWhiteSpace(tenantId);
var sql = @"
SELECT
COUNT(*) as total,
COUNT(risk_score) as scored,
COALESCE(AVG(risk_score), 0) as avg_score,
COALESCE(MAX(risk_score), 0) as max_score
FROM findings_projection
WHERE tenant_id = @tenant_id";
if (!string.IsNullOrWhiteSpace(policyVersion))
{
sql += " AND policy_version = @policy_version";
}
await using var connection = await _dataSource.OpenConnectionAsync(tenantId, "projector", cancellationToken).ConfigureAwait(false);
await using var command = new NpgsqlCommand(sql, connection);
command.CommandTimeout = _dataSource.CommandTimeoutSeconds;
command.Parameters.AddWithValue("tenant_id", tenantId);
if (!string.IsNullOrWhiteSpace(policyVersion))
{
command.Parameters.AddWithValue("policy_version", policyVersion);
}
await using var reader = await command.ExecuteReaderAsync(cancellationToken).ConfigureAwait(false);
if (await reader.ReadAsync(cancellationToken).ConfigureAwait(false))
{
return (
reader.GetInt32(0),
reader.GetInt32(1),
reader.GetDecimal(2),
reader.GetDecimal(3));
}
return (0, 0, 0m, 0m);
}
private static FindingProjection MapProjection(NpgsqlDataReader reader)
{
var labelsJson = reader.GetString(10);
var labels = System.Text.Json.Nodes.JsonNode.Parse(labelsJson) as System.Text.Json.Nodes.JsonObject ?? new System.Text.Json.Nodes.JsonObject();
var rationaleJson = reader.GetString(13);
var rationale = System.Text.Json.Nodes.JsonNode.Parse(rationaleJson) as System.Text.Json.Nodes.JsonArray ?? new System.Text.Json.Nodes.JsonArray();
return new FindingProjection(
TenantId: reader.GetString(0),
FindingId: reader.GetString(1),
PolicyVersion: reader.GetString(2),
Status: reader.GetString(3),
Severity: reader.IsDBNull(4) ? null : reader.GetDecimal(4),
RiskScore: reader.IsDBNull(5) ? null : reader.GetDecimal(5),
RiskSeverity: reader.IsDBNull(6) ? null : reader.GetString(6),
RiskProfileVersion: reader.IsDBNull(7) ? null : reader.GetString(7),
RiskExplanationId: reader.IsDBNull(8) ? null : reader.GetGuid(8),
RiskEventSequence: reader.IsDBNull(9) ? null : reader.GetInt64(9),
Labels: labels,
CurrentEventId: reader.GetGuid(11),
ExplainRef: reader.IsDBNull(12) ? null : reader.GetString(12),
PolicyRationale: rationale,
UpdatedAt: reader.GetDateTime(14),
CycleHash: reader.GetString(15));
}
}

View File

@@ -0,0 +1,168 @@
using Microsoft.Extensions.Logging;
using Npgsql;
namespace StellaOps.Findings.Ledger.Infrastructure.Postgres;
/// <summary>
/// Service for validating Row-Level Security configuration on Findings Ledger tables.
/// Used for compliance checks and deployment verification.
/// </summary>
public sealed class RlsValidationService
{
private readonly LedgerDataSource _dataSource;
private readonly ILogger<RlsValidationService> _logger;
private static readonly string[] RlsProtectedTables =
[
"ledger_events",
"ledger_merkle_roots",
"findings_projection",
"finding_history",
"triage_actions",
"ledger_attestations",
"orchestrator_exports",
"airgap_imports"
];
public RlsValidationService(
LedgerDataSource dataSource,
ILogger<RlsValidationService> logger)
{
_dataSource = dataSource ?? throw new ArgumentNullException(nameof(dataSource));
_logger = logger ?? throw new ArgumentNullException(nameof(logger));
}
/// <summary>
/// Validates that all required tables have RLS enabled and policies configured.
/// </summary>
public async Task<RlsValidationResult> ValidateAsync(CancellationToken cancellationToken = default)
{
var result = new RlsValidationResult();
try
{
await using var connection = await _dataSource.OpenSystemConnectionAsync(cancellationToken).ConfigureAwait(false);
// Check RLS enabled on all tables
var rlsStatus = await CheckRlsEnabledAsync(connection, cancellationToken).ConfigureAwait(false);
result.TablesWithRlsEnabled = rlsStatus;
// Check policies exist
var policyStatus = await CheckPoliciesExistAsync(connection, cancellationToken).ConfigureAwait(false);
result.TablesWithPolicies = policyStatus;
// Check tenant function exists
result.TenantFunctionExists = await CheckTenantFunctionExistsAsync(connection, cancellationToken).ConfigureAwait(false);
// Determine overall status
result.IsCompliant = result.TablesWithRlsEnabled.Count == RlsProtectedTables.Length
&& result.TablesWithPolicies.Count == RlsProtectedTables.Length
&& result.TenantFunctionExists;
if (!result.IsCompliant)
{
var missingRls = RlsProtectedTables.Except(result.TablesWithRlsEnabled).ToList();
var missingPolicies = RlsProtectedTables.Except(result.TablesWithPolicies).ToList();
result.Issues.AddRange(missingRls.Select(t => $"Table '{t}' does not have RLS enabled"));
result.Issues.AddRange(missingPolicies.Select(t => $"Table '{t}' does not have tenant isolation policy"));
if (!result.TenantFunctionExists)
{
result.Issues.Add("Function 'findings_ledger_app.require_current_tenant()' does not exist");
}
_logger.LogWarning("RLS validation failed: {IssueCount} issues found", result.Issues.Count);
}
else
{
_logger.LogInformation("RLS validation passed: All {TableCount} tables are properly protected", RlsProtectedTables.Length);
}
}
catch (Exception ex)
{
result.IsCompliant = false;
result.Issues.Add($"Validation failed with error: {ex.Message}");
_logger.LogError(ex, "RLS validation failed with exception");
}
return result;
}
private async Task<List<string>> CheckRlsEnabledAsync(NpgsqlConnection connection, CancellationToken cancellationToken)
{
const string sql = """
SELECT tablename::TEXT
FROM pg_tables
WHERE schemaname = 'public'
AND tablename = ANY(@tables)
AND tablename IN (
SELECT relname::TEXT
FROM pg_class
WHERE relrowsecurity = true
)
""";
await using var cmd = new NpgsqlCommand(sql, connection);
cmd.Parameters.AddWithValue("tables", RlsProtectedTables);
var tables = new List<string>();
await using var reader = await cmd.ExecuteReaderAsync(cancellationToken).ConfigureAwait(false);
while (await reader.ReadAsync(cancellationToken).ConfigureAwait(false))
{
tables.Add(reader.GetString(0));
}
return tables;
}
private async Task<List<string>> CheckPoliciesExistAsync(NpgsqlConnection connection, CancellationToken cancellationToken)
{
const string sql = """
SELECT DISTINCT tablename::TEXT
FROM pg_policies
WHERE schemaname = 'public'
AND tablename = ANY(@tables)
AND policyname LIKE '%_tenant_isolation'
""";
await using var cmd = new NpgsqlCommand(sql, connection);
cmd.Parameters.AddWithValue("tables", RlsProtectedTables);
var tables = new List<string>();
await using var reader = await cmd.ExecuteReaderAsync(cancellationToken).ConfigureAwait(false);
while (await reader.ReadAsync(cancellationToken).ConfigureAwait(false))
{
tables.Add(reader.GetString(0));
}
return tables;
}
private async Task<bool> CheckTenantFunctionExistsAsync(NpgsqlConnection connection, CancellationToken cancellationToken)
{
const string sql = """
SELECT COUNT(*)
FROM pg_proc p
JOIN pg_namespace n ON p.pronamespace = n.oid
WHERE p.proname = 'require_current_tenant'
AND n.nspname = 'findings_ledger_app'
""";
await using var cmd = new NpgsqlCommand(sql, connection);
var count = await cmd.ExecuteScalarAsync(cancellationToken).ConfigureAwait(false);
return Convert.ToInt64(count) > 0;
}
}
/// <summary>
/// Result of RLS validation.
/// </summary>
public sealed class RlsValidationResult
{
public bool IsCompliant { get; set; }
public List<string> TablesWithRlsEnabled { get; set; } = [];
public List<string> TablesWithPolicies { get; set; } = [];
public bool TenantFunctionExists { get; set; }
public List<string> Issues { get; set; } = [];
}

View File

@@ -264,6 +264,184 @@ internal static class LedgerMetrics
StalenessValidationFailures.Add(1, tags);
}
private static readonly Counter<long> ScoredFindingsExports = Meter.CreateCounter<long>(
"ledger_scored_findings_exports_total",
description: "Count of scored findings export operations.");
private static readonly Histogram<double> ScoredFindingsExportDuration = Meter.CreateHistogram<double>(
"ledger_scored_findings_export_duration_seconds",
unit: "s",
description: "Duration of scored findings export operations.");
public static void RecordScoredFindingsExport(string? tenantId, int recordCount, double durationSeconds)
{
var tags = new KeyValuePair<string, object?>[]
{
new("tenant", tenantId ?? "unknown"),
new("record_count", recordCount)
};
ScoredFindingsExports.Add(1, tags);
ScoredFindingsExportDuration.Record(durationSeconds, tags);
}
// LEDGER-RISK-69-001: Scoring metrics/dashboards
private static readonly Histogram<double> ScoringLatencySeconds = Meter.CreateHistogram<double>(
"ledger_scoring_latency_seconds",
unit: "s",
description: "Latency of risk scoring operations per finding.");
private static readonly Counter<long> ScoringOperationsTotal = Meter.CreateCounter<long>(
"ledger_scoring_operations_total",
description: "Total number of scoring operations by result.");
private static readonly Counter<long> ScoringProviderGaps = Meter.CreateCounter<long>(
"ledger_scoring_provider_gaps_total",
description: "Count of findings where scoring provider was unavailable or returned no data.");
private static readonly ConcurrentDictionary<string, SeveritySnapshot> SeverityByTenantPolicy = new(StringComparer.Ordinal);
private static readonly ConcurrentDictionary<string, double> ScoreFreshnessByTenant = new(StringComparer.Ordinal);
private static readonly ObservableGauge<long> SeverityCriticalGauge =
Meter.CreateObservableGauge("ledger_severity_distribution_critical", ObserveSeverityCritical,
description: "Current count of critical severity findings by tenant and policy.");
private static readonly ObservableGauge<long> SeverityHighGauge =
Meter.CreateObservableGauge("ledger_severity_distribution_high", ObserveSeverityHigh,
description: "Current count of high severity findings by tenant and policy.");
private static readonly ObservableGauge<long> SeverityMediumGauge =
Meter.CreateObservableGauge("ledger_severity_distribution_medium", ObserveSeverityMedium,
description: "Current count of medium severity findings by tenant and policy.");
private static readonly ObservableGauge<long> SeverityLowGauge =
Meter.CreateObservableGauge("ledger_severity_distribution_low", ObserveSeverityLow,
description: "Current count of low severity findings by tenant and policy.");
private static readonly ObservableGauge<long> SeverityUnknownGauge =
Meter.CreateObservableGauge("ledger_severity_distribution_unknown", ObserveSeverityUnknown,
description: "Current count of unknown/unscored findings by tenant and policy.");
private static readonly ObservableGauge<double> ScoreFreshnessGauge =
Meter.CreateObservableGauge("ledger_score_freshness_seconds", ObserveScoreFreshness, unit: "s",
description: "Time since last scoring operation completed by tenant.");
public static void RecordScoringLatency(TimeSpan duration, string? tenantId, string? policyVersion, string result)
{
var tags = new KeyValuePair<string, object?>[]
{
new("tenant", tenantId ?? string.Empty),
new("policy_version", policyVersion ?? string.Empty),
new("result", result)
};
ScoringLatencySeconds.Record(duration.TotalSeconds, tags);
ScoringOperationsTotal.Add(1, tags);
}
public static void RecordScoringProviderGap(string? tenantId, string? provider, string reason)
{
var tags = new KeyValuePair<string, object?>[]
{
new("tenant", tenantId ?? string.Empty),
new("provider", provider ?? "unknown"),
new("reason", reason)
};
ScoringProviderGaps.Add(1, tags);
}
public static void UpdateSeverityDistribution(
string tenantId,
string? policyVersion,
int critical,
int high,
int medium,
int low,
int unknown)
{
var key = BuildTenantPolicyKey(tenantId, policyVersion);
SeverityByTenantPolicy[key] = new SeveritySnapshot(tenantId, policyVersion ?? "default", critical, high, medium, low, unknown);
}
public static void UpdateScoreFreshness(string tenantId, double secondsSinceLastScoring)
{
var key = NormalizeTenant(tenantId);
ScoreFreshnessByTenant[key] = secondsSinceLastScoring < 0 ? 0 : secondsSinceLastScoring;
}
private static string BuildTenantPolicyKey(string? tenantId, string? policyVersion)
{
var t = string.IsNullOrWhiteSpace(tenantId) ? string.Empty : tenantId;
var p = string.IsNullOrWhiteSpace(policyVersion) ? "default" : policyVersion;
return $"{t}|{p}";
}
private sealed record SeveritySnapshot(
string TenantId,
string PolicyVersion,
int Critical,
int High,
int Medium,
int Low,
int Unknown);
private static IEnumerable<Measurement<long>> ObserveSeverityCritical()
{
foreach (var kvp in SeverityByTenantPolicy)
{
yield return new Measurement<long>(kvp.Value.Critical,
new KeyValuePair<string, object?>("tenant", kvp.Value.TenantId),
new KeyValuePair<string, object?>("policy_version", kvp.Value.PolicyVersion));
}
}
private static IEnumerable<Measurement<long>> ObserveSeverityHigh()
{
foreach (var kvp in SeverityByTenantPolicy)
{
yield return new Measurement<long>(kvp.Value.High,
new KeyValuePair<string, object?>("tenant", kvp.Value.TenantId),
new KeyValuePair<string, object?>("policy_version", kvp.Value.PolicyVersion));
}
}
private static IEnumerable<Measurement<long>> ObserveSeverityMedium()
{
foreach (var kvp in SeverityByTenantPolicy)
{
yield return new Measurement<long>(kvp.Value.Medium,
new KeyValuePair<string, object?>("tenant", kvp.Value.TenantId),
new KeyValuePair<string, object?>("policy_version", kvp.Value.PolicyVersion));
}
}
private static IEnumerable<Measurement<long>> ObserveSeverityLow()
{
foreach (var kvp in SeverityByTenantPolicy)
{
yield return new Measurement<long>(kvp.Value.Low,
new KeyValuePair<string, object?>("tenant", kvp.Value.TenantId),
new KeyValuePair<string, object?>("policy_version", kvp.Value.PolicyVersion));
}
}
private static IEnumerable<Measurement<long>> ObserveSeverityUnknown()
{
foreach (var kvp in SeverityByTenantPolicy)
{
yield return new Measurement<long>(kvp.Value.Unknown,
new KeyValuePair<string, object?>("tenant", kvp.Value.TenantId),
new KeyValuePair<string, object?>("policy_version", kvp.Value.PolicyVersion));
}
}
private static IEnumerable<Measurement<double>> ObserveScoreFreshness()
{
foreach (var kvp in ScoreFreshnessByTenant)
{
yield return new Measurement<double>(kvp.Value, new KeyValuePair<string, object?>("tenant", kvp.Key));
}
}
private static IEnumerable<Measurement<double>> ObserveProjectionLag()
{
foreach (var kvp in ProjectionLagByTenant)

View File

@@ -0,0 +1,57 @@
namespace StellaOps.Findings.Ledger.Services;
/// <summary>
/// Service for querying scored findings with filtering, pagination, and explainability.
/// </summary>
public interface IScoredFindingsQueryService
{
/// <summary>
/// Queries scored findings with filters and pagination.
/// </summary>
Task<ScoredFindingsQueryResult> QueryAsync(
ScoredFindingsQuery query,
CancellationToken cancellationToken = default);
/// <summary>
/// Gets a single scored finding by ID.
/// </summary>
Task<ScoredFinding?> GetByIdAsync(
string tenantId,
string findingId,
string? policyVersion = null,
CancellationToken cancellationToken = default);
/// <summary>
/// Gets the score explanation for a finding.
/// </summary>
Task<ScoredFindingExplanation?> GetExplanationAsync(
string tenantId,
string findingId,
Guid? explanationId = null,
CancellationToken cancellationToken = default);
/// <summary>
/// Gets a risk summary for a tenant.
/// </summary>
Task<RiskSummary> GetSummaryAsync(
string tenantId,
string? policyVersion = null,
CancellationToken cancellationToken = default);
/// <summary>
/// Gets the severity distribution for a tenant.
/// </summary>
Task<SeverityDistribution> GetSeverityDistributionAsync(
string tenantId,
string? policyVersion = null,
CancellationToken cancellationToken = default);
/// <summary>
/// Gets top findings by risk score.
/// </summary>
Task<IReadOnlyList<ScoredFinding>> GetTopRisksAsync(
string tenantId,
int count = 10,
string? policyVersion = null,
CancellationToken cancellationToken = default);
}

View File

@@ -0,0 +1,232 @@
using System.Text;
using System.Text.Json;
using System.Text.Json.Nodes;
using Microsoft.Extensions.Logging;
using StellaOps.Findings.Ledger.Observability;
namespace StellaOps.Findings.Ledger.Services;
/// <summary>
/// Service for exporting scored findings to various formats.
/// </summary>
public sealed class ScoredFindingsExportService : IScoredFindingsExportService
{
private readonly IScoredFindingsQueryService _queryService;
private readonly TimeProvider _timeProvider;
private readonly ILogger<ScoredFindingsExportService> _logger;
private static readonly JsonSerializerOptions JsonOptions = new()
{
PropertyNamingPolicy = JsonNamingPolicy.SnakeCaseLower,
WriteIndented = false
};
public ScoredFindingsExportService(
IScoredFindingsQueryService queryService,
TimeProvider timeProvider,
ILogger<ScoredFindingsExportService> logger)
{
_queryService = queryService ?? throw new ArgumentNullException(nameof(queryService));
_timeProvider = timeProvider ?? throw new ArgumentNullException(nameof(timeProvider));
_logger = logger ?? throw new ArgumentNullException(nameof(logger));
}
public async Task<ExportResult> ExportAsync(
ScoredFindingsExportRequest request,
CancellationToken cancellationToken = default)
{
ArgumentNullException.ThrowIfNull(request);
ArgumentException.ThrowIfNullOrWhiteSpace(request.TenantId);
var startTime = _timeProvider.GetUtcNow();
var query = new ScoredFindingsQuery
{
TenantId = request.TenantId,
PolicyVersion = request.PolicyVersion,
MinScore = request.MinScore,
MaxScore = request.MaxScore,
Severities = request.Severities,
Statuses = request.Statuses,
Limit = request.MaxRecords ?? 10000,
SortBy = ScoredFindingsSortField.RiskScore,
Descending = true
};
var result = await _queryService.QueryAsync(query, cancellationToken).ConfigureAwait(false);
var exportData = request.Format switch
{
ExportFormat.Json => ExportToJson(result.Findings, request),
ExportFormat.Ndjson => ExportToNdjson(result.Findings, request),
ExportFormat.Csv => ExportToCsv(result.Findings, request),
_ => throw new ArgumentOutOfRangeException(nameof(request.Format))
};
var endTime = _timeProvider.GetUtcNow();
var duration = endTime - startTime;
LedgerMetrics.RecordScoredFindingsExport(request.TenantId, result.Findings.Count, duration.TotalSeconds);
_logger.LogInformation(
"Exported {Count} scored findings for tenant {TenantId} in {Duration:F2}s",
result.Findings.Count, request.TenantId, duration.TotalSeconds);
return new ExportResult
{
TenantId = request.TenantId,
Format = request.Format,
RecordCount = result.Findings.Count,
Data = exportData,
ContentType = GetContentType(request.Format),
GeneratedAt = endTime,
DurationMs = (long)duration.TotalMilliseconds
};
}
public async Task<Stream> ExportToStreamAsync(
ScoredFindingsExportRequest request,
CancellationToken cancellationToken = default)
{
var result = await ExportAsync(request, cancellationToken).ConfigureAwait(false);
return new MemoryStream(result.Data);
}
private static byte[] ExportToJson(IReadOnlyList<ScoredFinding> findings, ScoredFindingsExportRequest request)
{
var envelope = new JsonObject
{
["version"] = "1.0",
["tenant_id"] = request.TenantId,
["generated_at"] = DateTimeOffset.UtcNow.ToString("O"),
["record_count"] = findings.Count,
["findings"] = new JsonArray(findings.Select(MapToJsonNode).ToArray())
};
return JsonSerializer.SerializeToUtf8Bytes(envelope, JsonOptions);
}
private static byte[] ExportToNdjson(IReadOnlyList<ScoredFinding> findings, ScoredFindingsExportRequest request)
{
var sb = new StringBuilder();
foreach (var finding in findings)
{
sb.AppendLine(JsonSerializer.Serialize(MapToExportRecord(finding), JsonOptions));
}
return Encoding.UTF8.GetBytes(sb.ToString());
}
private static byte[] ExportToCsv(IReadOnlyList<ScoredFinding> findings, ScoredFindingsExportRequest request)
{
var sb = new StringBuilder();
sb.AppendLine("tenant_id,finding_id,policy_version,status,risk_score,risk_severity,risk_profile_version,updated_at");
foreach (var finding in findings)
{
sb.AppendLine(string.Join(",",
EscapeCsv(finding.TenantId),
EscapeCsv(finding.FindingId),
EscapeCsv(finding.PolicyVersion),
EscapeCsv(finding.Status),
finding.RiskScore?.ToString("F4") ?? "",
EscapeCsv(finding.RiskSeverity ?? ""),
EscapeCsv(finding.RiskProfileVersion ?? ""),
finding.UpdatedAt.ToString("O")));
}
return Encoding.UTF8.GetBytes(sb.ToString());
}
private static JsonNode MapToJsonNode(ScoredFinding finding)
{
return JsonSerializer.SerializeToNode(MapToExportRecord(finding), JsonOptions)!;
}
private static object MapToExportRecord(ScoredFinding finding)
{
return new
{
finding.TenantId,
finding.FindingId,
finding.PolicyVersion,
finding.Status,
finding.RiskScore,
finding.RiskSeverity,
finding.RiskProfileVersion,
finding.RiskExplanationId,
finding.ExplainRef,
finding.UpdatedAt
};
}
private static string EscapeCsv(string value)
{
if (string.IsNullOrEmpty(value)) return "";
if (value.Contains(',') || value.Contains('"') || value.Contains('\n'))
{
return $"\"{value.Replace("\"", "\"\"")}\"";
}
return value;
}
private static string GetContentType(ExportFormat format) => format switch
{
ExportFormat.Json => "application/json",
ExportFormat.Ndjson => "application/x-ndjson",
ExportFormat.Csv => "text/csv",
_ => "application/octet-stream"
};
}
/// <summary>
/// Service interface for exporting scored findings.
/// </summary>
public interface IScoredFindingsExportService
{
Task<ExportResult> ExportAsync(
ScoredFindingsExportRequest request,
CancellationToken cancellationToken = default);
Task<Stream> ExportToStreamAsync(
ScoredFindingsExportRequest request,
CancellationToken cancellationToken = default);
}
/// <summary>
/// Request for exporting scored findings.
/// </summary>
public sealed record ScoredFindingsExportRequest
{
public required string TenantId { get; init; }
public string? PolicyVersion { get; init; }
public decimal? MinScore { get; init; }
public decimal? MaxScore { get; init; }
public IReadOnlyList<string>? Severities { get; init; }
public IReadOnlyList<string>? Statuses { get; init; }
public int? MaxRecords { get; init; }
public ExportFormat Format { get; init; } = ExportFormat.Json;
public bool IncludeExplanations { get; init; }
}
/// <summary>
/// Export formats.
/// </summary>
public enum ExportFormat
{
Json,
Ndjson,
Csv
}
/// <summary>
/// Result of an export operation.
/// </summary>
public sealed record ExportResult
{
public required string TenantId { get; init; }
public required ExportFormat Format { get; init; }
public required int RecordCount { get; init; }
public required byte[] Data { get; init; }
public required string ContentType { get; init; }
public required DateTimeOffset GeneratedAt { get; init; }
public long DurationMs { get; init; }
}

View File

@@ -0,0 +1,118 @@
namespace StellaOps.Findings.Ledger.Services;
/// <summary>
/// Query parameters for scored findings.
/// </summary>
public sealed record ScoredFindingsQuery
{
public required string TenantId { get; init; }
public string? PolicyVersion { get; init; }
public decimal? MinScore { get; init; }
public decimal? MaxScore { get; init; }
public IReadOnlyList<string>? Severities { get; init; }
public IReadOnlyList<string>? Statuses { get; init; }
public string? ProfileId { get; init; }
public DateTimeOffset? ScoredAfter { get; init; }
public DateTimeOffset? ScoredBefore { get; init; }
public string? Cursor { get; init; }
public int Limit { get; init; } = 50;
public ScoredFindingsSortField SortBy { get; init; } = ScoredFindingsSortField.RiskScore;
public bool Descending { get; init; } = true;
}
/// <summary>
/// Sort fields for scored findings queries.
/// </summary>
public enum ScoredFindingsSortField
{
RiskScore,
RiskSeverity,
UpdatedAt,
FindingId
}
/// <summary>
/// Result of a scored findings query.
/// </summary>
public sealed record ScoredFindingsQueryResult
{
public required IReadOnlyList<ScoredFinding> Findings { get; init; }
public string? NextCursor { get; init; }
public bool HasMore { get; init; }
public int TotalCount { get; init; }
}
/// <summary>
/// A finding with risk score information.
/// </summary>
public sealed record ScoredFinding
{
public required string TenantId { get; init; }
public required string FindingId { get; init; }
public required string PolicyVersion { get; init; }
public required string Status { get; init; }
public decimal? RiskScore { get; init; }
public string? RiskSeverity { get; init; }
public string? RiskProfileVersion { get; init; }
public Guid? RiskExplanationId { get; init; }
public string? ExplainRef { get; init; }
public DateTimeOffset UpdatedAt { get; init; }
}
/// <summary>
/// Detailed score explanation for a finding.
/// </summary>
public sealed record ScoredFindingExplanation
{
public required string FindingId { get; init; }
public required string ProfileId { get; init; }
public required string ProfileVersion { get; init; }
public decimal RawScore { get; init; }
public decimal NormalizedScore { get; init; }
public required string Severity { get; init; }
public required IReadOnlyDictionary<string, decimal> SignalValues { get; init; }
public required IReadOnlyDictionary<string, decimal> SignalContributions { get; init; }
public string? OverrideApplied { get; init; }
public string? OverrideReason { get; init; }
public DateTimeOffset ScoredAt { get; init; }
}
/// <summary>
/// Severity distribution summary.
/// </summary>
public sealed record SeverityDistribution
{
public int Critical { get; init; }
public int High { get; init; }
public int Medium { get; init; }
public int Low { get; init; }
public int Informational { get; init; }
public int Unscored { get; init; }
}
/// <summary>
/// Score distribution buckets.
/// </summary>
public sealed record ScoreDistribution
{
public int Score0To20 { get; init; }
public int Score20To40 { get; init; }
public int Score40To60 { get; init; }
public int Score60To80 { get; init; }
public int Score80To100 { get; init; }
}
/// <summary>
/// Risk summary for a tenant.
/// </summary>
public sealed record RiskSummary
{
public required string TenantId { get; init; }
public int TotalFindings { get; init; }
public int ScoredFindings { get; init; }
public decimal AverageScore { get; init; }
public decimal MaxScore { get; init; }
public required SeverityDistribution SeverityDistribution { get; init; }
public required ScoreDistribution ScoreDistribution { get; init; }
public DateTimeOffset CalculatedAt { get; init; }
}

View File

@@ -0,0 +1,194 @@
using Microsoft.Extensions.Logging;
using StellaOps.Findings.Ledger.Domain;
using StellaOps.Findings.Ledger.Infrastructure;
namespace StellaOps.Findings.Ledger.Services;
/// <summary>
/// Service for querying scored findings with filtering, pagination, and explainability.
/// </summary>
public sealed class ScoredFindingsQueryService : IScoredFindingsQueryService
{
private readonly IFindingProjectionRepository _repository;
private readonly IRiskExplanationStore _explanationStore;
private readonly TimeProvider _timeProvider;
private readonly ILogger<ScoredFindingsQueryService> _logger;
public ScoredFindingsQueryService(
IFindingProjectionRepository repository,
IRiskExplanationStore explanationStore,
TimeProvider timeProvider,
ILogger<ScoredFindingsQueryService> logger)
{
_repository = repository ?? throw new ArgumentNullException(nameof(repository));
_explanationStore = explanationStore ?? throw new ArgumentNullException(nameof(explanationStore));
_timeProvider = timeProvider ?? throw new ArgumentNullException(nameof(timeProvider));
_logger = logger ?? throw new ArgumentNullException(nameof(logger));
}
public async Task<ScoredFindingsQueryResult> QueryAsync(
ScoredFindingsQuery query,
CancellationToken cancellationToken = default)
{
ArgumentNullException.ThrowIfNull(query);
ArgumentException.ThrowIfNullOrWhiteSpace(query.TenantId);
var (projections, totalCount) = await _repository.QueryScoredAsync(query, cancellationToken)
.ConfigureAwait(false);
var findings = projections
.Select(MapToScoredFinding)
.ToList();
var hasMore = findings.Count == query.Limit && totalCount > query.Limit;
var nextCursor = hasMore && findings.Count > 0
? EncodeCursor(findings[^1])
: null;
return new ScoredFindingsQueryResult
{
Findings = findings,
NextCursor = nextCursor,
HasMore = hasMore,
TotalCount = totalCount
};
}
public async Task<ScoredFinding?> GetByIdAsync(
string tenantId,
string findingId,
string? policyVersion = null,
CancellationToken cancellationToken = default)
{
ArgumentException.ThrowIfNullOrWhiteSpace(tenantId);
ArgumentException.ThrowIfNullOrWhiteSpace(findingId);
var projection = await _repository.GetAsync(
tenantId,
findingId,
policyVersion ?? "default",
cancellationToken).ConfigureAwait(false);
return projection is null ? null : MapToScoredFinding(projection);
}
public async Task<ScoredFindingExplanation?> GetExplanationAsync(
string tenantId,
string findingId,
Guid? explanationId = null,
CancellationToken cancellationToken = default)
{
ArgumentException.ThrowIfNullOrWhiteSpace(tenantId);
ArgumentException.ThrowIfNullOrWhiteSpace(findingId);
var explanation = await _explanationStore.GetAsync(
tenantId,
findingId,
explanationId,
cancellationToken).ConfigureAwait(false);
return explanation;
}
public async Task<RiskSummary> GetSummaryAsync(
string tenantId,
string? policyVersion = null,
CancellationToken cancellationToken = default)
{
ArgumentException.ThrowIfNullOrWhiteSpace(tenantId);
var severityDist = await _repository.GetSeverityDistributionAsync(tenantId, policyVersion, cancellationToken)
.ConfigureAwait(false);
var scoreDist = await _repository.GetScoreDistributionAsync(tenantId, policyVersion, cancellationToken)
.ConfigureAwait(false);
var (total, scored, avgScore, maxScore) = await _repository.GetRiskAggregatesAsync(tenantId, policyVersion, cancellationToken)
.ConfigureAwait(false);
return new RiskSummary
{
TenantId = tenantId,
TotalFindings = total,
ScoredFindings = scored,
AverageScore = avgScore,
MaxScore = maxScore,
SeverityDistribution = severityDist,
ScoreDistribution = scoreDist,
CalculatedAt = _timeProvider.GetUtcNow()
};
}
public async Task<SeverityDistribution> GetSeverityDistributionAsync(
string tenantId,
string? policyVersion = null,
CancellationToken cancellationToken = default)
{
ArgumentException.ThrowIfNullOrWhiteSpace(tenantId);
return await _repository.GetSeverityDistributionAsync(tenantId, policyVersion, cancellationToken)
.ConfigureAwait(false);
}
public async Task<IReadOnlyList<ScoredFinding>> GetTopRisksAsync(
string tenantId,
int count = 10,
string? policyVersion = null,
CancellationToken cancellationToken = default)
{
ArgumentException.ThrowIfNullOrWhiteSpace(tenantId);
var query = new ScoredFindingsQuery
{
TenantId = tenantId,
PolicyVersion = policyVersion,
Limit = count,
SortBy = ScoredFindingsSortField.RiskScore,
Descending = true
};
var result = await QueryAsync(query, cancellationToken).ConfigureAwait(false);
return result.Findings;
}
private static ScoredFinding MapToScoredFinding(FindingProjection projection)
{
return new ScoredFinding
{
TenantId = projection.TenantId,
FindingId = projection.FindingId,
PolicyVersion = projection.PolicyVersion,
Status = projection.Status,
RiskScore = projection.RiskScore,
RiskSeverity = projection.RiskSeverity,
RiskProfileVersion = projection.RiskProfileVersion,
RiskExplanationId = projection.RiskExplanationId,
ExplainRef = projection.ExplainRef,
UpdatedAt = projection.UpdatedAt
};
}
private static string EncodeCursor(ScoredFinding finding)
{
// Simple cursor encoding: findingId|score|updatedAt
var cursor = $"{finding.FindingId}|{finding.RiskScore}|{finding.UpdatedAt:O}";
return Convert.ToBase64String(System.Text.Encoding.UTF8.GetBytes(cursor));
}
}
/// <summary>
/// Store for risk score explanations.
/// </summary>
public interface IRiskExplanationStore
{
Task<ScoredFindingExplanation?> GetAsync(
string tenantId,
string findingId,
Guid? explanationId,
CancellationToken cancellationToken);
Task StoreAsync(
string tenantId,
ScoredFindingExplanation explanation,
CancellationToken cancellationToken);
}

View File

@@ -0,0 +1,178 @@
using Microsoft.Extensions.Logging;
using StellaOps.Findings.Ledger.Infrastructure;
using StellaOps.Findings.Ledger.Observability;
namespace StellaOps.Findings.Ledger.Services;
/// <summary>
/// Service for emitting and updating risk scoring metrics.
/// Supports dashboards for scoring latency, severity distribution, result freshness, and provider gaps.
/// </summary>
public sealed class ScoringMetricsService : IScoringMetricsService
{
private readonly IFindingProjectionRepository _repository;
private readonly TimeProvider _timeProvider;
private readonly ILogger<ScoringMetricsService> _logger;
public ScoringMetricsService(
IFindingProjectionRepository repository,
TimeProvider timeProvider,
ILogger<ScoringMetricsService> logger)
{
_repository = repository ?? throw new ArgumentNullException(nameof(repository));
_timeProvider = timeProvider ?? throw new ArgumentNullException(nameof(timeProvider));
_logger = logger ?? throw new ArgumentNullException(nameof(logger));
}
public async Task RefreshSeverityDistributionAsync(
string tenantId,
string? policyVersion = null,
CancellationToken cancellationToken = default)
{
ArgumentException.ThrowIfNullOrWhiteSpace(tenantId);
var distribution = await _repository.GetSeverityDistributionAsync(tenantId, policyVersion, cancellationToken)
.ConfigureAwait(false);
LedgerMetrics.UpdateSeverityDistribution(
tenantId,
policyVersion,
distribution.Critical,
distribution.High,
distribution.Medium,
distribution.Low,
distribution.Unscored);
_logger.LogDebug(
"Updated severity distribution for tenant {TenantId}: Critical={Critical}, High={High}, Medium={Medium}, Low={Low}, Unscored={Unscored}",
tenantId, distribution.Critical, distribution.High, distribution.Medium, distribution.Low, distribution.Unscored);
}
public void RecordScoringOperation(
string tenantId,
string? policyVersion,
TimeSpan duration,
ScoringResult result)
{
LedgerMetrics.RecordScoringLatency(duration, tenantId, policyVersion, result.ToString().ToLowerInvariant());
LedgerMetrics.UpdateScoreFreshness(tenantId, 0);
_logger.LogDebug(
"Recorded scoring operation for tenant {TenantId}: Duration={Duration:F3}s, Result={Result}",
tenantId, duration.TotalSeconds, result);
}
public void RecordProviderGap(
string tenantId,
string? provider,
string reason)
{
LedgerMetrics.RecordScoringProviderGap(tenantId, provider, reason);
_logger.LogWarning(
"Provider gap recorded for tenant {TenantId}: Provider={Provider}, Reason={Reason}",
tenantId, provider ?? "unknown", reason);
}
public void UpdateScoreFreshness(string tenantId, DateTimeOffset lastScoringTime)
{
var now = _timeProvider.GetUtcNow();
var freshness = (now - lastScoringTime).TotalSeconds;
LedgerMetrics.UpdateScoreFreshness(tenantId, freshness);
}
public async Task<ScoringMetricsSummary> GetSummaryAsync(
string tenantId,
string? policyVersion = null,
CancellationToken cancellationToken = default)
{
ArgumentException.ThrowIfNullOrWhiteSpace(tenantId);
var severityDist = await _repository.GetSeverityDistributionAsync(tenantId, policyVersion, cancellationToken)
.ConfigureAwait(false);
var scoreDist = await _repository.GetScoreDistributionAsync(tenantId, policyVersion, cancellationToken)
.ConfigureAwait(false);
var (total, scored, avgScore, maxScore) = await _repository.GetRiskAggregatesAsync(tenantId, policyVersion, cancellationToken)
.ConfigureAwait(false);
var coveragePercent = total > 0 ? (decimal)scored / total * 100 : 0;
return new ScoringMetricsSummary
{
TenantId = tenantId,
PolicyVersion = policyVersion ?? "default",
TotalFindings = total,
ScoredFindings = scored,
UnscoredFindings = total - scored,
CoveragePercent = coveragePercent,
AverageScore = avgScore,
MaxScore = maxScore,
SeverityDistribution = severityDist,
ScoreDistribution = scoreDist,
CalculatedAt = _timeProvider.GetUtcNow()
};
}
}
/// <summary>
/// Interface for scoring metrics service.
/// </summary>
public interface IScoringMetricsService
{
Task RefreshSeverityDistributionAsync(
string tenantId,
string? policyVersion = null,
CancellationToken cancellationToken = default);
void RecordScoringOperation(
string tenantId,
string? policyVersion,
TimeSpan duration,
ScoringResult result);
void RecordProviderGap(
string tenantId,
string? provider,
string reason);
void UpdateScoreFreshness(string tenantId, DateTimeOffset lastScoringTime);
Task<ScoringMetricsSummary> GetSummaryAsync(
string tenantId,
string? policyVersion = null,
CancellationToken cancellationToken = default);
}
/// <summary>
/// Result of a scoring operation.
/// </summary>
public enum ScoringResult
{
Success,
PartialSuccess,
ProviderUnavailable,
PolicyMissing,
ValidationFailed,
Timeout,
Error
}
/// <summary>
/// Summary of scoring metrics for a tenant.
/// </summary>
public sealed record ScoringMetricsSummary
{
public required string TenantId { get; init; }
public required string PolicyVersion { get; init; }
public int TotalFindings { get; init; }
public int ScoredFindings { get; init; }
public int UnscoredFindings { get; init; }
public decimal CoveragePercent { get; init; }
public decimal AverageScore { get; init; }
public decimal MaxScore { get; init; }
public required SeverityDistribution SeverityDistribution { get; init; }
public required ScoreDistribution ScoreDistribution { get; init; }
public DateTimeOffset CalculatedAt { get; init; }
}

View File

@@ -0,0 +1,160 @@
-- 007_enable_rls.sql
-- Enable Row-Level Security for Findings Ledger tenant isolation (LEDGER-TEN-48-001-DEV)
-- Based on Evidence Locker pattern per CONTRACT-FINDINGS-LEDGER-RLS-011
BEGIN;
-- ============================================
-- 1. Create app schema and tenant function
-- ============================================
CREATE SCHEMA IF NOT EXISTS findings_ledger_app;
CREATE OR REPLACE FUNCTION findings_ledger_app.require_current_tenant()
RETURNS TEXT
LANGUAGE plpgsql
STABLE
AS $$
DECLARE
tenant_text TEXT;
BEGIN
tenant_text := current_setting('app.current_tenant', true);
IF tenant_text IS NULL OR length(trim(tenant_text)) = 0 THEN
RAISE EXCEPTION 'app.current_tenant is not set for the current session'
USING ERRCODE = 'P0001';
END IF;
RETURN tenant_text;
END;
$$;
COMMENT ON FUNCTION findings_ledger_app.require_current_tenant() IS
'Returns the current tenant ID from session variable, raises exception if not set';
-- ============================================
-- 2. Enable RLS on ledger_events
-- ============================================
ALTER TABLE ledger_events ENABLE ROW LEVEL SECURITY;
ALTER TABLE ledger_events FORCE ROW LEVEL SECURITY;
DROP POLICY IF EXISTS ledger_events_tenant_isolation ON ledger_events;
CREATE POLICY ledger_events_tenant_isolation
ON ledger_events
FOR ALL
USING (tenant_id = findings_ledger_app.require_current_tenant())
WITH CHECK (tenant_id = findings_ledger_app.require_current_tenant());
-- ============================================
-- 3. Enable RLS on ledger_merkle_roots
-- ============================================
ALTER TABLE ledger_merkle_roots ENABLE ROW LEVEL SECURITY;
ALTER TABLE ledger_merkle_roots FORCE ROW LEVEL SECURITY;
DROP POLICY IF EXISTS ledger_merkle_roots_tenant_isolation ON ledger_merkle_roots;
CREATE POLICY ledger_merkle_roots_tenant_isolation
ON ledger_merkle_roots
FOR ALL
USING (tenant_id = findings_ledger_app.require_current_tenant())
WITH CHECK (tenant_id = findings_ledger_app.require_current_tenant());
-- ============================================
-- 4. Enable RLS on findings_projection
-- ============================================
ALTER TABLE findings_projection ENABLE ROW LEVEL SECURITY;
ALTER TABLE findings_projection FORCE ROW LEVEL SECURITY;
DROP POLICY IF EXISTS findings_projection_tenant_isolation ON findings_projection;
CREATE POLICY findings_projection_tenant_isolation
ON findings_projection
FOR ALL
USING (tenant_id = findings_ledger_app.require_current_tenant())
WITH CHECK (tenant_id = findings_ledger_app.require_current_tenant());
-- ============================================
-- 5. Enable RLS on finding_history
-- ============================================
ALTER TABLE finding_history ENABLE ROW LEVEL SECURITY;
ALTER TABLE finding_history FORCE ROW LEVEL SECURITY;
DROP POLICY IF EXISTS finding_history_tenant_isolation ON finding_history;
CREATE POLICY finding_history_tenant_isolation
ON finding_history
FOR ALL
USING (tenant_id = findings_ledger_app.require_current_tenant())
WITH CHECK (tenant_id = findings_ledger_app.require_current_tenant());
-- ============================================
-- 6. Enable RLS on triage_actions
-- ============================================
ALTER TABLE triage_actions ENABLE ROW LEVEL SECURITY;
ALTER TABLE triage_actions FORCE ROW LEVEL SECURITY;
DROP POLICY IF EXISTS triage_actions_tenant_isolation ON triage_actions;
CREATE POLICY triage_actions_tenant_isolation
ON triage_actions
FOR ALL
USING (tenant_id = findings_ledger_app.require_current_tenant())
WITH CHECK (tenant_id = findings_ledger_app.require_current_tenant());
-- ============================================
-- 7. Enable RLS on ledger_attestations
-- ============================================
ALTER TABLE ledger_attestations ENABLE ROW LEVEL SECURITY;
ALTER TABLE ledger_attestations FORCE ROW LEVEL SECURITY;
DROP POLICY IF EXISTS ledger_attestations_tenant_isolation ON ledger_attestations;
CREATE POLICY ledger_attestations_tenant_isolation
ON ledger_attestations
FOR ALL
USING (tenant_id = findings_ledger_app.require_current_tenant())
WITH CHECK (tenant_id = findings_ledger_app.require_current_tenant());
-- ============================================
-- 8. Enable RLS on orchestrator_exports
-- ============================================
ALTER TABLE orchestrator_exports ENABLE ROW LEVEL SECURITY;
ALTER TABLE orchestrator_exports FORCE ROW LEVEL SECURITY;
DROP POLICY IF EXISTS orchestrator_exports_tenant_isolation ON orchestrator_exports;
CREATE POLICY orchestrator_exports_tenant_isolation
ON orchestrator_exports
FOR ALL
USING (tenant_id = findings_ledger_app.require_current_tenant())
WITH CHECK (tenant_id = findings_ledger_app.require_current_tenant());
-- ============================================
-- 9. Enable RLS on airgap_imports
-- ============================================
ALTER TABLE airgap_imports ENABLE ROW LEVEL SECURITY;
ALTER TABLE airgap_imports FORCE ROW LEVEL SECURITY;
DROP POLICY IF EXISTS airgap_imports_tenant_isolation ON airgap_imports;
CREATE POLICY airgap_imports_tenant_isolation
ON airgap_imports
FOR ALL
USING (tenant_id = findings_ledger_app.require_current_tenant())
WITH CHECK (tenant_id = findings_ledger_app.require_current_tenant());
-- ============================================
-- 10. Create admin bypass role
-- ============================================
DO $$
BEGIN
IF NOT EXISTS (SELECT 1 FROM pg_roles WHERE rolname = 'findings_ledger_admin') THEN
CREATE ROLE findings_ledger_admin NOLOGIN BYPASSRLS;
END IF;
END;
$$;
COMMENT ON ROLE findings_ledger_admin IS
'Admin role that bypasses RLS for migrations and cross-tenant operations';
COMMIT;

View File

@@ -0,0 +1,42 @@
-- 007_enable_rls_rollback.sql
-- Rollback: Disable Row-Level Security for Findings Ledger (LEDGER-TEN-48-001-DEV)
BEGIN;
-- ============================================
-- 1. Disable RLS on all tables
-- ============================================
ALTER TABLE ledger_events DISABLE ROW LEVEL SECURITY;
ALTER TABLE ledger_merkle_roots DISABLE ROW LEVEL SECURITY;
ALTER TABLE findings_projection DISABLE ROW LEVEL SECURITY;
ALTER TABLE finding_history DISABLE ROW LEVEL SECURITY;
ALTER TABLE triage_actions DISABLE ROW LEVEL SECURITY;
ALTER TABLE ledger_attestations DISABLE ROW LEVEL SECURITY;
ALTER TABLE orchestrator_exports DISABLE ROW LEVEL SECURITY;
ALTER TABLE airgap_imports DISABLE ROW LEVEL SECURITY;
-- ============================================
-- 2. Drop all tenant isolation policies
-- ============================================
DROP POLICY IF EXISTS ledger_events_tenant_isolation ON ledger_events;
DROP POLICY IF EXISTS ledger_merkle_roots_tenant_isolation ON ledger_merkle_roots;
DROP POLICY IF EXISTS findings_projection_tenant_isolation ON findings_projection;
DROP POLICY IF EXISTS finding_history_tenant_isolation ON finding_history;
DROP POLICY IF EXISTS triage_actions_tenant_isolation ON triage_actions;
DROP POLICY IF EXISTS ledger_attestations_tenant_isolation ON ledger_attestations;
DROP POLICY IF EXISTS orchestrator_exports_tenant_isolation ON orchestrator_exports;
DROP POLICY IF EXISTS airgap_imports_tenant_isolation ON airgap_imports;
-- ============================================
-- 3. Drop tenant validation function and schema
-- ============================================
DROP FUNCTION IF EXISTS findings_ledger_app.require_current_tenant();
DROP SCHEMA IF EXISTS findings_ledger_app;
-- Note: Admin role is NOT dropped to avoid breaking other grants
-- DROP ROLE IF EXISTS findings_ledger_admin;
COMMIT;

View File

@@ -19,7 +19,7 @@
| UI-POLICY-23-001 | DONE (2025-12-05) | Workspace route `/policy-studio/packs` with pack list + quick actions; cached pack store with offline fallback. |
| UI-POLICY-23-002 | DONE (2025-12-05) | YAML editor route `/policy-studio/packs/:packId/yaml` with canonical preview and lint diagnostics. |
| UI-POLICY-23-003 | DONE (2025-12-05) | Rule Builder route `/policy-studio/packs/:packId/rules` with guided inputs and deterministic preview JSON. |
| UI-POLICY-23-004 | DONE (2025-12-05) | Approval workflow UI updated with readiness checklist, schedule window card, comment thread, and two-person indicator; targeted Karma spec build succeeds, execution blocked by missing system lib (`libnss3.so`) for ChromeHeadless. |
| UI-POLICY-23-004 | DONE (2025-12-05) | Approval workflow UI with checklist/schedule/comments; targeted Karma spec now passes locally using Playwright Chromium + bundled NSS libs (`CHROME_BIN=$HOME/.cache/ms-playwright/chromium-1140/chrome-linux/chrome`, `LD_LIBRARY_PATH=$PWD/.deps/usr/lib/x86_64-linux-gnu`). |
| UI-POLICY-23-005 | DONE (2025-12-05) | Simulator updated with SBOM/advisory pickers and explain trace view; uses PolicyApiService simulate. |
| UI-POLICY-23-006 | DOING (2025-12-05) | Explain view route `/policy-studio/packs/:packId/explain/:runId` with trace + JSON export; PDF export pending backend. |
| UI-POLICY-23-006 | DONE (2025-12-06) | Explain view route `/policy-studio/packs/:packId/explain/:runId` with trace + JSON/PDF export (uses offline-safe jsPDF shim). |
| UI-POLICY-23-001 | DONE (2025-12-05) | Workspace route `/policy-studio/packs` with pack list + quick actions; cached pack store with offline fallback. |

View File

@@ -1,5 +1,5 @@
import { CommonModule } from '@angular/common';
import { ComponentFixture, TestBed, fakeAsync, tick } from '@angular/core/testing';
import { ComponentFixture, TestBed, fakeAsync, tick, waitForAsync } from '@angular/core/testing';
import { ReactiveFormsModule } from '@angular/forms';
import { ActivatedRoute, convertToParamMap } from '@angular/router';
import { of } from 'rxjs';
@@ -14,7 +14,7 @@ describe('PolicyApprovalsComponent', () => {
let api: jasmine.SpyObj<PolicyApiService>;
let auth: any;
beforeEach(async () => {
beforeEach(waitForAsync(() => {
api = jasmine.createSpyObj<PolicyApiService>('PolicyApiService', [
'getApprovalWorkflow',
'submitForReview',
@@ -80,7 +80,7 @@ describe('PolicyApprovalsComponent', () => {
canReviewPolicies: () => true,
};
await TestBed.configureTestingModule({
TestBed.configureTestingModule({
imports: [CommonModule, ReactiveFormsModule, PolicyApprovalsComponent],
providers: [
{ provide: PolicyApiService, useValue: api },
@@ -95,13 +95,14 @@ describe('PolicyApprovalsComponent', () => {
},
},
],
}).compileComponents();
fixture = TestBed.createComponent(PolicyApprovalsComponent);
component = fixture.componentInstance;
fixture.detectChanges();
tick();
});
})
.compileComponents()
.then(() => {
fixture = TestBed.createComponent(PolicyApprovalsComponent);
component = fixture.componentInstance;
fixture.detectChanges();
});
}));
it('sorts reviews newest first', () => {
const reviews = component.sortedReviews;
@@ -120,15 +121,15 @@ describe('PolicyApprovalsComponent', () => {
component.onSubmit();
expect(api.submitForReview).toHaveBeenCalledWith({
policyId: 'pack-1',
version: '1.0.0',
message: 'Please review',
coverageResults: undefined,
simulationDiff: undefined,
scheduleStart: '2025-12-10T00:00',
scheduleEnd: '2025-12-11T00:00',
});
expect(api.submitForReview).toHaveBeenCalledWith(
jasmine.objectContaining({
policyId: 'pack-1',
version: '1.0.0',
message: 'Please review',
scheduleStart: '2025-12-10T00:00',
scheduleEnd: '2025-12-11T00:00',
})
);
});
it('persists schedule changes via updateApprovalSchedule', () => {

View File

@@ -22,7 +22,7 @@ import { PolicyApiService } from '../services/policy-api.service';
imports: [CommonModule, ReactiveFormsModule],
changeDetection: ChangeDetectionStrategy.OnPush,
template: `
<section class="approvals" aria-busy="{{ loading }}">
<section class="approvals" [attr.aria-busy]="loading">
<header class="approvals__header">
<div>
<p class="approvals__eyebrow">Policy Studio · Approvals</p>
@@ -539,13 +539,15 @@ export class PolicyApprovalsComponent {
if (!packId || this.submitForm.invalid) return;
const schedule = this.schedulePayload();
const coverage = this.submitForm.value.coverageResults?.trim();
const simulation = this.submitForm.value.simulationDiff?.trim();
const payload: PolicySubmissionRequest = {
policyId: packId,
version: version ?? 'latest',
message: this.submitForm.value.message ?? '',
coverageResults: this.submitForm.value.coverageResults ?? undefined,
simulationDiff: this.submitForm.value.simulationDiff ?? undefined,
coverageResults: coverage ? coverage : undefined,
simulationDiff: simulation ? simulation : undefined,
scheduleStart: schedule.start,
scheduleEnd: schedule.end,
};

View File

@@ -18,7 +18,7 @@ import {
imports: [CommonModule, ReactiveFormsModule],
changeDetection: ChangeDetectionStrategy.OnPush,
template: `
<section class="dash" aria-busy="{{ loading }}">
<section class="dash" [attr.aria-busy]="loading">
<header class="dash__header">
<div>
<p class="dash__eyebrow">Policy Studio · Runs</p>