Documentation cleanup, sprint work and planning; migrate the remaining non-EF DAL to EF.

This commit is contained in:
master
2026-02-25 01:24:07 +02:00
parent b07d27772e
commit 4db038123b
9090 changed files with 4836 additions and 2909 deletions

View File

@@ -79,7 +79,19 @@ public sealed record AddTurnRequest
/// Gets the user message content.
/// </summary>
[JsonPropertyName("content")]
public required string Content { get; init; }
public string? Content { get; init; }
/// <summary>
/// Gets the legacy user message content field.
/// </summary>
[JsonPropertyName("message")]
public string? Message { get; init; }
/// <summary>
/// Gets normalized message content from canonical or legacy payloads.
/// </summary>
[JsonIgnore]
public string? EffectiveContent => !string.IsNullOrWhiteSpace(Content) ? Content : Message;
/// <summary>
/// Gets optional metadata for this turn.

View File

@@ -21,6 +21,8 @@ using StellaOps.AdvisoryAI.WebService.Security;
using StellaOps.Auth.ServerIntegration.Tenancy;
using System.Collections.Immutable;
using System.Runtime.CompilerServices;
using System.Security.Cryptography;
using System.Text;
using System.Text.Json;
using static StellaOps.Localization.T;
@@ -166,6 +168,17 @@ public static class ChatEndpoints
if (!result.Success)
{
if (!result.GuardrailBlocked && !result.QuotaBlocked && !result.ToolAccessDenied)
{
logger.LogWarning(
"Chat gateway runtime fallback activated for tenant {TenantId}, user {UserId}. Reason: {Reason}",
tenantId,
userId,
result.Error ?? "processing_failed");
return Results.Ok(CreateDeterministicFallbackQueryResponse(request, result));
}
var statusCode = result.GuardrailBlocked
? StatusCodes.Status400BadRequest
: result.QuotaBlocked
@@ -858,6 +871,60 @@ public static class ChatEndpoints
} : null
};
}
private static AdvisoryChatQueryResponse CreateDeterministicFallbackQueryResponse(
AdvisoryChatQueryRequest request,
AdvisoryChatServiceResult failedResult)
{
var diagnostics = failedResult.Diagnostics is null
? null
: new DiagnosticsResponse
{
IntentRoutingMs = failedResult.Diagnostics.IntentRoutingMs,
EvidenceAssemblyMs = failedResult.Diagnostics.EvidenceAssemblyMs,
InferenceMs = failedResult.Diagnostics.InferenceMs,
TotalMs = failedResult.Diagnostics.TotalMs,
PromptTokens = failedResult.Diagnostics.PromptTokens,
CompletionTokens = failedResult.Diagnostics.CompletionTokens,
};
var reason = string.IsNullOrWhiteSpace(failedResult.Error)
? "chat runtime unavailable"
: failedResult.Error.Trim();
var normalizedQuery = request.Query?.Trim() ?? string.Empty;
var fallbackId = BuildFallbackResponseId(normalizedQuery, reason);
return new AdvisoryChatQueryResponse
{
ResponseId = fallbackId,
BundleId = null,
Intent = (failedResult.Intent ?? AdvisoryChatIntent.General).ToString(),
GeneratedAt = DateTimeOffset.UtcNow,
Summary =
$"Chat runtime is temporarily unavailable. For \"{normalizedQuery}\", start with unified search evidence, verify VEX status, and confirm active policy gates before acting.",
Confidence = new ConfidenceResponse { Level = ConfidenceLevel.Low.ToString(), Score = 0.2d },
EvidenceLinks = [],
Mitigations = [],
ProposedActions = [],
FollowUp = new FollowUpResponse
{
SuggestedQueries = [normalizedQuery],
NextSteps =
[
"Retry this chat query after runtime recovery.",
"Use global search to review findings, VEX, and policy context now."
]
},
Diagnostics = diagnostics,
};
}
private static string BuildFallbackResponseId(string query, string reason)
{
var bytes = SHA256.HashData(Encoding.UTF8.GetBytes($"{query}|{reason}"));
var token = Convert.ToHexString(bytes.AsSpan(0, 8)).ToLowerInvariant();
return $"fallback-{token}";
}
}
#region Request/Response DTOs

View File

@@ -121,6 +121,24 @@ public static class SearchAnalyticsEndpoints
if (events.Count > 0)
{
if (!string.IsNullOrWhiteSpace(userId))
{
foreach (var evt in events)
{
if (!ShouldPersistHistory(evt))
{
continue;
}
await analyticsService.RecordHistoryAsync(
tenant,
userId,
evt.Query,
evt.ResultCount ?? 0,
cancellationToken).ConfigureAwait(false);
}
}
// Fire-and-forget: do not await in the request pipeline to keep latency low.
// The analytics service already swallows exceptions internally.
_ = analyticsService.RecordEventsAsync(events, CancellationToken.None);
@@ -129,6 +147,22 @@ public static class SearchAnalyticsEndpoints
return Results.NoContent();
}
private static bool ShouldPersistHistory(SearchAnalyticsEvent evt)
{
if (string.IsNullOrWhiteSpace(evt.Query))
{
return false;
}
if (evt.Query.StartsWith("__", StringComparison.Ordinal))
{
return false;
}
return string.Equals(evt.EventType, "query", StringComparison.OrdinalIgnoreCase)
|| string.Equals(evt.EventType, "zero_result", StringComparison.OrdinalIgnoreCase);
}
private static async Task<IResult> GetHistoryAsync(
HttpContext httpContext,
SearchAnalyticsService analyticsService,

View File

@@ -37,8 +37,12 @@ using System.Diagnostics;
using System.Linq;
using System.Net;
using System.Runtime.CompilerServices;
using System.Text;
using System.Text.RegularExpressions;
using System.Threading.RateLimiting;
using StellaOps.Localization;
using AdvisoryChatModels = StellaOps.AdvisoryAI.Chat.Models;
using AdvisoryChatServices = StellaOps.AdvisoryAI.Chat.Services;
var builder = WebApplication.CreateBuilder(args);
@@ -262,23 +266,33 @@ app.MapGet("/v1/advisory-ai/rate-limits", HandleGetRateLimits)
// Chat endpoints (SPRINT_20260107_006_003 CH-005)
app.MapPost("/v1/advisory-ai/conversations", HandleCreateConversation)
.RequireRateLimiting("advisory-ai")
.RequireAuthorization(AdvisoryAIPolicies.OperatePolicy);
.RequireAuthorization(AdvisoryAIPolicies.OperatePolicy)
.WithSummary("Legacy conversation-creation endpoint (deprecated).")
.WithDescription("Creates a chat conversation using the legacy conversation surface. This endpoint family is deprecated in favor of /api/v1/chat/* and is scheduled for sunset on 2026-12-31 UTC.");
app.MapGet("/v1/advisory-ai/conversations/{conversationId}", HandleGetConversation)
.RequireRateLimiting("advisory-ai")
.RequireAuthorization(AdvisoryAIPolicies.ViewPolicy);
.RequireAuthorization(AdvisoryAIPolicies.ViewPolicy)
.WithSummary("Legacy conversation-read endpoint (deprecated).")
.WithDescription("Returns conversation state from the legacy conversation surface. Migrate readers to /api/v1/chat/* before the 2026-12-31 UTC sunset.");
app.MapPost("/v1/advisory-ai/conversations/{conversationId}/turns", HandleAddTurn)
.RequireRateLimiting("advisory-ai")
.RequireAuthorization(AdvisoryAIPolicies.OperatePolicy);
.RequireAuthorization(AdvisoryAIPolicies.OperatePolicy)
.WithSummary("Legacy conversation add-turn endpoint (deprecated).")
.WithDescription("Adds a conversational turn on the legacy conversation surface. Canonical payload field is content. Legacy message is accepted for compatibility only during the migration window ending 2026-12-31 UTC.");
app.MapDelete("/v1/advisory-ai/conversations/{conversationId}", HandleDeleteConversation)
.RequireRateLimiting("advisory-ai")
.RequireAuthorization(AdvisoryAIPolicies.OperatePolicy);
.RequireAuthorization(AdvisoryAIPolicies.OperatePolicy)
.WithSummary("Legacy conversation delete endpoint (deprecated).")
.WithDescription("Deletes a conversation on the legacy conversation surface. Migrate clients to /api/v1/chat/* before the 2026-12-31 UTC sunset.");
app.MapGet("/v1/advisory-ai/conversations", HandleListConversations)
.RequireRateLimiting("advisory-ai")
.RequireAuthorization(AdvisoryAIPolicies.ViewPolicy);
.RequireAuthorization(AdvisoryAIPolicies.ViewPolicy)
.WithSummary("Legacy conversation list endpoint (deprecated).")
.WithDescription("Lists conversations from the legacy conversation surface. Migrate listing flows to /api/v1/chat/* before the 2026-12-31 UTC sunset.");
// Chat gateway endpoints (controlled conversational interface)
app.MapChatEndpoints();
@@ -1118,6 +1132,7 @@ static async Task<IResult> HandleCreateConversation(
IConversationService conversationService,
CancellationToken cancellationToken)
{
ApplyLegacyConversationHeaders(httpContext);
using var activity = AdvisoryAiActivitySource.Instance.StartActivity("advisory_ai.create_conversation", ActivityKind.Server);
activity?.SetTag("advisory.tenant_id", request.TenantId);
@@ -1161,6 +1176,7 @@ static async Task<IResult> HandleGetConversation(
IConversationService conversationService,
CancellationToken cancellationToken)
{
ApplyLegacyConversationHeaders(httpContext);
using var activity = AdvisoryAiActivitySource.Instance.StartActivity("advisory_ai.get_conversation", ActivityKind.Server);
activity?.SetTag("advisory.conversation_id", conversationId);
@@ -1184,6 +1200,7 @@ static async Task<IResult> HandleAddTurn(
string conversationId,
StellaOps.AdvisoryAI.WebService.Contracts.AddTurnRequest request,
IConversationService conversationService,
AdvisoryChatServices.IAdvisoryChatService? advisoryChatService,
ChatPromptAssembler? promptAssembler,
ChatResponseStreamer? responseStreamer,
GroundingValidator? groundingValidator,
@@ -1192,15 +1209,39 @@ static async Task<IResult> HandleAddTurn(
ILogger<Program> logger,
CancellationToken cancellationToken)
{
ApplyLegacyConversationHeaders(httpContext);
using var activity = AdvisoryAiActivitySource.Instance.StartActivity("advisory_ai.add_turn", ActivityKind.Server);
activity?.SetTag("advisory.conversation_id", conversationId);
activity?.SetTag("advisory.stream", request.Stream);
var normalizedContent = request.EffectiveContent?.Trim();
var usingLegacyMessage = string.IsNullOrWhiteSpace(request.Content) && !string.IsNullOrWhiteSpace(request.Message);
activity?.SetTag("advisory.request_shape", usingLegacyMessage ? "legacy_message" : "content");
if (usingLegacyMessage)
{
var tenantId = ResolveHeaderValue(httpContext, "X-StellaOps-Tenant")
?? ResolveHeaderValue(httpContext, "X-Tenant-Id")
?? "unknown";
logger.LogInformation(
"Legacy chat payload field 'message' used by tenant {TenantId} on endpoint {Endpoint} conversation {ConversationId}.",
tenantId,
httpContext.Request.Path.Value ?? "/v1/advisory-ai/conversations/{conversationId}/turns",
conversationId);
httpContext.Response.Headers.Append(
"Warning",
"299 - Legacy chat payload field 'message' is deprecated; use 'content'.");
}
if (!EnsureChatAuthorized(httpContext))
{
return Results.StatusCode(StatusCodes.Status403Forbidden);
}
if (string.IsNullOrWhiteSpace(normalizedContent))
{
return Results.BadRequest(new { error = "Either 'content' or legacy 'message' must be provided." });
}
var startTime = timeProvider.GetUtcNow();
// Add user turn
@@ -1209,7 +1250,7 @@ static async Task<IResult> HandleAddTurn(
var userTurnRequest = new TurnRequest
{
Role = TurnRole.User,
Content = request.Content,
Content = normalizedContent,
Metadata = request.Metadata?.ToImmutableDictionary()
};
@@ -1217,14 +1258,23 @@ static async Task<IResult> HandleAddTurn(
.ConfigureAwait(false);
activity?.SetTag("advisory.user_turn_id", userTurn.TurnId);
var assistantGeneration = await GenerateAssistantTurnAsync(
httpContext,
conversationId,
normalizedContent,
conversationService,
advisoryChatService,
logger,
cancellationToken).ConfigureAwait(false);
// For now, return a placeholder response since we don't have the full LLM pipeline
// In a complete implementation, this would call the prompt assembler, LLM, and validators
var assistantContent = GeneratePlaceholderResponse(request.Content);
var assistantContent = assistantGeneration.Content;
var assistantTurnRequest = new TurnRequest
{
Role = TurnRole.Assistant,
Content = assistantContent
Content = assistantContent,
EvidenceLinks = assistantGeneration.EvidenceLinks,
ProposedActions = assistantGeneration.ProposedActions,
Metadata = assistantGeneration.Metadata
};
var assistantTurn = await conversationService.AddTurnAsync(conversationId, assistantTurnRequest, cancellationToken)
@@ -1243,8 +1293,8 @@ static async Task<IResult> HandleAddTurn(
ProposedActions = assistantTurn.ProposedActions.IsEmpty
? null
: assistantTurn.ProposedActions.Select(StellaOps.AdvisoryAI.WebService.Contracts.ProposedActionResponse.FromAction).ToList(),
GroundingScore = 1.0, // Placeholder
TokenCount = assistantContent.Split(' ').Length, // Rough estimate
GroundingScore = assistantGeneration.GroundingScore,
TokenCount = assistantGeneration.TokenCount,
DurationMs = (long)elapsed.TotalMilliseconds
};
@@ -1258,14 +1308,18 @@ static async Task<IResult> HandleAddTurn(
{
await httpContext.Response.WriteAsync(
"event: token\n" +
$"data: {assistantContent}\n\n",
$"data: {System.Text.Json.JsonSerializer.Serialize(new { content = assistantContent })}\n\n",
cancellationToken);
await httpContext.Response.WriteAsync(
"event: done\n" +
$"data: {System.Text.Json.JsonSerializer.Serialize(new { turnId = assistantTurn.TurnId, groundingScore = assistantGeneration.GroundingScore })}\n\n",
cancellationToken);
await httpContext.Response.Body.FlushAsync(cancellationToken);
return Results.Empty;
}
await foreach (var streamEvent in responseStreamer.StreamResponseAsync(
StreamPlaceholderTokens(assistantContent, cancellationToken),
StreamContentTokens(assistantContent, cancellationToken),
conversationId,
assistantTurn.TurnId,
cancellationToken))
@@ -1292,6 +1346,7 @@ static async Task<IResult> HandleDeleteConversation(
IConversationService conversationService,
CancellationToken cancellationToken)
{
ApplyLegacyConversationHeaders(httpContext);
using var activity = AdvisoryAiActivitySource.Instance.StartActivity("advisory_ai.delete_conversation", ActivityKind.Server);
activity?.SetTag("advisory.conversation_id", conversationId);
@@ -1317,6 +1372,7 @@ static async Task<IResult> HandleListConversations(
IConversationService conversationService,
CancellationToken cancellationToken)
{
ApplyLegacyConversationHeaders(httpContext);
using var activity = AdvisoryAiActivitySource.Instance.StartActivity("advisory_ai.list_conversations", ActivityKind.Server);
if (!EnsureChatAuthorized(httpContext))
@@ -1357,6 +1413,13 @@ static async Task<IResult> HandleListConversations(
});
}
static void ApplyLegacyConversationHeaders(HttpContext context)
{
context.Response.Headers["Deprecation"] = "true";
context.Response.Headers["Sunset"] = "Thu, 31 Dec 2026 23:59:59 GMT";
context.Response.Headers["Link"] = "</api/v1/chat/query>; rel=\"successor-version\"";
}
static bool EnsureChatAuthorized(HttpContext context)
{
if (!context.Request.Headers.TryGetValue("X-StellaOps-Scopes", out var scopes))
@@ -1368,20 +1431,427 @@ static bool EnsureChatAuthorized(HttpContext context)
.SelectMany(value => value?.Split(' ', StringSplitOptions.RemoveEmptyEntries | StringSplitOptions.TrimEntries) ?? [])
.ToHashSet(StringComparer.OrdinalIgnoreCase);
return allowed.Contains("advisory:run")
return allowed.Contains("advisory-ai:view")
|| allowed.Contains("advisory-ai:operate")
|| allowed.Contains("advisory-ai:admin")
|| allowed.Contains("advisoryai:view")
|| allowed.Contains("advisoryai:operate")
|| allowed.Contains("advisoryai:admin")
|| allowed.Contains("advisory:run")
|| allowed.Contains("advisory:chat")
|| allowed.Contains("chat:user")
|| allowed.Contains("chat:admin");
}
static string GeneratePlaceholderResponse(string userMessage)
static async Task<AssistantGenerationResult> GenerateAssistantTurnAsync(
HttpContext httpContext,
string conversationId,
string userMessage,
IConversationService conversationService,
AdvisoryChatServices.IAdvisoryChatService? advisoryChatService,
ILogger logger,
CancellationToken cancellationToken)
{
// Placeholder implementation - in production this would call the LLM
return $"I received your message: \"{userMessage}\". This is a placeholder response. " +
"The full chat functionality with grounded responses will be implemented when the LLM pipeline is connected.";
var conversation = await conversationService.GetAsync(conversationId, cancellationToken).ConfigureAwait(false);
if (conversation is null)
{
return CreateDeterministicFallbackTurn(userMessage, "conversation_not_found");
}
if (advisoryChatService is null)
{
return CreateDeterministicFallbackTurn(userMessage, "chat_service_unavailable");
}
try
{
var tenantId = ResolveHeaderValue(httpContext, "X-StellaOps-Tenant")
?? ResolveHeaderValue(httpContext, "X-Tenant-Id")
?? conversation.TenantId;
var userId = ResolveHeaderValue(httpContext, "X-StellaOps-Actor")
?? ResolveHeaderValue(httpContext, "X-User-Id")
?? conversation.UserId;
var runtimeRequest = new AdvisoryChatServices.AdvisoryChatRequest
{
TenantId = tenantId,
UserId = string.IsNullOrWhiteSpace(userId) ? "anonymous" : userId,
UserRoles = ResolveUserRoles(httpContext),
Query = BuildRuntimeQuery(userMessage, conversation.Context),
ArtifactDigest = ResolveArtifactDigest(conversation.Context, userMessage),
ImageReference = null,
Environment = null,
CorrelationId = ResolveHeaderValue(httpContext, "X-Correlation-Id"),
ConversationId = conversationId
};
var serviceResult = await advisoryChatService.ProcessQueryAsync(runtimeRequest, cancellationToken).ConfigureAwait(false);
if (serviceResult.Success && serviceResult.Response is not null)
{
return CreateRuntimeAssistantTurn(serviceResult.Response, serviceResult.Diagnostics);
}
logger.LogInformation(
"Chat runtime fell back to deterministic response for conversation {ConversationId}. Reason: {Reason}",
conversationId,
serviceResult.Error ?? "runtime_unavailable");
return CreateDeterministicFallbackTurn(userMessage, NormalizeReasonCode(serviceResult.Error));
}
catch (OperationCanceledException)
{
throw;
}
catch (Exception ex)
{
logger.LogWarning(
ex,
"Chat runtime invocation failed for conversation {ConversationId}; using deterministic fallback.",
conversationId);
return CreateDeterministicFallbackTurn(userMessage, "runtime_exception");
}
}
static async IAsyncEnumerable<TokenChunk> StreamPlaceholderTokens(
static AssistantGenerationResult CreateRuntimeAssistantTurn(
AdvisoryChatModels.AdvisoryChatResponse response,
AdvisoryChatServices.AdvisoryChatDiagnostics? diagnostics)
{
var content = BuildRuntimeAssistantContent(response);
var evidenceLinks = response.EvidenceLinks
.Select(MapEvidenceLink)
.ToImmutableArray();
var proposedActions = response.ProposedActions
.Select(MapProposedAction)
.ToImmutableArray();
var metadata = ImmutableDictionary<string, string>.Empty
.Add("runtime", "advisory_chat_service")
.Add("response_id", response.ResponseId);
if (!string.IsNullOrWhiteSpace(response.BundleId))
{
metadata = metadata.Add("bundle_id", response.BundleId);
}
if (diagnostics is not null)
{
metadata = metadata
.Add("diagnostics_total_ms", diagnostics.TotalMs.ToString(System.Globalization.CultureInfo.InvariantCulture))
.Add("diagnostics_inference_ms", diagnostics.InferenceMs.ToString(System.Globalization.CultureInfo.InvariantCulture));
}
return new AssistantGenerationResult(
Content: content,
EvidenceLinks: evidenceLinks,
ProposedActions: proposedActions,
GroundingScore: Clamp01(response.Confidence.Score),
TokenCount: diagnostics?.CompletionTokens > 0 ? diagnostics.CompletionTokens : CountTokens(content),
Metadata: metadata);
}
static AssistantGenerationResult CreateDeterministicFallbackTurn(string userMessage, string reasonCode)
{
var normalizedMessage = userMessage.Trim();
if (normalizedMessage.Length > 140)
{
normalizedMessage = normalizedMessage[..140] + "...";
}
var content =
$"I couldn't complete grounded AdvisoryAI analysis ({reasonCode}). " +
"Provide a finding/CVE ID and artifact digest (sha256:...) for evidence-backed guidance. " +
$"Request noted: \"{normalizedMessage}\". " +
"[docs:modules/advisory-ai/architecture.md]";
var evidence = ImmutableArray.Create(new EvidenceLink
{
Type = EvidenceLinkType.Documentation,
Uri = "docs:modules/advisory-ai/architecture.md",
Label = "Advisory AI architecture",
Confidence = 0.2
});
var metadata = ImmutableDictionary<string, string>.Empty
.Add("runtime", "deterministic_fallback")
.Add("reason", reasonCode);
return new AssistantGenerationResult(
Content: content,
EvidenceLinks: evidence,
ProposedActions: ImmutableArray<ProposedAction>.Empty,
GroundingScore: 0.25,
TokenCount: CountTokens(content),
Metadata: metadata);
}
static string BuildRuntimeAssistantContent(AdvisoryChatModels.AdvisoryChatResponse response)
{
var summary = response.Summary?.Trim();
if (string.IsNullOrWhiteSpace(summary))
{
summary = "I reviewed your request and assembled available evidence.";
}
var builder = new StringBuilder(summary);
var suggestions = response.FollowUp?.SuggestedQueries
.Where(static query => !string.IsNullOrWhiteSpace(query))
.Take(2)
.ToArray();
if (suggestions is { Length: > 0 })
{
builder.AppendLine();
builder.AppendLine();
builder.AppendLine("Try next:");
foreach (var suggestion in suggestions)
{
builder.Append("- ").AppendLine(suggestion.Trim());
}
}
var citations = response.EvidenceLinks
.Select(ToCitationToken)
.Where(static token => !string.IsNullOrWhiteSpace(token))
.Take(4)
.ToArray();
if (citations.Length > 0)
{
builder.AppendLine();
builder.AppendLine();
builder.Append("Evidence: ").Append(string.Join(' ', citations));
}
return builder.ToString().Trim();
}
static EvidenceLink MapEvidenceLink(AdvisoryChatModels.EvidenceLink link)
{
return new EvidenceLink
{
Type = link.Type switch
{
AdvisoryChatModels.EvidenceLinkType.Sbom => EvidenceLinkType.Sbom,
AdvisoryChatModels.EvidenceLinkType.Vex => EvidenceLinkType.Vex,
AdvisoryChatModels.EvidenceLinkType.Reach => EvidenceLinkType.Reachability,
AdvisoryChatModels.EvidenceLinkType.Runtime => EvidenceLinkType.RuntimeTrace,
AdvisoryChatModels.EvidenceLinkType.Attest => EvidenceLinkType.Dsse,
AdvisoryChatModels.EvidenceLinkType.Policy => EvidenceLinkType.Documentation,
AdvisoryChatModels.EvidenceLinkType.Binpatch => EvidenceLinkType.Other,
AdvisoryChatModels.EvidenceLinkType.Opsmem => EvidenceLinkType.Documentation,
_ => EvidenceLinkType.Other
},
Uri = string.IsNullOrWhiteSpace(link.Link)
? $"other:{link.Type.ToString().ToLowerInvariant()}"
: link.Link.Trim(),
Label = string.IsNullOrWhiteSpace(link.Description) ? null : link.Description.Trim(),
Confidence = link.Confidence switch
{
AdvisoryChatModels.ConfidenceLevel.High => 0.9,
AdvisoryChatModels.ConfidenceLevel.Medium => 0.7,
AdvisoryChatModels.ConfidenceLevel.Low => 0.4,
AdvisoryChatModels.ConfidenceLevel.InsufficientEvidence => 0.2,
_ => null
}
};
}
static ProposedAction MapProposedAction(AdvisoryChatModels.ProposedAction action)
{
var mappedActionType = action.ActionType switch
{
AdvisoryChatModels.ProposedActionType.CreateVex => "create_vex",
AdvisoryChatModels.ProposedActionType.GeneratePr => "generate_manifest",
AdvisoryChatModels.ProposedActionType.CreateTicket => "escalate",
AdvisoryChatModels.ProposedActionType.Approve => "approve",
AdvisoryChatModels.ProposedActionType.Quarantine => "quarantine",
AdvisoryChatModels.ProposedActionType.Defer => "defer",
AdvisoryChatModels.ProposedActionType.Waive => "defer",
AdvisoryChatModels.ProposedActionType.Escalate => "escalate",
_ => "dismiss"
};
return new ProposedAction
{
ActionType = mappedActionType,
Label = string.IsNullOrWhiteSpace(action.Label) ? mappedActionType : action.Label.Trim(),
Rationale = action.Description,
Parameters = action.Parameters ?? ImmutableDictionary<string, string>.Empty,
RequiresConfirmation = action.RequiresApproval ?? true,
PolicyGate = action.RiskLevel?.ToString()
};
}
static string BuildRuntimeQuery(string userMessage, ConversationContext context)
{
var normalized = userMessage.Trim();
var contextHints = new List<string>();
if (!string.IsNullOrWhiteSpace(context.CurrentCveId)
&& normalized.IndexOf(context.CurrentCveId, StringComparison.OrdinalIgnoreCase) < 0)
{
contextHints.Add($"CVE: {context.CurrentCveId.Trim()}");
}
if (!string.IsNullOrWhiteSpace(context.FindingId)
&& normalized.IndexOf(context.FindingId, StringComparison.OrdinalIgnoreCase) < 0)
{
contextHints.Add($"Finding: {context.FindingId.Trim()}");
}
if (contextHints.Count == 0)
{
return normalized;
}
return $"{normalized}{Environment.NewLine}{Environment.NewLine}Context:{Environment.NewLine}- {string.Join(Environment.NewLine + "- ", contextHints)}";
}
static string? ResolveArtifactDigest(ConversationContext context, string userMessage)
{
if (!string.IsNullOrWhiteSpace(context.CurrentImageDigest))
{
return context.CurrentImageDigest.Trim();
}
var match = Regex.Match(userMessage, @"sha256:[a-f0-9]{16,}", RegexOptions.IgnoreCase | RegexOptions.CultureInvariant);
return match.Success ? match.Value.ToLowerInvariant() : null;
}
static string? ResolveHeaderValue(HttpContext context, string headerName)
{
if (!context.Request.Headers.TryGetValue(headerName, out var values))
{
return null;
}
foreach (var value in values)
{
if (!string.IsNullOrWhiteSpace(value))
{
return value.Trim();
}
}
return null;
}
static ImmutableArray<string> ResolveUserRoles(HttpContext context)
{
if (!context.Request.Headers.TryGetValue("X-StellaOps-Roles", out var roleValues))
{
return ImmutableArray<string>.Empty;
}
var roles = roleValues
.SelectMany(value => value?.Split(new[] { ' ', ',' }, StringSplitOptions.RemoveEmptyEntries | StringSplitOptions.TrimEntries) ?? [])
.Where(static role => !string.IsNullOrWhiteSpace(role))
.Distinct(StringComparer.OrdinalIgnoreCase)
.ToImmutableArray();
return roles;
}
static string NormalizeReasonCode(string? reason)
{
if (string.IsNullOrWhiteSpace(reason))
{
return "runtime_unavailable";
}
var normalized = Regex.Replace(
reason,
@"[^a-z0-9]+",
"_",
RegexOptions.IgnoreCase | RegexOptions.CultureInvariant).Trim('_').ToLowerInvariant();
if (normalized.Length > 40)
{
normalized = normalized[..40];
}
return string.IsNullOrWhiteSpace(normalized) ? "runtime_unavailable" : normalized;
}
static string? ToCitationToken(AdvisoryChatModels.EvidenceLink link)
{
var type = link.Type switch
{
AdvisoryChatModels.EvidenceLinkType.Sbom => "sbom",
AdvisoryChatModels.EvidenceLinkType.Reach => "reach",
AdvisoryChatModels.EvidenceLinkType.Runtime => "runtime",
AdvisoryChatModels.EvidenceLinkType.Vex => "vex",
AdvisoryChatModels.EvidenceLinkType.Attest => "attest",
AdvisoryChatModels.EvidenceLinkType.Policy => "policy",
AdvisoryChatModels.EvidenceLinkType.Binpatch => "docs",
AdvisoryChatModels.EvidenceLinkType.Opsmem => "docs",
_ => "docs"
};
var path = ExtractCitationPath(link.Link);
if (string.IsNullOrWhiteSpace(path))
{
path = link.Description;
}
if (string.IsNullOrWhiteSpace(path))
{
return null;
}
var sanitizedPath = path.Trim().Replace("[", "(").Replace("]", ")");
return $"[{type}:{sanitizedPath}]";
}
static string? ExtractCitationPath(string? uri)
{
if (string.IsNullOrWhiteSpace(uri))
{
return null;
}
var trimmed = uri.Trim();
if (trimmed.Contains("://", StringComparison.Ordinal))
{
return trimmed;
}
var separatorIndex = trimmed.IndexOf(':');
if (separatorIndex > 0 && separatorIndex < trimmed.Length - 1)
{
return trimmed[(separatorIndex + 1)..];
}
return trimmed;
}
static double Clamp01(double value)
{
if (double.IsNaN(value) || double.IsInfinity(value))
{
return 0;
}
if (value < 0)
{
return 0;
}
return value > 1 ? 1 : value;
}
static int CountTokens(string content)
{
if (string.IsNullOrWhiteSpace(content))
{
return 0;
}
return Regex.Split(content.Trim(), @"\s+", RegexOptions.CultureInvariant).Length;
}
static async IAsyncEnumerable<TokenChunk> StreamContentTokens(
string content,
[EnumeratorCancellation] CancellationToken cancellationToken)
{
@@ -1395,6 +1865,14 @@ static async IAsyncEnumerable<TokenChunk> StreamPlaceholderTokens(
}
}
internal sealed record AssistantGenerationResult(
string Content,
ImmutableArray<EvidenceLink> EvidenceLinks,
ImmutableArray<ProposedAction> ProposedActions,
double GroundingScore,
int TokenCount,
ImmutableDictionary<string, string> Metadata);
internal sealed record PipelinePlanRequest(
AdvisoryTaskType? TaskType,
string AdvisoryKey,

View File

@@ -200,7 +200,7 @@ public sealed class ChatResponseStreamer
// Pattern: [type:path]
var matches = System.Text.RegularExpressions.Regex.Matches(
content,
@"\[(?<type>sbom|reach|runtime|vex|attest|auth|docs):(?<path>[^\]]+)\]");
@"\[(?<type>sbom|reach|runtime|vex|attest|auth|docs|finding|scan|policy):(?<path>[^\]]+)\]");
for (int i = existingCount; i < matches.Count; i++)
{

View File

@@ -95,6 +95,17 @@ public sealed class KnowledgeSearchOptions
/// </summary>
public bool RoleBasedBiasEnabled { get; set; } = true;
/// <summary>
/// Enables periodic quality-alert refresh from analytics and feedback signals.
/// </summary>
public bool SearchQualityMonitorEnabled { get; set; } = true;
/// <summary>
/// Interval in seconds for quality-monitor refresh.
/// </summary>
[Range(30, 86400)]
public int SearchQualityMonitorIntervalSeconds { get; set; } = 300;
// ── Live adapter settings (Sprint 103 / G2) ──
/// <summary>Base URL for the Scanner microservice (e.g. "http://scanner:8080").</summary>

View File

@@ -1,4 +1,4 @@
[
[
{
"checkCode": "check.core.disk.space",
"title": "Speicherplatzverfügbarkeit",
@@ -168,3 +168,4 @@
]
}
]

View File

@@ -1,4 +1,4 @@
[
[
{
"checkCode": "check.core.disk.space",
"title": "Disponibilité de l'espace disque",

View File

@@ -9,6 +9,9 @@ internal sealed class SearchAnalyticsService
{
private readonly KnowledgeSearchOptions _options;
private readonly ILogger<SearchAnalyticsService> _logger;
private readonly object _fallbackLock = new();
private readonly List<(SearchAnalyticsEvent Event, DateTimeOffset RecordedAt)> _fallbackEvents = [];
private readonly Dictionary<(string TenantId, string UserId, string Query), SearchHistoryEntry> _fallbackHistory = new();
public SearchAnalyticsService(
IOptions<KnowledgeSearchOptions> options,
@@ -20,7 +23,12 @@ internal sealed class SearchAnalyticsService
public async Task RecordEventAsync(SearchAnalyticsEvent evt, CancellationToken ct = default)
{
if (string.IsNullOrWhiteSpace(_options.ConnectionString)) return;
var recordedAt = DateTimeOffset.UtcNow;
if (string.IsNullOrWhiteSpace(_options.ConnectionString))
{
RecordFallbackEvent(evt, recordedAt);
return;
}
try
{
@@ -42,16 +50,32 @@ internal sealed class SearchAnalyticsService
cmd.Parameters.AddWithValue("duration_ms", (object?)evt.DurationMs ?? DBNull.Value);
await cmd.ExecuteNonQueryAsync(ct).ConfigureAwait(false);
RecordFallbackEvent(evt, recordedAt);
}
catch (Exception ex)
{
_logger.LogWarning(ex, "Failed to record search analytics event");
RecordFallbackEvent(evt, recordedAt);
}
}
public async Task RecordEventsAsync(IReadOnlyList<SearchAnalyticsEvent> events, CancellationToken ct = default)
{
if (string.IsNullOrWhiteSpace(_options.ConnectionString) || events.Count == 0) return;
if (events.Count == 0)
{
return;
}
var recordedAt = DateTimeOffset.UtcNow;
if (string.IsNullOrWhiteSpace(_options.ConnectionString))
{
foreach (var evt in events)
{
RecordFallbackEvent(evt, recordedAt);
}
return;
}
try
{
@@ -75,18 +99,27 @@ internal sealed class SearchAnalyticsService
cmd.Parameters.AddWithValue("duration_ms", (object?)evt.DurationMs ?? DBNull.Value);
await cmd.ExecuteNonQueryAsync(ct).ConfigureAwait(false);
RecordFallbackEvent(evt, recordedAt);
}
}
catch (Exception ex)
{
_logger.LogWarning(ex, "Failed to record search analytics events batch ({Count} events)", events.Count);
foreach (var evt in events)
{
RecordFallbackEvent(evt, recordedAt);
}
}
}
public async Task<IReadOnlyDictionary<string, int>> GetPopularityMapAsync(string tenantId, int days = 30, CancellationToken ct = default)
{
if (string.IsNullOrWhiteSpace(_options.ConnectionString))
{
return BuildFallbackPopularityMap(tenantId, days);
}
var map = new Dictionary<string, int>(StringComparer.Ordinal);
if (string.IsNullOrWhiteSpace(_options.ConnectionString)) return map;
try
{
@@ -116,6 +149,7 @@ internal sealed class SearchAnalyticsService
catch (Exception ex)
{
_logger.LogWarning(ex, "Failed to load popularity map");
return BuildFallbackPopularityMap(tenantId, days);
}
return map;
@@ -123,7 +157,12 @@ internal sealed class SearchAnalyticsService
public async Task RecordHistoryAsync(string tenantId, string userId, string query, int resultCount, CancellationToken ct = default)
{
if (string.IsNullOrWhiteSpace(_options.ConnectionString)) return;
var recordedAt = DateTimeOffset.UtcNow;
if (string.IsNullOrWhiteSpace(_options.ConnectionString))
{
RecordFallbackHistory(tenantId, userId, query, resultCount, recordedAt);
return;
}
try
{
@@ -156,17 +195,23 @@ internal sealed class SearchAnalyticsService
trimCmd.Parameters.AddWithValue("tenant_id", tenantId);
trimCmd.Parameters.AddWithValue("user_id", userId);
await trimCmd.ExecuteNonQueryAsync(ct).ConfigureAwait(false);
RecordFallbackHistory(tenantId, userId, query, resultCount, recordedAt);
}
catch (Exception ex)
{
_logger.LogWarning(ex, "Failed to record search history");
RecordFallbackHistory(tenantId, userId, query, resultCount, recordedAt);
}
}
public async Task<IReadOnlyList<SearchHistoryEntry>> GetHistoryAsync(string tenantId, string userId, int limit = 50, CancellationToken ct = default)
{
if (string.IsNullOrWhiteSpace(_options.ConnectionString))
{
return GetFallbackHistory(tenantId, userId, limit);
}
var entries = new List<SearchHistoryEntry>();
if (string.IsNullOrWhiteSpace(_options.ConnectionString)) return entries;
try
{
@@ -197,6 +242,7 @@ internal sealed class SearchAnalyticsService
catch (Exception ex)
{
_logger.LogWarning(ex, "Failed to load search history");
return GetFallbackHistory(tenantId, userId, limit);
}
return entries;
@@ -204,7 +250,11 @@ internal sealed class SearchAnalyticsService
public async Task ClearHistoryAsync(string tenantId, string userId, CancellationToken ct = default)
{
if (string.IsNullOrWhiteSpace(_options.ConnectionString)) return;
if (string.IsNullOrWhiteSpace(_options.ConnectionString))
{
ClearFallbackHistory(tenantId, userId);
return;
}
try
{
@@ -219,10 +269,12 @@ internal sealed class SearchAnalyticsService
cmd.Parameters.AddWithValue("user_id", userId);
await cmd.ExecuteNonQueryAsync(ct).ConfigureAwait(false);
ClearFallbackHistory(tenantId, userId);
}
catch (Exception ex)
{
_logger.LogWarning(ex, "Failed to clear search history");
ClearFallbackHistory(tenantId, userId);
}
}
@@ -236,9 +288,14 @@ internal sealed class SearchAnalyticsService
string tenantId, string query, int limit = 3, CancellationToken ct = default)
{
var results = new List<string>();
if (string.IsNullOrWhiteSpace(_options.ConnectionString) || string.IsNullOrWhiteSpace(query))
if (string.IsNullOrWhiteSpace(query))
return results;
if (string.IsNullOrWhiteSpace(_options.ConnectionString))
{
return FindFallbackSimilarQueries(tenantId, query, limit);
}
try
{
await using var conn = new NpgsqlConnection(_options.ConnectionString);
@@ -268,6 +325,7 @@ internal sealed class SearchAnalyticsService
catch (Exception ex)
{
_logger.LogWarning(ex, "Failed to find similar successful queries for '{Query}'", query);
return FindFallbackSimilarQueries(tenantId, query, limit);
}
return results;
@@ -275,7 +333,11 @@ internal sealed class SearchAnalyticsService
public async Task DeleteHistoryEntryAsync(string tenantId, string userId, string historyId, CancellationToken ct = default)
{
if (string.IsNullOrWhiteSpace(_options.ConnectionString)) return;
if (string.IsNullOrWhiteSpace(_options.ConnectionString))
{
DeleteFallbackHistoryEntry(tenantId, userId, historyId);
return;
}
if (!Guid.TryParse(historyId, out _)) return;
@@ -293,12 +355,204 @@ internal sealed class SearchAnalyticsService
cmd.Parameters.AddWithValue("history_id", Guid.Parse(historyId));
await cmd.ExecuteNonQueryAsync(ct).ConfigureAwait(false);
DeleteFallbackHistoryEntry(tenantId, userId, historyId);
}
catch (Exception ex)
{
_logger.LogWarning(ex, "Failed to delete search history entry");
DeleteFallbackHistoryEntry(tenantId, userId, historyId);
}
}
/// <summary>
/// Returns a point-in-time copy of the in-memory fallback events for one tenant,
/// restricted to <paramref name="window"/> (measured back from now) and ordered
/// oldest-first. The returned array is a snapshot: callers may enumerate it
/// without holding the lock.
/// </summary>
internal IReadOnlyList<(SearchAnalyticsEvent Event, DateTimeOffset RecordedAt)> GetFallbackEventsSnapshot(
string tenantId,
TimeSpan window)
{
var cutoff = DateTimeOffset.UtcNow - window;
lock (_fallbackLock)
{
// Tenant match is case-insensitive; materialize with ToArray() inside the
// lock so the snapshot cannot observe concurrent mutation.
return _fallbackEvents
.Where(item => item.Event.TenantId.Equals(tenantId, StringComparison.OrdinalIgnoreCase))
.Where(item => item.RecordedAt >= cutoff)
.OrderBy(item => item.RecordedAt)
.ToArray();
}
}
/// <summary>
/// Collects the distinct tenant identifiers (case-insensitive) present in the
/// in-memory fallback event buffer, skipping blank tenant ids.
/// </summary>
internal IReadOnlySet<string> GetKnownFallbackTenants()
{
    lock (_fallbackLock)
    {
        var tenants = new HashSet<string>(StringComparer.OrdinalIgnoreCase);
        foreach (var (evt, _) in _fallbackEvents)
        {
            if (!string.IsNullOrWhiteSpace(evt.TenantId))
            {
                tenants.Add(evt.TenantId);
            }
        }
        return tenants;
    }
}
/// <summary>
/// Builds the popularity map from the in-memory fallback buffer when the
/// database is unavailable: counts "click" events per entity key for the tenant
/// within the last <paramref name="days"/> days (minimum one day).
/// </summary>
private IReadOnlyDictionary<string, int> BuildFallbackPopularityMap(string tenantId, int days)
{
var cutoff = DateTimeOffset.UtcNow - TimeSpan.FromDays(Math.Max(1, days));
lock (_fallbackLock)
{
// Tenant/event-type matching is case-insensitive, but entity keys group and
// key the result with ordinal (case-sensitive) comparison.
return _fallbackEvents
.Where(item => item.RecordedAt >= cutoff)
.Select(item => item.Event)
.Where(evt => evt.TenantId.Equals(tenantId, StringComparison.OrdinalIgnoreCase))
.Where(evt => evt.EventType.Equals("click", StringComparison.OrdinalIgnoreCase))
.Where(evt => !string.IsNullOrWhiteSpace(evt.EntityKey))
.GroupBy(evt => evt.EntityKey!, StringComparer.Ordinal)
.ToDictionary(group => group.Key, group => group.Count(), StringComparer.Ordinal);
}
}
/// <summary>
/// Returns the in-memory search history for a tenant/user pair, most recent
/// first, capped at <paramref name="limit"/> entries (minimum one).
/// Used when the database connection string is absent or the query failed.
/// </summary>
private IReadOnlyList<SearchHistoryEntry> GetFallbackHistory(string tenantId, string userId, int limit)
{
lock (_fallbackLock)
{
// Tenant and user are matched case-insensitively; snapshot via ToArray()
// so callers enumerate outside the lock safely.
return _fallbackHistory
.Where(item =>
item.Key.TenantId.Equals(tenantId, StringComparison.OrdinalIgnoreCase) &&
item.Key.UserId.Equals(userId, StringComparison.OrdinalIgnoreCase))
.Select(item => item.Value)
.OrderByDescending(entry => entry.SearchedAt)
.Take(Math.Max(1, limit))
.ToArray();
}
}
/// <summary>
/// Appends one analytics event to the bounded in-memory buffer, evicting the
/// oldest entries once the 20,000-entry cap is exceeded.
/// </summary>
private void RecordFallbackEvent(SearchAnalyticsEvent evt, DateTimeOffset recordedAt)
{
    const int capacity = 20_000;
    lock (_fallbackLock)
    {
        _fallbackEvents.Add((evt, recordedAt));
        var excess = _fallbackEvents.Count - capacity;
        if (excess > 0)
        {
            // Drop from the front: the list is append-only, so index 0 is oldest.
            _fallbackEvents.RemoveRange(0, excess);
        }
    }
}
/// <summary>
/// Mirrors one history write into the in-memory fallback store (used when the
/// database is unavailable, and as a shadow copy alongside successful writes).
/// The entry is keyed by (tenant, user, trimmed query) so repeating a query
/// refreshes its timestamp instead of duplicating it; each tenant/user pair is
/// capped at the 50 most recent entries.
/// </summary>
private void RecordFallbackHistory(string tenantId, string userId, string query, int resultCount, DateTimeOffset recordedAt)
{
    // Blank identity or query is silently ignored — nothing meaningful to record.
    if (string.IsNullOrWhiteSpace(tenantId) || string.IsNullOrWhiteSpace(userId) || string.IsNullOrWhiteSpace(query))
    {
        return;
    }

    var normalizedQuery = query.Trim();
    (string TenantId, string UserId, string Query) key = (tenantId, userId, normalizedQuery);
    // The id is deterministic per (tenant, user, normalized query) so repeated
    // searches map to the same history entry.
    var historyId = BuildFallbackHistoryId(tenantId, userId, normalizedQuery);
    var entry = new SearchHistoryEntry(historyId, normalizedQuery, resultCount, recordedAt.UtcDateTime);

    lock (_fallbackLock)
    {
        _fallbackHistory[key] = entry;

        // Trim to the 50 newest entries per user. FIX: match tenant/user with
        // OrdinalIgnoreCase so the trim scope agrees with the case-insensitive
        // reads in GetFallbackHistory/FindFallbackSimilarQueries (previously the
        // ordinal '==' here could let differently-cased keys evade the cap).
        var overflow = _fallbackHistory.Keys
            .Where(k => k.TenantId.Equals(key.TenantId, StringComparison.OrdinalIgnoreCase) &&
                        k.UserId.Equals(key.UserId, StringComparison.OrdinalIgnoreCase))
            .Select(k => (Key: k, Entry: _fallbackHistory[k]))
            .OrderByDescending(item => item.Entry.SearchedAt)
            .Skip(50)
            .Select(item => item.Key)
            .ToArray();
        foreach (var removeKey in overflow)
        {
            _fallbackHistory.Remove(removeKey);
        }
    }
}
/// <summary>
/// Removes every in-memory history entry belonging to this exact tenant/user
/// pair. Note: matching here is ordinal (the original key strings), unlike the
/// case-insensitive reads — behavior preserved from the original implementation.
/// </summary>
private void ClearFallbackHistory(string tenantId, string userId)
{
    lock (_fallbackLock)
    {
        // Snapshot the doomed keys first: the dictionary must not be mutated
        // while its Keys collection is being enumerated.
        var doomed = new List<(string TenantId, string UserId, string Query)>();
        foreach (var key in _fallbackHistory.Keys)
        {
            if (string.Equals(key.TenantId, tenantId, StringComparison.Ordinal) &&
                string.Equals(key.UserId, userId, StringComparison.Ordinal))
            {
                doomed.Add(key);
            }
        }
        foreach (var key in doomed)
        {
            _fallbackHistory.Remove(key);
        }
    }
}
/// <summary>
/// Deletes the single in-memory history entry whose deterministic id matches
/// <paramref name="historyId"/>. A no-op when the id is blank or no entry matches.
/// </summary>
private void DeleteFallbackHistoryEntry(string tenantId, string userId, string historyId)
{
if (string.IsNullOrWhiteSpace(historyId))
{
return;
}
lock (_fallbackLock)
{
// NOTE(review): tenant/user are matched with ordinal '==' here, while reads
// elsewhere use OrdinalIgnoreCase — confirm whether exact-case matching is
// intended for deletes.
var hit = _fallbackHistory.Keys
.FirstOrDefault(key =>
key.TenantId == tenantId &&
key.UserId == userId &&
BuildFallbackHistoryId(key.TenantId, key.UserId, key.Query).Equals(historyId, StringComparison.Ordinal));
// Keys are value tuples, so FirstOrDefault yields (null, null, null) on a
// miss; a non-blank TenantId therefore means a real key was found.
if (!string.IsNullOrWhiteSpace(hit.TenantId))
{
_fallbackHistory.Remove(hit);
}
}
}
/// <summary>
/// Finds up to <paramref name="limit"/> previously-recorded queries for the
/// tenant that are token-similar to <paramref name="query"/> (Jaccard score
/// above 0.2), excluding exact case-insensitive matches of the query itself.
/// Results are ordered best-score-first, with a case-insensitive alphabetical
/// tiebreak for deterministic output.
/// </summary>
private IReadOnlyList<string> FindFallbackSimilarQueries(string tenantId, string query, int limit)
{
var normalized = query.Trim();
if (string.IsNullOrWhiteSpace(normalized))
{
return [];
}
lock (_fallbackLock)
{
return _fallbackHistory
.Where(item => item.Key.TenantId.Equals(tenantId, StringComparison.OrdinalIgnoreCase))
.Select(item => item.Value)
.Where(entry => !string.Equals(entry.Query, normalized, StringComparison.OrdinalIgnoreCase))
.Select(entry => (entry.Query, Score: ComputeTokenSimilarity(entry.Query, normalized)))
.Where(item => item.Score > 0.2d)
.OrderByDescending(item => item.Score)
.ThenBy(item => item.Query, StringComparer.OrdinalIgnoreCase)
.Take(Math.Max(1, limit))
.Select(item => item.Query)
.ToArray();
}
}
/// <summary>
/// Derives a stable, GUID-shaped identifier from (tenant, user, query): the
/// query is trimmed and lower-cased so equivalent searches collapse to the same
/// id, then the first 16 bytes of a SHA-256 digest of "tenant|user|query" are
/// folded into a <see cref="Guid"/> rendered in "D" format.
/// </summary>
private static string BuildFallbackHistoryId(string tenantId, string userId, string query)
{
    var canonical = string.Concat(tenantId, "|", userId, "|", query.Trim().ToLowerInvariant());
    var payload = System.Text.Encoding.UTF8.GetBytes(canonical);
    var digest = System.Security.Cryptography.SHA256.HashData(payload);
    return new Guid(digest.AsSpan(0, 16)).ToString("D");
}
/// <summary>
/// Jaccard similarity of the two strings' whitespace-separated, lower-cased
/// token sets: |intersection| / |union|. Returns 0 when either side has no tokens.
/// </summary>
private static double ComputeTokenSimilarity(string a, string b)
{
    // Local helper: split on spaces, drop empties, lower-case, de-duplicate (ordinal).
    static HashSet<string> Tokenize(string text)
    {
        var tokens = new HashSet<string>(StringComparer.Ordinal);
        foreach (var raw in text.Split(' ', StringSplitOptions.RemoveEmptyEntries | StringSplitOptions.TrimEntries))
        {
            tokens.Add(raw.ToLowerInvariant());
        }
        return tokens;
    }

    var left = Tokenize(a);
    var right = Tokenize(b);
    if (left.Count == 0 || right.Count == 0)
    {
        return 0d;
    }

    // |A ∪ B| = |A| + |B| − |A ∩ B| — avoids building intermediate sequences.
    var intersectionCount = 0;
    foreach (var token in left)
    {
        if (right.Contains(token))
        {
            intersectionCount++;
        }
    }
    var unionCount = left.Count + right.Count - intersectionCount;
    return unionCount == 0 ? 0d : (double)intersectionCount / unionCount;
}
}
internal record SearchAnalyticsEvent(

View File

@@ -14,23 +14,38 @@ internal sealed class SearchQualityMonitor
{
private static readonly HashSet<string> AllowedSignals = new(StringComparer.Ordinal) { "helpful", "not_helpful" };
private static readonly HashSet<string> AllowedAlertStatuses = new(StringComparer.Ordinal) { "acknowledged", "resolved" };
private const int DefaultAlertWindowDays = 7;
private const int ZeroResultAlertThreshold = 3;
private const int NegativeFeedbackAlertThreshold = 3;
private readonly KnowledgeSearchOptions _options;
private readonly ILogger<SearchQualityMonitor> _logger;
private readonly SearchAnalyticsService _analyticsService;
private readonly object _fallbackLock = new();
private readonly List<(SearchFeedbackEntry Entry, DateTimeOffset CreatedAt)> _fallbackFeedback = [];
private readonly List<SearchQualityAlertEntry> _fallbackAlerts = [];
public SearchQualityMonitor(
IOptions<KnowledgeSearchOptions> options,
ILogger<SearchQualityMonitor> logger)
ILogger<SearchQualityMonitor> logger,
SearchAnalyticsService? analyticsService = null)
{
_options = options.Value;
_logger = logger;
_analyticsService = analyticsService ??
new SearchAnalyticsService(options, Microsoft.Extensions.Logging.Abstractions.NullLogger<SearchAnalyticsService>.Instance);
}
// ----- Feedback CRUD -----
public async Task StoreFeedbackAsync(SearchFeedbackEntry entry, CancellationToken ct = default)
{
if (string.IsNullOrWhiteSpace(_options.ConnectionString)) return;
var createdAt = DateTimeOffset.UtcNow;
if (string.IsNullOrWhiteSpace(_options.ConnectionString))
{
StoreFallbackFeedback(entry, createdAt);
return;
}
try
{
@@ -53,15 +68,114 @@ internal sealed class SearchQualityMonitor
cmd.Parameters.AddWithValue("comment", (object?)entry.Comment ?? DBNull.Value);
await cmd.ExecuteNonQueryAsync(ct).ConfigureAwait(false);
StoreFallbackFeedback(entry, createdAt);
}
catch (Exception ex)
{
_logger.LogWarning(ex, "Failed to store search feedback");
StoreFallbackFeedback(entry, createdAt);
}
}
// ----- Quality Alerts -----
/// <summary>
/// Refreshes quality alerts for every tenant this monitor knows about. Tenants
/// are gathered from three sources: the analytics service's in-memory fallback
/// events, the in-memory feedback/alert buffers, and — when a connection string
/// is configured — a best-effort distinct-tenant query across the analytics
/// tables. Returns the number of tenants refreshed.
/// </summary>
/// <remarks>
/// Database enumeration failures are logged at Debug and swallowed: the
/// in-memory tenant set still gets refreshed.
/// </remarks>
public async Task<int> RefreshAlertsForKnownTenantsAsync(CancellationToken ct = default)
{
var tenants = new HashSet<string>(StringComparer.OrdinalIgnoreCase);
foreach (var tenant in _analyticsService.GetKnownFallbackTenants())
{
tenants.Add(tenant);
}
// Also include tenants only seen in this monitor's own fallback buffers.
lock (_fallbackLock)
{
foreach (var item in _fallbackFeedback)
{
tenants.Add(item.Entry.TenantId);
}
foreach (var alert in _fallbackAlerts)
{
tenants.Add(alert.TenantId);
}
}
if (!string.IsNullOrWhiteSpace(_options.ConnectionString))
{
try
{
await using var conn = new NpgsqlConnection(_options.ConnectionString);
await conn.OpenAsync(ct).ConfigureAwait(false);
await using var cmd = new NpgsqlCommand(@"
SELECT DISTINCT tenant_id FROM advisoryai.search_events
UNION
SELECT DISTINCT tenant_id FROM advisoryai.search_feedback
UNION
SELECT DISTINCT tenant_id FROM advisoryai.search_quality_alerts", conn);
await using var reader = await cmd.ExecuteReaderAsync(ct).ConfigureAwait(false);
while (await reader.ReadAsync(ct).ConfigureAwait(false))
{
if (!reader.IsDBNull(0))
{
tenants.Add(reader.GetString(0));
}
}
}
catch (Exception ex)
{
// Best-effort: DB unavailability must not prevent in-memory refresh.
_logger.LogDebug(ex, "Failed to enumerate tenants for quality alert refresh.");
}
}
var refreshed = 0;
foreach (var tenantId in tenants)
{
await RefreshAlertsAsync(tenantId, ct).ConfigureAwait(false);
refreshed++;
}
return refreshed;
}
/// <summary>
/// Recomputes quality alerts for one tenant over the default alert window:
/// upserts a "zero_result" alert per query that repeatedly returned nothing,
/// and a "high_negative_feedback" alert per query that repeatedly drew
/// not-helpful feedback. A no-op for blank tenant ids.
/// </summary>
public async Task RefreshAlertsAsync(string tenantId, CancellationToken ct = default)
{
if (string.IsNullOrWhiteSpace(tenantId))
{
return;
}
var window = TimeSpan.FromDays(DefaultAlertWindowDays);
var zeroResultCandidates = await LoadZeroResultCandidatesAsync(tenantId, window, ct).ConfigureAwait(false);
foreach (var candidate in zeroResultCandidates)
{
await UpsertAlertAsync(
tenantId,
alertType: "zero_result",
candidate.Query,
candidate.OccurrenceCount,
candidate.FirstSeen,
candidate.LastSeen,
ct).ConfigureAwait(false);
}
var negativeFeedbackCandidates = await LoadNegativeFeedbackCandidatesAsync(tenantId, window, ct).ConfigureAwait(false);
foreach (var candidate in negativeFeedbackCandidates)
{
await UpsertAlertAsync(
tenantId,
alertType: "high_negative_feedback",
candidate.Query,
candidate.OccurrenceCount,
candidate.FirstSeen,
candidate.LastSeen,
ct).ConfigureAwait(false);
}
}
public async Task<IReadOnlyList<SearchQualityAlertEntry>> GetAlertsAsync(
string tenantId,
string? status = null,
@@ -70,7 +184,22 @@ internal sealed class SearchQualityMonitor
CancellationToken ct = default)
{
var alerts = new List<SearchQualityAlertEntry>();
if (string.IsNullOrWhiteSpace(_options.ConnectionString)) return alerts;
await RefreshAlertsAsync(tenantId, ct).ConfigureAwait(false);
if (string.IsNullOrWhiteSpace(_options.ConnectionString))
{
lock (_fallbackLock)
{
return _fallbackAlerts
.Where(entry => entry.TenantId.Equals(tenantId, StringComparison.OrdinalIgnoreCase))
.Where(entry => string.IsNullOrWhiteSpace(status) || entry.Status.Equals(status, StringComparison.Ordinal))
.Where(entry => string.IsNullOrWhiteSpace(alertType) || entry.AlertType.Equals(alertType, StringComparison.Ordinal))
.OrderByDescending(entry => entry.OccurrenceCount)
.ThenByDescending(entry => entry.LastSeen)
.Take(Math.Max(1, limit))
.Select(CloneAlertEntry)
.ToArray();
}
}
try
{
@@ -132,10 +261,49 @@ internal sealed class SearchQualityMonitor
string? resolution,
CancellationToken ct = default)
{
if (string.IsNullOrWhiteSpace(_options.ConnectionString)) return null;
if (string.IsNullOrWhiteSpace(tenantId)) return null;
if (!Guid.TryParse(alertId, out var parsedAlertId)) return null;
if (!AllowedAlertStatuses.Contains(status)) return null;
if (string.IsNullOrWhiteSpace(_options.ConnectionString))
{
lock (_fallbackLock)
{
var existing = _fallbackAlerts.FirstOrDefault(entry =>
entry.TenantId.Equals(tenantId, StringComparison.OrdinalIgnoreCase) &&
entry.AlertId.Equals(alertId, StringComparison.OrdinalIgnoreCase));
if (existing is null)
{
return null;
}
var updated = new SearchQualityAlertEntry
{
AlertId = existing.AlertId,
TenantId = existing.TenantId,
AlertType = existing.AlertType,
Query = existing.Query,
OccurrenceCount = existing.OccurrenceCount,
FirstSeen = existing.FirstSeen,
LastSeen = existing.LastSeen,
Status = status,
Resolution = resolution,
CreatedAt = existing.CreatedAt,
};
var index = _fallbackAlerts.FindIndex(entry =>
entry.TenantId.Equals(tenantId, StringComparison.OrdinalIgnoreCase) &&
entry.AlertId.Equals(alertId, StringComparison.OrdinalIgnoreCase));
if (index >= 0)
{
_fallbackAlerts[index] = updated;
}
return CloneAlertEntry(updated);
}
}
try
{
await using var conn = new NpgsqlConnection(_options.ConnectionString);
@@ -174,6 +342,13 @@ internal sealed class SearchQualityMonitor
catch (Exception ex)
{
_logger.LogWarning(ex, "Failed to update search quality alert {AlertId}", alertId);
lock (_fallbackLock)
{
var existing = _fallbackAlerts.FirstOrDefault(entry =>
entry.TenantId.Equals(tenantId, StringComparison.OrdinalIgnoreCase) &&
entry.AlertId.Equals(alertId, StringComparison.OrdinalIgnoreCase));
return existing is null ? null : CloneAlertEntry(existing);
}
}
return null;
@@ -187,14 +362,13 @@ internal sealed class SearchQualityMonitor
CancellationToken ct = default)
{
var metrics = new SearchQualityMetricsEntry { Period = period };
if (string.IsNullOrWhiteSpace(_options.ConnectionString)) return metrics;
var days = period switch
var days = ResolvePeriodDays(period);
if (string.IsNullOrWhiteSpace(_options.ConnectionString))
{
"24h" => 1,
"30d" => 30,
_ => 7,
};
return BuildFallbackMetrics(tenantId, days, period);
}
metrics.Period = period;
try
{
@@ -204,12 +378,18 @@ internal sealed class SearchQualityMonitor
// Total searches and zero-result rate from search_events
await using var searchCmd = new NpgsqlCommand(@"
SELECT
COUNT(*) AS total_searches,
COALESCE(AVG(CASE WHEN result_count = 0 THEN 1.0 ELSE 0.0 END), 0) AS zero_result_rate,
COALESCE(AVG(result_count), 0) AS avg_result_count
COUNT(*) FILTER (WHERE event_type IN ('query', 'zero_result')) AS total_searches,
COALESCE(
COUNT(*) FILTER (WHERE event_type = 'zero_result')::double precision /
NULLIF(COUNT(*) FILTER (WHERE event_type IN ('query', 'zero_result')), 0),
0
) AS zero_result_rate,
COALESCE(
AVG(result_count) FILTER (WHERE event_type IN ('query', 'zero_result') AND result_count IS NOT NULL),
0
) AS avg_result_count
FROM advisoryai.search_events
WHERE event_type = 'search'
AND tenant_id = @tenant_id
WHERE tenant_id = @tenant_id
AND created_at > now() - make_interval(days => @days)", conn);
searchCmd.Parameters.AddWithValue("tenant_id", tenantId);
@@ -244,11 +424,346 @@ internal sealed class SearchQualityMonitor
catch (Exception ex)
{
_logger.LogWarning(ex, "Failed to load search quality metrics");
return BuildFallbackMetrics(tenantId, days, period);
}
return metrics;
}
/// <summary>
/// Maps a metrics period label to its day count: "24h" → 1, "30d" → 30,
/// anything else (including null or "7d") → 7.
/// </summary>
private static int ResolvePeriodDays(string period)
{
    if (period == "24h")
    {
        return 1;
    }
    if (period == "30d")
    {
        return 30;
    }
    return 7;
}
/// <summary>
/// Computes quality metrics from the in-memory fallback buffers when the
/// database is unavailable. "Searches" are events of type "query" or
/// "zero_result"; zero-result rate is the share of "zero_result" among those
/// (as a 0–100 percentage, one decimal); feedback score is the percentage of
/// "helpful" signals among all feedback in the window.
/// </summary>
private SearchQualityMetricsEntry BuildFallbackMetrics(string tenantId, int days, string period)
{
var window = TimeSpan.FromDays(Math.Max(1, days));
var events = _analyticsService.GetFallbackEventsSnapshot(tenantId, window)
.Select(item => item.Event)
.ToArray();
var totalSearches = events.Count(evt =>
evt.EventType.Equals("query", StringComparison.OrdinalIgnoreCase) ||
evt.EventType.Equals("zero_result", StringComparison.OrdinalIgnoreCase));
var zeroResults = events.Count(evt => evt.EventType.Equals("zero_result", StringComparison.OrdinalIgnoreCase));
// DefaultIfEmpty(0) keeps Average() from throwing when no search event
// carries a result count.
var avgResultCount = events
.Where(evt => evt.EventType.Equals("query", StringComparison.OrdinalIgnoreCase) || evt.EventType.Equals("zero_result", StringComparison.OrdinalIgnoreCase))
.Where(evt => evt.ResultCount.HasValue)
.Select(evt => evt.ResultCount!.Value)
.DefaultIfEmpty(0)
.Average();
var feedbackSignals = GetFallbackFeedback(tenantId, window)
.Select(item => item.Entry.Signal)
.ToArray();
var helpfulCount = feedbackSignals.Count(signal => signal.Equals("helpful", StringComparison.Ordinal));
var feedbackScore = feedbackSignals.Length == 0
? 0d
: (double)helpfulCount / feedbackSignals.Length * 100d;
return new SearchQualityMetricsEntry
{
TotalSearches = totalSearches,
ZeroResultRate = totalSearches == 0 ? 0d : Math.Round((double)zeroResults / totalSearches * 100d, 1),
AvgResultCount = Math.Round(avgResultCount, 1),
FeedbackScore = Math.Round(feedbackScore, 1),
Period = period,
};
}
/// <summary>
/// Loads queries that hit zero results at least <c>ZeroResultAlertThreshold</c>
/// times within the window, with occurrence counts and first/last seen times,
/// ordered most-frequent-first. Tries the database first; on failure (or when
/// no connection string is configured) falls back to the analytics service's
/// in-memory event snapshot.
/// </summary>
private async Task<IReadOnlyList<AlertCandidate>> LoadZeroResultCandidatesAsync(
string tenantId,
TimeSpan window,
CancellationToken ct)
{
var candidates = new List<AlertCandidate>();
var days = Math.Max(1, (int)Math.Ceiling(window.TotalDays));
if (!string.IsNullOrWhiteSpace(_options.ConnectionString))
{
try
{
await using var conn = new NpgsqlConnection(_options.ConnectionString);
await conn.OpenAsync(ct).ConfigureAwait(false);
await using var cmd = new NpgsqlCommand(@"
SELECT query, COUNT(*)::int AS occurrence_count, MIN(created_at), MAX(created_at)
FROM advisoryai.search_events
WHERE tenant_id = @tenant_id
AND event_type = 'zero_result'
AND created_at > now() - make_interval(days => @days)
GROUP BY query
HAVING COUNT(*) >= @threshold
ORDER BY occurrence_count DESC", conn);
cmd.Parameters.AddWithValue("tenant_id", tenantId);
cmd.Parameters.AddWithValue("days", days);
cmd.Parameters.AddWithValue("threshold", ZeroResultAlertThreshold);
await using var reader = await cmd.ExecuteReaderAsync(ct).ConfigureAwait(false);
while (await reader.ReadAsync(ct).ConfigureAwait(false))
{
// Timestamps are read as UTC wall-clock values (offset zero).
candidates.Add(new AlertCandidate(
reader.GetString(0),
reader.GetInt32(1),
new DateTimeOffset(reader.GetDateTime(2), TimeSpan.Zero),
new DateTimeOffset(reader.GetDateTime(3), TimeSpan.Zero)));
}
return candidates;
}
catch (Exception ex)
{
// Fall through to the in-memory path below.
_logger.LogDebug(ex, "Failed to load zero-result alert candidates from database.");
}
}
// In-memory equivalent of the SQL above: group zero-result events by trimmed
// query (case-insensitive) and keep groups meeting the threshold.
var fallbackCandidates = _analyticsService.GetFallbackEventsSnapshot(tenantId, window)
.Where(item => item.Event.EventType.Equals("zero_result", StringComparison.OrdinalIgnoreCase))
.Where(item => !string.IsNullOrWhiteSpace(item.Event.Query))
.GroupBy(item => item.Event.Query.Trim(), StringComparer.OrdinalIgnoreCase)
.Select(group => new AlertCandidate(
group.Key,
group.Count(),
group.Min(item => item.RecordedAt),
group.Max(item => item.RecordedAt)))
.Where(candidate => candidate.OccurrenceCount >= ZeroResultAlertThreshold)
.OrderByDescending(candidate => candidate.OccurrenceCount)
.ToArray();
return fallbackCandidates;
}
/// <summary>
/// Loads queries that collected at least <c>NegativeFeedbackAlertThreshold</c>
/// "not_helpful" feedback signals within the window, ordered most-frequent-first.
/// Tries the database first; on failure (or with no connection string) falls
/// back to this monitor's in-memory feedback buffer.
/// </summary>
private async Task<IReadOnlyList<AlertCandidate>> LoadNegativeFeedbackCandidatesAsync(
string tenantId,
TimeSpan window,
CancellationToken ct)
{
var candidates = new List<AlertCandidate>();
var days = Math.Max(1, (int)Math.Ceiling(window.TotalDays));
if (!string.IsNullOrWhiteSpace(_options.ConnectionString))
{
try
{
await using var conn = new NpgsqlConnection(_options.ConnectionString);
await conn.OpenAsync(ct).ConfigureAwait(false);
await using var cmd = new NpgsqlCommand(@"
SELECT query, COUNT(*)::int AS occurrence_count, MIN(created_at), MAX(created_at)
FROM advisoryai.search_feedback
WHERE tenant_id = @tenant_id
AND signal = 'not_helpful'
AND created_at > now() - make_interval(days => @days)
GROUP BY query
HAVING COUNT(*) >= @threshold
ORDER BY occurrence_count DESC", conn);
cmd.Parameters.AddWithValue("tenant_id", tenantId);
cmd.Parameters.AddWithValue("days", days);
cmd.Parameters.AddWithValue("threshold", NegativeFeedbackAlertThreshold);
await using var reader = await cmd.ExecuteReaderAsync(ct).ConfigureAwait(false);
while (await reader.ReadAsync(ct).ConfigureAwait(false))
{
// Timestamps are read as UTC wall-clock values (offset zero).
candidates.Add(new AlertCandidate(
reader.GetString(0),
reader.GetInt32(1),
new DateTimeOffset(reader.GetDateTime(2), TimeSpan.Zero),
new DateTimeOffset(reader.GetDateTime(3), TimeSpan.Zero)));
}
return candidates;
}
catch (Exception ex)
{
// Fall through to the in-memory path below.
_logger.LogDebug(ex, "Failed to load negative-feedback alert candidates from database.");
}
}
// In-memory equivalent of the SQL above, grouped by trimmed query
// (case-insensitive) with the same threshold and ordering.
return GetFallbackFeedback(tenantId, window)
.Where(item => item.Entry.Signal.Equals("not_helpful", StringComparison.Ordinal))
.Where(item => !string.IsNullOrWhiteSpace(item.Entry.Query))
.GroupBy(item => item.Entry.Query.Trim(), StringComparer.OrdinalIgnoreCase)
.Select(group => new AlertCandidate(
group.Key,
group.Count(),
group.Min(item => item.CreatedAt),
group.Max(item => item.CreatedAt)))
.Where(candidate => candidate.OccurrenceCount >= NegativeFeedbackAlertThreshold)
.OrderByDescending(candidate => candidate.OccurrenceCount)
.ToArray();
}
/// <summary>
/// Creates or refreshes one quality alert. When a connection string is present,
/// the database copy is upserted first (re-opening the newest non-resolved
/// alert for the same tenant/type/query, widening its first/last-seen span);
/// then — unconditionally — the in-memory fallback list is updated with the
/// same semantics so reads keep working if the database later fails.
/// </summary>
/// <remarks>
/// Database errors are logged at Debug and swallowed; the in-memory upsert
/// still runs, so the alert is never silently lost.
/// </remarks>
private async Task UpsertAlertAsync(
string tenantId,
string alertType,
string query,
int occurrenceCount,
DateTimeOffset firstSeen,
DateTimeOffset lastSeen,
CancellationToken ct)
{
if (!string.IsNullOrWhiteSpace(_options.ConnectionString))
{
try
{
await using var conn = new NpgsqlConnection(_options.ConnectionString);
await conn.OpenAsync(ct).ConfigureAwait(false);
// Look for an existing, not-yet-resolved alert for the same key; resolved
// alerts are intentionally excluded so recurrence creates a fresh alert.
await using var findCmd = new NpgsqlCommand(@"
SELECT alert_id
FROM advisoryai.search_quality_alerts
WHERE tenant_id = @tenant_id
AND alert_type = @alert_type
AND query = @query
AND status <> 'resolved'
ORDER BY created_at DESC
LIMIT 1", conn);
findCmd.Parameters.AddWithValue("tenant_id", tenantId);
findCmd.Parameters.AddWithValue("alert_type", alertType);
findCmd.Parameters.AddWithValue("query", query);
var existingId = await findCmd.ExecuteScalarAsync(ct).ConfigureAwait(false);
if (existingId is Guid alertId)
{
// Refresh the open alert: widen the seen-span, reset status/resolution.
await using var updateCmd = new NpgsqlCommand(@"
UPDATE advisoryai.search_quality_alerts
SET occurrence_count = @occurrence_count,
first_seen = LEAST(first_seen, @first_seen),
last_seen = GREATEST(last_seen, @last_seen),
status = 'open',
resolution = NULL
WHERE alert_id = @alert_id", conn);
updateCmd.Parameters.AddWithValue("alert_id", alertId);
updateCmd.Parameters.AddWithValue("occurrence_count", occurrenceCount);
updateCmd.Parameters.AddWithValue("first_seen", firstSeen.UtcDateTime);
updateCmd.Parameters.AddWithValue("last_seen", lastSeen.UtcDateTime);
await updateCmd.ExecuteNonQueryAsync(ct).ConfigureAwait(false);
}
else
{
await using var insertCmd = new NpgsqlCommand(@"
INSERT INTO advisoryai.search_quality_alerts
(tenant_id, alert_type, query, occurrence_count, first_seen, last_seen, status)
VALUES
(@tenant_id, @alert_type, @query, @occurrence_count, @first_seen, @last_seen, 'open')", conn);
insertCmd.Parameters.AddWithValue("tenant_id", tenantId);
insertCmd.Parameters.AddWithValue("alert_type", alertType);
insertCmd.Parameters.AddWithValue("query", query);
insertCmd.Parameters.AddWithValue("occurrence_count", occurrenceCount);
insertCmd.Parameters.AddWithValue("first_seen", firstSeen.UtcDateTime);
insertCmd.Parameters.AddWithValue("last_seen", lastSeen.UtcDateTime);
await insertCmd.ExecuteNonQueryAsync(ct).ConfigureAwait(false);
}
}
catch (Exception ex)
{
_logger.LogDebug(ex, "Failed to upsert quality alert in database; applying in-memory fallback.");
}
}
// In-memory mirror of the upsert above (runs even when the DB write succeeded).
lock (_fallbackLock)
{
var existingIndex = _fallbackAlerts.FindIndex(entry =>
entry.TenantId.Equals(tenantId, StringComparison.OrdinalIgnoreCase) &&
entry.AlertType.Equals(alertType, StringComparison.Ordinal) &&
entry.Query.Equals(query, StringComparison.OrdinalIgnoreCase) &&
!entry.Status.Equals("resolved", StringComparison.Ordinal));
if (existingIndex >= 0)
{
// Entries are replaced (not mutated) so previously handed-out clones
// are unaffected; the seen-span only ever widens.
var existing = _fallbackAlerts[existingIndex];
_fallbackAlerts[existingIndex] = new SearchQualityAlertEntry
{
AlertId = existing.AlertId,
TenantId = existing.TenantId,
AlertType = existing.AlertType,
Query = existing.Query,
OccurrenceCount = occurrenceCount,
FirstSeen = existing.FirstSeen <= firstSeen.UtcDateTime ? existing.FirstSeen : firstSeen.UtcDateTime,
LastSeen = existing.LastSeen >= lastSeen.UtcDateTime ? existing.LastSeen : lastSeen.UtcDateTime,
Status = "open",
Resolution = null,
CreatedAt = existing.CreatedAt,
};
}
else
{
_fallbackAlerts.Add(new SearchQualityAlertEntry
{
AlertId = Guid.NewGuid().ToString("D"),
TenantId = tenantId,
AlertType = alertType,
Query = query,
OccurrenceCount = occurrenceCount,
FirstSeen = firstSeen.UtcDateTime,
LastSeen = lastSeen.UtcDateTime,
Status = "open",
Resolution = null,
CreatedAt = DateTime.UtcNow,
});
}
}
}
/// <summary>
/// Buffers one feedback entry in memory (used alongside — or instead of — the
/// database write), trimming the oldest rows so the buffer never exceeds
/// 10,000 entries.
/// </summary>
private void StoreFallbackFeedback(SearchFeedbackEntry entry, DateTimeOffset createdAt)
{
    const int capacity = 10_000;
    lock (_fallbackLock)
    {
        _fallbackFeedback.Add((entry, createdAt));
        var overflow = _fallbackFeedback.Count - capacity;
        if (overflow > 0)
        {
            // Append-only list: index 0 holds the oldest entries.
            _fallbackFeedback.RemoveRange(0, overflow);
        }
    }
}
/// <summary>
/// Returns a snapshot of the in-memory feedback for one tenant within the
/// window (measured back from now), ordered oldest-first. Materialized inside
/// the lock so callers can enumerate it safely.
/// </summary>
private IReadOnlyList<(SearchFeedbackEntry Entry, DateTimeOffset CreatedAt)> GetFallbackFeedback(string tenantId, TimeSpan window)
{
var cutoff = DateTimeOffset.UtcNow - window;
lock (_fallbackLock)
{
return _fallbackFeedback
.Where(item => item.Entry.TenantId.Equals(tenantId, StringComparison.OrdinalIgnoreCase))
.Where(item => item.CreatedAt >= cutoff)
.OrderBy(item => item.CreatedAt)
.ToArray();
}
}
/// <summary>
/// Produces a field-for-field defensive copy of an alert entry, so callers
/// never receive a reference into the mutable in-memory alert list.
/// </summary>
private static SearchQualityAlertEntry CloneAlertEntry(SearchQualityAlertEntry source)
{
    var copy = new SearchQualityAlertEntry
    {
        // Identity
        AlertId = source.AlertId,
        TenantId = source.TenantId,
        // Classification
        AlertType = source.AlertType,
        Query = source.Query,
        Status = source.Status,
        Resolution = source.Resolution,
        // Counters and timestamps
        OccurrenceCount = source.OccurrenceCount,
        FirstSeen = source.FirstSeen,
        LastSeen = source.LastSeen,
        CreatedAt = source.CreatedAt,
    };
    return copy;
}
/// <summary>
/// Aggregated alert input: a query, how many times it triggered the alert
/// condition within the window, and the first/last time it was observed.
/// </summary>
private readonly record struct AlertCandidate(
string Query,
int OccurrenceCount,
DateTimeOffset FirstSeen,
DateTimeOffset LastSeen);
// ----- Validation helpers -----
public static bool IsValidSignal(string? signal)

View File

@@ -0,0 +1,60 @@
using Microsoft.Extensions.Hosting;
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Options;
using StellaOps.AdvisoryAI.KnowledgeSearch;
namespace StellaOps.AdvisoryAI.UnifiedSearch.Analytics;
/// <summary>
/// Hosted background loop that periodically asks <see cref="SearchQualityMonitor"/>
/// to refresh quality alerts for all known tenants. Disabled entirely when
/// <c>SearchQualityMonitorEnabled</c> is false; otherwise runs on a fixed
/// interval (floored at 30 seconds) until the host stops.
/// </summary>
internal sealed class SearchQualityMonitorBackgroundService : BackgroundService
{
private readonly KnowledgeSearchOptions _options;
private readonly SearchQualityMonitor _monitor;
private readonly ILogger<SearchQualityMonitorBackgroundService> _logger;
/// <summary>
/// Captures the options snapshot and collaborators. Options are read once at
/// construction; later option changes are not observed.
/// </summary>
public SearchQualityMonitorBackgroundService(
IOptions<KnowledgeSearchOptions> options,
SearchQualityMonitor monitor,
ILogger<SearchQualityMonitorBackgroundService> logger)
{
_options = options.Value;
_monitor = monitor;
_logger = logger;
}
/// <summary>
/// Refresh loop: refresh alerts, wait one interval, repeat. Refresh failures
/// are logged and swallowed so one bad cycle never kills the loop; only host
/// shutdown (the stopping token) exits it.
/// </summary>
protected override async Task ExecuteAsync(CancellationToken stoppingToken)
{
if (!_options.SearchQualityMonitorEnabled)
{
_logger.LogDebug("Search quality monitor background loop is disabled.");
return;
}
// Enforce a 30-second floor so a misconfigured interval cannot hot-loop.
var interval = TimeSpan.FromSeconds(Math.Max(30, _options.SearchQualityMonitorIntervalSeconds));
while (!stoppingToken.IsCancellationRequested)
{
try
{
var refreshed = await _monitor.RefreshAlertsForKnownTenantsAsync(stoppingToken).ConfigureAwait(false);
_logger.LogDebug("Search quality monitor refreshed alerts for {TenantCount} tenants.", refreshed);
}
catch (OperationCanceledException) when (stoppingToken.IsCancellationRequested)
{
// Shutdown requested mid-refresh — exit quietly.
break;
}
catch (Exception ex)
{
_logger.LogWarning(ex, "Search quality monitor background refresh failed.");
}
try
{
await Task.Delay(interval, stoppingToken).ConfigureAwait(false);
}
catch (OperationCanceledException) when (stoppingToken.IsCancellationRequested)
{
// Shutdown requested during the delay — exit quietly.
break;
}
}
}
}

View File

@@ -63,6 +63,7 @@ public static class UnifiedSearchServiceCollectionExtensions
services.TryAddSingleton<UnifiedSearchIndexer>();
services.TryAddSingleton<IUnifiedSearchIndexer>(provider => provider.GetRequiredService<UnifiedSearchIndexer>());
services.TryAddEnumerable(ServiceDescriptor.Singleton<IHostedService, UnifiedSearchIndexRefreshService>());
services.TryAddEnumerable(ServiceDescriptor.Singleton<IHostedService, SearchQualityMonitorBackgroundService>());
// Telemetry
services.TryAddSingleton<IUnifiedSearchTelemetrySink, LoggingUnifiedSearchTelemetrySink>();

View File

@@ -39,16 +39,7 @@ public sealed class ChatIntegrationTests : IClassFixture<WebApplicationFactory<S
});
});
_client = _factory.CreateClient();
// Current advisory-ai endpoints authorize using scope + actor headers.
_client.DefaultRequestHeaders.Add("X-StellaOps-Actor", "test-user");
_client.DefaultRequestHeaders.Add("X-StellaOps-Scopes", "advisory-ai:operate advisory:chat chat:user");
_client.DefaultRequestHeaders.Add("X-StellaOps-Tenant", "test-tenant");
// Keep legacy headers for compatibility with older code paths.
_client.DefaultRequestHeaders.Add("X-StellaOps-User", "test-user");
_client.DefaultRequestHeaders.Add("X-StellaOps-Client", "test-client");
_client.DefaultRequestHeaders.Add("X-StellaOps-Roles", "chat:user");
_client = CreateClientWithScopes("advisory-ai:operate advisory:chat chat:user");
}
#region Create Conversation Tests
@@ -263,6 +254,8 @@ public sealed class ChatIntegrationTests : IClassFixture<WebApplicationFactory<S
result.Should().NotBeNull();
result!.TurnId.Should().NotBeNullOrEmpty();
result.Content.Should().NotBeNullOrEmpty();
result.Content.Should().NotContain("placeholder response",
"add-turn runtime should use grounded path or deterministic fallback instead of placeholders");
}
[Fact]
@@ -284,6 +277,133 @@ public sealed class ChatIntegrationTests : IClassFixture<WebApplicationFactory<S
response.StatusCode.Should().Be(HttpStatusCode.NotFound);
}
[Fact]
public async Task AddTurn_LegacyMessageField_Returns200WithResponse()
{
    // Arrange: set up a conversation to attach the legacy-style turn to.
    var conversationRequest = new CreateConversationRequest { TenantId = "test-tenant-legacy-message" };
    var conversationResponse = await _client.PostAsJsonAsync("/v1/advisory-ai/conversations", conversationRequest);
    var conversation = await conversationResponse.Content.ReadFromJsonAsync<ConversationResponse>();

    var legacyTurn = new AddTurnRequest
    {
        Message = "Legacy payload message field still works",
        Stream = false
    };

    // Act: post a turn using the deprecated "message" field instead of "content".
    var turnResponse = await _client.PostAsJsonAsync(
        $"/v1/advisory-ai/conversations/{conversation!.ConversationId}/turns",
        legacyTurn);

    // Assert: the request succeeds and the deprecation trio of headers is surfaced.
    turnResponse.StatusCode.Should().Be(HttpStatusCode.OK);

    turnResponse.Headers.TryGetValues("Deprecation", out var deprecationValues).Should().BeTrue();
    deprecationValues.Should().Contain("true");
    turnResponse.Headers.TryGetValues("Sunset", out var sunsetValues).Should().BeTrue();
    sunsetValues.Should().Contain("Thu, 31 Dec 2026 23:59:59 GMT");
    turnResponse.Headers.TryGetValues("Warning", out var warningValues).Should().BeTrue();
    warningValues.Should().Contain(v => v.Contains("message", StringComparison.OrdinalIgnoreCase));

    var turnResult = await turnResponse.Content.ReadFromJsonAsync<AssistantTurnResponse>();
    turnResult.Should().NotBeNull();
    turnResult!.Content.Should().NotBeNullOrEmpty();
}
[Fact]
public async Task ChatEndpointFamilies_RequireOperateScope_ForEquivalentWriteOperations()
{
    // Arrange: create a conversation with the fully-scoped default client.
    var createResponse = await _client.PostAsJsonAsync(
        "/v1/advisory-ai/conversations",
        new CreateConversationRequest { TenantId = "test-tenant-scope-parity" });
    createResponse.StatusCode.Should().Be(HttpStatusCode.Created);
    var conversation = await createResponse.Content.ReadFromJsonAsync<ConversationResponse>();

    var viewOnlyClient = CreateClientWithScopes("advisory-ai:view");

    // Act: try both write paths (legacy turns endpoint and chat gateway) with a read-only scope.
    var turnsWriteResponse = await viewOnlyClient.PostAsJsonAsync(
        $"/v1/advisory-ai/conversations/{conversation!.ConversationId}/turns",
        new AddTurnRequest { Content = "Can I add with view-only scope?" });
    var gatewayWriteResponse = await viewOnlyClient.PostAsJsonAsync(
        "/api/v1/chat/query",
        new AdvisoryChatQueryRequest { Query = "Can I query with view-only scope?" });

    // Assert: both endpoint families reject the caller identically.
    turnsWriteResponse.StatusCode.Should().Be(HttpStatusCode.Forbidden);
    gatewayWriteResponse.StatusCode.Should().Be(HttpStatusCode.Forbidden);
}
[Fact]
public async Task ConversationAndGatewayEndpoints_ReturnGroundedRuntimeResponses()
{
    // Arrange: a conversation pinned to a concrete CVE so the runtime has grounding context.
    var createResponse = await _client.PostAsJsonAsync(
        "/v1/advisory-ai/conversations",
        new CreateConversationRequest
        {
            TenantId = "test-tenant-runtime-consistency",
            Context = new ConversationContextRequest { CurrentCveId = "CVE-2023-44487" }
        });
    createResponse.StatusCode.Should().Be(HttpStatusCode.Created);
    var conversation = await createResponse.Content.ReadFromJsonAsync<ConversationResponse>();

    // Act: drive the same question through both endpoint families.
    var turnHttpResponse = await _client.PostAsJsonAsync(
        $"/v1/advisory-ai/conversations/{conversation!.ConversationId}/turns",
        new AddTurnRequest { Content = "Assess CVE-2023-44487 risk and next action." });
    var gatewayHttpResponse = await _client.PostAsJsonAsync(
        "/api/v1/chat/query",
        new AdvisoryChatQueryRequest
        {
            Query = "Assess CVE-2023-44487 risk and next action.",
            ConversationId = conversation.ConversationId
        });

    // Assert: both succeed and neither degrades to placeholder output.
    turnHttpResponse.StatusCode.Should().Be(HttpStatusCode.OK);
    gatewayHttpResponse.StatusCode.Should().Be(HttpStatusCode.OK);

    var turnResult = await turnHttpResponse.Content.ReadFromJsonAsync<AssistantTurnResponse>();
    var gatewayResult = await gatewayHttpResponse.Content.ReadFromJsonAsync<AdvisoryChatQueryResponse>();

    turnResult.Should().NotBeNull();
    turnResult!.Content.Should().NotBeNullOrWhiteSpace();
    turnResult.Content.Should().NotContain("placeholder response",
        "conversation add-turn must use grounded runtime output or deterministic fallback");
    gatewayResult.Should().NotBeNull();
    gatewayResult!.Summary.Should().NotBeNullOrWhiteSpace();
    gatewayResult.Summary.Should().NotContain("placeholder response",
        "chat gateway endpoint must use grounded runtime output or deterministic fallback");
}
[Fact]
public async Task AddTurn_EmptyPayload_Returns400()
{
    // Arrange: a conversation plus a turn whose content and legacy message are whitespace only.
    var conversationResponse = await _client.PostAsJsonAsync(
        "/v1/advisory-ai/conversations",
        new CreateConversationRequest { TenantId = "test-tenant-empty-message" });
    var conversation = await conversationResponse.Content.ReadFromJsonAsync<ConversationResponse>();

    var blankTurn = new AddTurnRequest
    {
        Content = " ",
        Message = " ",
        Stream = false
    };

    // Act
    var response = await _client.PostAsJsonAsync(
        $"/v1/advisory-ai/conversations/{conversation!.ConversationId}/turns",
        blankTurn);

    // Assert: neither field carries usable text, so the request is rejected.
    response.StatusCode.Should().Be(HttpStatusCode.BadRequest);
}
[Fact]
public async Task AddTurn_MultipleMessages_BuildsConversationHistory()
{
@@ -394,6 +514,22 @@ public sealed class ChatIntegrationTests : IClassFixture<WebApplicationFactory<S
}
#endregion
/// <summary>
/// Builds an HTTP client pre-populated with the auth headers the advisory-ai endpoints expect.
/// </summary>
/// <param name="scopes">Space-separated scope list placed in the X-StellaOps-Scopes header.</param>
private HttpClient CreateClientWithScopes(string scopes)
{
    var client = _factory.CreateClient();

    var headers = new (string Name, string Value)[]
    {
        ("X-StellaOps-Actor", "test-user"),
        ("X-StellaOps-Scopes", scopes),
        ("X-StellaOps-Tenant", "test-tenant"),
        ("X-Tenant-Id", "test-tenant"),
        ("X-User-Id", "test-user"),
        // Keep legacy headers for compatibility with older code paths.
        ("X-StellaOps-User", "test-user"),
        ("X-StellaOps-Client", "test-client"),
        ("X-StellaOps-Roles", "chat:user"),
    };

    foreach (var (name, value) in headers)
    {
        client.DefaultRequestHeaders.Add(name, value);
    }

    return client;
}
}
/// <summary>

View File

@@ -131,6 +131,85 @@ public sealed class UnifiedSearchSprintIntegrationTests : IDisposable
"legacy search endpoint should include Sunset header");
}
[Fact]
public void G5_ExactLexicalRank_PrecedesFuzzyFallbackRank()
{
// Two knowledge-domain chunks: one with a positive LexicalScore (exact hit)
// and one with LexicalScore = 0, i.e. a fuzzy-fallback candidate.
using var exactMetadata = JsonDocument.Parse("""{"entity_key":"docs:ci-guide","domain":"knowledge"}""");
using var fuzzyMetadata = JsonDocument.Parse("""{"entity_key":"docs:city-overview","domain":"knowledge"}""");
var exactRow = new KnowledgeChunkRow(
ChunkId: "chunk-exact",
DocId: "doc-1",
Kind: "md_section",
Anchor: "ci",
SectionPath: null,
SpanStart: 0,
SpanEnd: 64,
Title: "CI pipeline troubleshooting",
Body: "CI pipeline troubleshooting and retry guidance.",
Snippet: "CI pipeline troubleshooting",
Metadata: exactMetadata,
Embedding: null,
LexicalScore: 1.2);
var fuzzyRow = new KnowledgeChunkRow(
ChunkId: "chunk-fuzzy",
DocId: "doc-2",
Kind: "md_section",
Anchor: "city",
SectionPath: null,
SpanStart: 0,
SpanEnd: 64,
Title: "City overview",
Body: "City overview details for unrelated content.",
Snippet: "City overview",
Metadata: fuzzyMetadata,
Embedding: null,
LexicalScore: 0);
// Lexical ranking: the exact hit is ranked 1, the fuzzy fallback is ranked 2.
var lexicalRanks = new Dictionary<string, (string ChunkId, int Rank, KnowledgeChunkRow Row)>(StringComparer.Ordinal)
{
["chunk-exact"] = ("chunk-exact", 1, exactRow),
["chunk-fuzzy"] = ("chunk-fuzzy", 2, fuzzyRow)
};
// Fuse with all optional boosts (freshness/popularity) disabled so only the
// lexical ranks determine the order.
var ranked = WeightedRrfFusion.Fuse(
new Dictionary<string, double>(StringComparer.Ordinal) { ["knowledge"] = 1.0 },
lexicalRanks,
[],
"ci",
null,
null,
enableFreshnessBoost: false,
referenceTime: null,
popularityMap: null,
popularityBoostWeight: 0);
ranked.Should().HaveCount(2);
ranked[0].Row.ChunkId.Should().Be("chunk-exact",
"exact lexical hits should stay ahead of fuzzy fallback candidates");
}
[Fact]
public async Task G5_QueryCi_ReturnsRelevantResults()
{
    using var client = CreateAuthenticatedClient();

    // Act: issue the shortest realistic technical query ("ci").
    var response = await client.PostAsJsonAsync("/v1/search/query", new UnifiedSearchApiRequest
    {
        Q = "ci"
    });

    // Assert
    response.StatusCode.Should().Be(HttpStatusCode.OK);
    var payload = await response.Content.ReadFromJsonAsync<UnifiedSearchApiResponse>();
    payload.Should().NotBeNull();
    payload!.Cards.Should().NotBeEmpty("short technical acronyms should still return useful matches");

    var hasCiCard = payload.Cards.Any(card =>
        card.Title.Contains("CI", StringComparison.OrdinalIgnoreCase) ||
        card.Snippet.Contains("CI", StringComparison.OrdinalIgnoreCase));
    hasCiCard.Should().BeTrue("the CI query should return CI-relevant search cards");
}
// ────────────────────────────────────────────────────────────────
// Sprint 102 (G1) - ONNX Vector Encoder
// ────────────────────────────────────────────────────────────────
@@ -608,6 +687,70 @@ public sealed class UnifiedSearchSprintIntegrationTests : IDisposable
"role bias should not apply when RoleBasedBiasEnabled is false");
}
[Fact]
public void G6_DomainWeightCalculator_ScannerReadBiasesFindings_ForGenericQuery()
{
    // Role bias enabled: a scanner:read caller should see the findings domain boosted.
    var weightCalculator = new DomainWeightCalculator(
        new EntityExtractor(),
        new IntentClassifier(),
        Microsoft.Extensions.Options.Options.Create(new KnowledgeSearchOptions
        {
            RoleBasedBiasEnabled = true
        }));

    var neutralWeights = weightCalculator.ComputeWeights("release status", [], null);
    var scannerWeights = weightCalculator.ComputeWeights("release status", [], new UnifiedSearchFilter
    {
        UserScopes = ["scanner:read"]
    });

    scannerWeights["findings"].Should().BeGreaterThan(neutralWeights["findings"],
        "scanner:read should bias generic queries toward findings");
}
[Fact]
public void G6_DomainWeightCalculator_PolicyWriteBiasesPolicy_ForGenericQuery()
{
    // Role bias enabled: a policy:write caller should see the policy domain boosted.
    var weightCalculator = new DomainWeightCalculator(
        new EntityExtractor(),
        new IntentClassifier(),
        Microsoft.Extensions.Options.Options.Create(new KnowledgeSearchOptions
        {
            RoleBasedBiasEnabled = true
        }));

    var neutralWeights = weightCalculator.ComputeWeights("release status", [], null);
    var policyWeights = weightCalculator.ComputeWeights("release status", [], new UnifiedSearchFilter
    {
        UserScopes = ["policy:write"]
    });

    policyWeights["policy"].Should().BeGreaterThan(neutralWeights["policy"],
        "policy:write should bias generic queries toward policy");
}
[Fact]
public void G6_DomainWeightCalculator_NoRelevantScopes_LeavesWeightsUnbiased()
{
    // An unrelated scope (dashboard:read) should leave every domain weight untouched.
    var weightCalculator = new DomainWeightCalculator(
        new EntityExtractor(),
        new IntentClassifier(),
        Microsoft.Extensions.Options.Options.Create(new KnowledgeSearchOptions
        {
            RoleBasedBiasEnabled = true
        }));

    var neutralWeights = weightCalculator.ComputeWeights("release status", [], null);
    var unrelatedScopeWeights = weightCalculator.ComputeWeights("release status", [], new UnifiedSearchFilter
    {
        UserScopes = ["dashboard:read"]
    });

    unrelatedScopeWeights["findings"].Should().Be(neutralWeights["findings"]);
    unrelatedScopeWeights["policy"].Should().Be(neutralWeights["policy"]);
    unrelatedScopeWeights["knowledge"].Should().Be(neutralWeights["knowledge"]);
}
[Fact]
public void G6_WeightedRrfFusion_PopularityBoost_AppliesWhenMapProvided()
{
@@ -726,6 +869,15 @@ public sealed class UnifiedSearchSprintIntegrationTests : IDisposable
detector.DetectLanguage("der Container startet nicht und die Logs zeigen einen Fehler").Should().Be("de");
}
[Fact]
public void G9_QueryLanguageDetector_DetectsGermanSecurityPluralTerms()
{
    // German plural security vocabulary should be detected and map to the "german" FTS config.
    var detector = new QueryLanguageDetector();

    var detected = detector.DetectLanguage("Sicherheitslücken in der Produktion");

    detected.Should().Be("de");
    detector.MapLanguageToFtsConfig(detected).Should().Be("german");
}
[Fact]
public void G9_QueryLanguageDetector_DetectsFrench()
{
@@ -888,6 +1040,13 @@ public sealed class UnifiedSearchSprintIntegrationTests : IDisposable
classifier.Classify("qu'est-ce que une politique de s\u00e9curit\u00e9", "fr").Should().Be("explore");
}
[Fact]
public void G9_IntentClassifier_ClassifiesFrenchTroubleshoot()
{
    // "corriger" (to fix) should push a French query into the troubleshoot intent.
    var classifier = new IntentClassifier();

    var intent = classifier.Classify("corriger l'erreur de connexion", "fr");

    intent.Should().Be("troubleshoot");
}
[Fact]
public void G9_IntentClassifier_ClassifiesSpanishCompare()
{
@@ -926,6 +1085,31 @@ public sealed class UnifiedSearchSprintIntegrationTests : IDisposable
classifier.HasPolicyIntent("docker login fails").Should().BeFalse();
}
[Fact]
public void G9_MultilingualIntentKeywords_AreUtf8Clean()
{
    // Fragments that typically appear when UTF-8 text is decoded as Latin-1 (mojibake),
    // plus the Unicode replacement character.
    var markerFragments = new[] { "Ã", "Ð", "Ñ", "\uFFFD" };

    var keywordMaps = new[]
    {
        MultilingualIntentKeywords.GetNavigateKeywords(),
        MultilingualIntentKeywords.GetTroubleshootKeywords(),
        MultilingualIntentKeywords.GetExploreKeywords(),
        MultilingualIntentKeywords.GetCompareKeywords(),
    };

    // Flatten every keyword list from every intent map and check each term.
    var allTerms = keywordMaps.SelectMany(map => map.Values).SelectMany(terms => terms);
    foreach (var term in allTerms)
    {
        markerFragments.Any(term.Contains).Should().BeFalse(
            $"keyword '{term}' should not contain mojibake fragments");
    }
}
[Fact]
public void G9_SynthesisTemplateEngine_GermanLocale_ProducesGermanOutput()
{
@@ -958,6 +1142,31 @@ public sealed class UnifiedSearchSprintIntegrationTests : IDisposable
"French locale should produce French-localized synthesis output");
}
[Fact]
public void G9_DoctorSearchSeedLoader_LoadsGermanLocalizedEntries()
{
    // Resolve the repository root relative to the test binary (seven levels up).
    var repositoryRoot = Path.GetFullPath(Path.Combine(
        AppContext.BaseDirectory,
        "..", "..", "..", "..", "..", "..", ".."));
    var seedFilePath = Path.Combine(
        repositoryRoot,
        "src",
        "AdvisoryAI",
        "StellaOps.AdvisoryAI",
        "KnowledgeSearch",
        "doctor-search-seed.json");

    File.Exists(seedFilePath).Should().BeTrue("doctor seed base file must exist for localization checks");

    var localized = DoctorSearchSeedLoader.LoadLocalized(seedFilePath);

    localized.Should().ContainKey("de",
        "German localized doctor seed file should be discovered");
    localized["de"].Should().NotBeEmpty();

    var hasGermanContent = localized["de"].Any(entry =>
        entry.Title.Contains("Konnektivität", StringComparison.OrdinalIgnoreCase) ||
        entry.Description.Contains("Datenbank", StringComparison.OrdinalIgnoreCase));
    hasGermanContent.Should().BeTrue("German doctor entries should expose localized descriptions/titles");
}
[Fact]
public void G9_SynthesisTemplateEngine_SpanishLocale_ProducesSpanishOutput()
{
@@ -1179,6 +1388,69 @@ public sealed class UnifiedSearchSprintIntegrationTests : IDisposable
response.StatusCode.Should().Be(HttpStatusCode.NoContent);
}
[Fact]
public async Task G6_AnalyticsClickEvent_IsStoredForPopularitySignals()
{
    using var client = CreateAuthenticatedClient();

    // Act: report a single click event against a knowledge-domain entity.
    var clickEvent = new SearchAnalyticsApiEvent
    {
        EventType = "click",
        Query = "docker login",
        EntityKey = "docs:troubleshooting",
        Domain = "knowledge",
        Position = 1
    };
    var response = await client.PostAsJsonAsync("/v1/advisory-ai/search/analytics", new SearchAnalyticsApiRequest
    {
        Events = [clickEvent]
    });

    response.StatusCode.Should().Be(HttpStatusCode.NoContent);

    // Assert: the click is reflected in the popularity map used for ranking boosts.
    using var scope = _factory.Services.CreateScope();
    var analyticsService = scope.ServiceProvider.GetRequiredService<SearchAnalyticsService>();
    var popularity = await analyticsService.GetPopularityMapAsync("test-tenant", 30);

    popularity.Should().ContainKey("docs:troubleshooting",
        "click analytics events should be persisted for popularity ranking signals");
    popularity["docs:troubleshooting"].Should().BeGreaterThan(0);
}
[Fact]
public async Task G6_SearchHistory_IsPersistedAndQueryable_FromAnalyticsFlow()
{
    // Arrange: hand-rolled client so the actor header differs from the shared test client.
    using var client = _factory.CreateClient();
    client.DefaultRequestHeaders.Add("X-StellaOps-Scopes", "advisory-ai:operate");
    client.DefaultRequestHeaders.Add("X-StellaOps-Tenant", "test-tenant");
    client.DefaultRequestHeaders.Add("X-StellaOps-Actor", "integration-user");

    // Act: record a successful query event, then read back the server-side history.
    var analyticsPayload = new SearchAnalyticsApiRequest
    {
        Events =
        [
            new SearchAnalyticsApiEvent
            {
                EventType = "query",
                Query = "history integration probe",
                ResultCount = 2
            }
        ]
    };
    var analyticsResponse = await client.PostAsJsonAsync("/v1/advisory-ai/search/analytics", analyticsPayload);
    analyticsResponse.StatusCode.Should().Be(HttpStatusCode.NoContent);

    var historyResponse = await client.GetAsync("/v1/advisory-ai/search/history");

    // Assert
    historyResponse.StatusCode.Should().Be(HttpStatusCode.OK);
    var historyPayload = await historyResponse.Content.ReadFromJsonAsync<SearchHistoryApiResponse>();
    historyPayload.Should().NotBeNull();
    historyPayload!.Entries.Any(entry => entry.Query.Equals("history integration probe", StringComparison.Ordinal))
        .Should().BeTrue("successful query analytics should be persisted in server-side history");
}
[Fact]
public async Task G10_AnalyticsEndpoint_EmptyEvents_ReturnsBadRequest()
{
@@ -1212,6 +1484,32 @@ public sealed class UnifiedSearchSprintIntegrationTests : IDisposable
"batch exceeding 100 events should be rejected");
}
[Fact]
public async Task G10_ZeroResultBurst_CreatesQualityAlert()
{
    using var client = CreateAuthenticatedClient();

    // Emit five identical zero-result events in one analytics batch.
    var burst = new SearchAnalyticsApiEvent[5];
    for (var i = 0; i < burst.Length; i++)
    {
        burst[i] = new SearchAnalyticsApiEvent
        {
            EventType = "zero_result",
            Query = "nonexistent vulnerability token",
            ResultCount = 0
        };
    }

    var response = await client.PostAsJsonAsync("/v1/advisory-ai/search/analytics", new SearchAnalyticsApiRequest
    {
        Events = burst
    });
    response.StatusCode.Should().Be(HttpStatusCode.NoContent);

    // Force the monitor to evaluate the fresh events, then inspect open alerts.
    using var scope = _factory.Services.CreateScope();
    var monitor = scope.ServiceProvider.GetRequiredService<SearchQualityMonitor>();
    await monitor.RefreshAlertsAsync("test-tenant");

    var alerts = await monitor.GetAlertsAsync("test-tenant", status: "open", alertType: "zero_result");
    alerts.Any(alert => alert.Query.Equals("nonexistent vulnerability token", StringComparison.OrdinalIgnoreCase))
        .Should().BeTrue("five repeated zero-result events should create a zero_result quality alert");
}
[Fact]
public async Task G10_AlertUpdateEndpoint_InvalidStatus_ReturnsBadRequest()
{
@@ -1444,6 +1742,8 @@ public sealed class UnifiedSearchSprintIntegrationTests : IDisposable
{
public Task<UnifiedSearchResponse> SearchAsync(UnifiedSearchRequest request, CancellationToken cancellationToken)
{
var normalizedQuery = request.Q.Trim();
var isCiQuery = normalizedQuery.Equals("ci", StringComparison.OrdinalIgnoreCase);
var cards = new List<EntityCard>();
// Apply domain filtering if specified
@@ -1483,8 +1783,8 @@ public sealed class UnifiedSearchSprintIntegrationTests : IDisposable
EntityKey = "docs:troubleshooting",
EntityType = "docs",
Domain = "knowledge",
Title = "Troubleshooting Guide",
Snippet = "Common troubleshooting steps",
Title = isCiQuery ? "CI pipeline troubleshooting" : "Troubleshooting Guide",
Snippet = isCiQuery ? "CI checks, retries, and pipeline diagnostics." : "Common troubleshooting steps",
Score = 0.85,
Actions =
[
@@ -1499,7 +1799,7 @@ public sealed class UnifiedSearchSprintIntegrationTests : IDisposable
{
synthesis = new SynthesisResult
{
Summary = $"Found {cards.Count} results for \"{request.Q}\".",
Summary = $"Found {cards.Count} results for \"{normalizedQuery}\".",
Template = "mixed_overview",
Confidence = cards.Count >= 2 ? "medium" : "low",
SourceCount = cards.Count,

View File

@@ -137,6 +137,96 @@ public sealed class WeightedRrfFusionTests
}
}
[Fact]
public void Fuse_popularity_boost_can_raise_high_click_result_above_lower_ranked_peer()
{
// Two findings-domain rows: "chunk-low" is lexically ranked first but has zero
// clicks; "chunk-high" is ranked second but carries 100 clicks in the popularity map.
var weights = new Dictionary<string, double> { ["findings"] = 1.0 };
using var lowMeta = JsonDocument.Parse("""{"entity_key":"finding:low","domain":"findings"}""");
using var highMeta = JsonDocument.Parse("""{"entity_key":"finding:high","domain":"findings"}""");
var lowerClickRow = MakeRow("chunk-low", "finding", "Lower click finding", lowMeta);
var higherClickRow = MakeRow("chunk-high", "finding", "Higher click finding", highMeta);
var lexical = new Dictionary<string, (string ChunkId, int Rank, KnowledgeChunkRow Row)>(StringComparer.Ordinal)
{
["chunk-low"] = ("chunk-low", 1, lowerClickRow),
["chunk-high"] = ("chunk-high", 2, higherClickRow)
};
// Fuse with a non-zero popularityBoostWeight so the click-count gap can
// override the one-position lexical rank difference.
var withPopularity = WeightedRrfFusion.Fuse(
weights,
lexical,
[],
"finding",
null,
null,
enableFreshnessBoost: false,
referenceTime: null,
popularityMap: new Dictionary<string, int>(StringComparer.Ordinal)
{
["finding:low"] = 0,
["finding:high"] = 100
},
popularityBoostWeight: 0.05);
withPopularity.Should().HaveCount(2);
withPopularity[0].Row.ChunkId.Should().Be("chunk-high",
"high-click result should outrank lower-click peer when popularity boost is enabled");
}
[Fact]
public void Fuse_without_popularity_boost_keeps_baseline_ranking()
{
// Same fixture as the popularity-boost test: "chunk-low" ranked 1 with zero
// clicks, "chunk-high" ranked 2 with 100 clicks.
var weights = new Dictionary<string, double> { ["findings"] = 1.0 };
using var lowMeta = JsonDocument.Parse("""{"entity_key":"finding:low","domain":"findings"}""");
using var highMeta = JsonDocument.Parse("""{"entity_key":"finding:high","domain":"findings"}""");
var lowerClickRow = MakeRow("chunk-low", "finding", "Lower click finding", lowMeta);
var higherClickRow = MakeRow("chunk-high", "finding", "Higher click finding", highMeta);
var lexical = new Dictionary<string, (string ChunkId, int Rank, KnowledgeChunkRow Row)>(StringComparer.Ordinal)
{
["chunk-low"] = ("chunk-low", 1, lowerClickRow),
["chunk-high"] = ("chunk-high", 2, higherClickRow)
};
// Baseline: no popularity map at all.
var baseline = WeightedRrfFusion.Fuse(
weights,
lexical,
[],
"finding",
null,
null,
enableFreshnessBoost: false,
referenceTime: null,
popularityMap: null,
popularityBoostWeight: 0.0);
// Popularity map supplied, but boost weight zeroed out — must behave like baseline.
var disabledWithMap = WeightedRrfFusion.Fuse(
weights,
lexical,
[],
"finding",
null,
null,
enableFreshnessBoost: false,
referenceTime: null,
popularityMap: new Dictionary<string, int>(StringComparer.Ordinal)
{
["finding:low"] = 0,
["finding:high"] = 100
},
popularityBoostWeight: 0.0);
baseline.Should().HaveCount(2);
disabledWithMap.Should().HaveCount(2);
baseline[0].Row.ChunkId.Should().Be("chunk-low");
disabledWithMap[0].Row.ChunkId.Should().Be("chunk-low",
"when popularity boost is disabled, ranking should match the baseline order");
}
private static KnowledgeChunkRow MakeRow(
string chunkId,
string kind,

View File

@@ -1,29 +1,21 @@
using Microsoft.EntityFrameworkCore;
using Microsoft.Extensions.Logging;
using Npgsql;
using StellaOps.Orchestrator.Core.Domain;
using StellaOps.Orchestrator.Core.Hashing;
using StellaOps.Orchestrator.Infrastructure.EfCore.Models;
using StellaOps.Orchestrator.Infrastructure.Repositories;
using System.Text;
namespace StellaOps.Orchestrator.Infrastructure.Postgres;
/// <summary>
/// PostgreSQL implementation of the audit repository.
/// Read methods use EF Core LINQ; write/PL/pgSQL methods remain as raw SQL.
/// </summary>
public sealed class PostgresAuditRepository : IAuditRepository
{
private const string SelectAuditColumns = """
entry_id, tenant_id, event_type, resource_type, resource_id, actor_id, actor_type,
actor_ip, user_agent, http_method, request_path, old_state, new_state, description,
correlation_id, previous_entry_hash, content_hash, sequence_number, occurred_at, metadata
""";
private const string SelectByIdSql = $"""
SELECT {SelectAuditColumns}
FROM audit_entries
WHERE tenant_id = @tenant_id AND entry_id = @entry_id
""";
private const string DefaultSchema = OrchestratorDbContextFactory.DefaultSchemaName;
private const string InsertEntrySql = """
INSERT INTO audit_entries (
@@ -36,14 +28,6 @@ public sealed class PostgresAuditRepository : IAuditRepository
@correlation_id, @previous_entry_hash, @content_hash, @sequence_number, @occurred_at, @metadata::jsonb)
""";
private const string SelectLatestSql = $"""
SELECT {SelectAuditColumns}
FROM audit_entries
WHERE tenant_id = @tenant_id
ORDER BY sequence_number DESC
LIMIT 1
""";
private const string GetSequenceSql = """
SELECT next_seq, prev_hash FROM next_audit_sequence(@tenant_id)
""";
@@ -182,18 +166,14 @@ public sealed class PostgresAuditRepository : IAuditRepository
CancellationToken cancellationToken = default)
{
await using var connection = await _dataSource.OpenConnectionAsync(tenantId, "reader", cancellationToken).ConfigureAwait(false);
await using var command = new NpgsqlCommand(SelectByIdSql, connection);
command.CommandTimeout = _dataSource.CommandTimeoutSeconds;
command.Parameters.AddWithValue("tenant_id", tenantId);
command.Parameters.AddWithValue("entry_id", entryId);
await using var ctx = OrchestratorDbContextFactory.Create(connection, _dataSource.CommandTimeoutSeconds, DefaultSchema);
await using var reader = await command.ExecuteReaderAsync(cancellationToken).ConfigureAwait(false);
if (!await reader.ReadAsync(cancellationToken).ConfigureAwait(false))
{
return null;
}
var entity = await ctx.AuditEntries
.AsNoTracking()
.FirstOrDefaultAsync(e => e.TenantId == tenantId && e.EntryId == entryId, cancellationToken)
.ConfigureAwait(false);
return MapEntry(reader);
return entity is null ? null : MapEntry(entity);
}
public async Task<IReadOnlyList<AuditEntry>> ListAsync(
@@ -208,24 +188,52 @@ public sealed class PostgresAuditRepository : IAuditRepository
int offset = 0,
CancellationToken cancellationToken = default)
{
var (sql, parameters) = BuildListQuery(tenantId, eventType, resourceType, resourceId, actorId, startTime, endTime, limit, offset);
await using var connection = await _dataSource.OpenConnectionAsync(tenantId, "reader", cancellationToken).ConfigureAwait(false);
await using var command = new NpgsqlCommand(sql, connection);
command.CommandTimeout = _dataSource.CommandTimeoutSeconds;
await using var ctx = OrchestratorDbContextFactory.Create(connection, _dataSource.CommandTimeoutSeconds, DefaultSchema);
foreach (var (name, value) in parameters)
IQueryable<AuditEntryEntity> query = ctx.AuditEntries
.AsNoTracking()
.Where(e => e.TenantId == tenantId);
if (eventType.HasValue)
{
command.Parameters.AddWithValue(name, value);
var eventTypeValue = (int)eventType.Value;
query = query.Where(e => e.EventType == eventTypeValue);
}
await using var reader = await command.ExecuteReaderAsync(cancellationToken).ConfigureAwait(false);
var entries = new List<AuditEntry>();
while (await reader.ReadAsync(cancellationToken).ConfigureAwait(false))
if (resourceType is not null)
{
entries.Add(MapEntry(reader));
query = query.Where(e => e.ResourceType == resourceType);
}
return entries;
if (resourceId.HasValue)
{
query = query.Where(e => e.ResourceId == resourceId.Value);
}
if (actorId is not null)
{
query = query.Where(e => e.ActorId == actorId);
}
if (startTime.HasValue)
{
query = query.Where(e => e.OccurredAt >= startTime.Value);
}
if (endTime.HasValue)
{
query = query.Where(e => e.OccurredAt <= endTime.Value);
}
var entities = await query
.OrderByDescending(e => e.OccurredAt)
.Skip(offset)
.Take(limit)
.ToListAsync(cancellationToken)
.ConfigureAwait(false);
return entities.Select(MapEntry).ToList();
}
public async Task<IReadOnlyList<AuditEntry>> GetBySequenceRangeAsync(
@@ -234,29 +242,17 @@ public sealed class PostgresAuditRepository : IAuditRepository
long endSequence,
CancellationToken cancellationToken = default)
{
var sql = $"""
SELECT {SelectAuditColumns}
FROM audit_entries
WHERE tenant_id = @tenant_id
AND sequence_number >= @start_seq
AND sequence_number <= @end_seq
ORDER BY sequence_number ASC
""";
await using var connection = await _dataSource.OpenConnectionAsync(tenantId, "reader", cancellationToken).ConfigureAwait(false);
await using var command = new NpgsqlCommand(sql, connection);
command.CommandTimeout = _dataSource.CommandTimeoutSeconds;
command.Parameters.AddWithValue("tenant_id", tenantId);
command.Parameters.AddWithValue("start_seq", startSequence);
command.Parameters.AddWithValue("end_seq", endSequence);
await using var ctx = OrchestratorDbContextFactory.Create(connection, _dataSource.CommandTimeoutSeconds, DefaultSchema);
await using var reader = await command.ExecuteReaderAsync(cancellationToken).ConfigureAwait(false);
var entries = new List<AuditEntry>();
while (await reader.ReadAsync(cancellationToken).ConfigureAwait(false))
{
entries.Add(MapEntry(reader));
}
return entries;
var entities = await ctx.AuditEntries
.AsNoTracking()
.Where(e => e.TenantId == tenantId && e.SequenceNumber >= startSequence && e.SequenceNumber <= endSequence)
.OrderBy(e => e.SequenceNumber)
.ToListAsync(cancellationToken)
.ConfigureAwait(false);
return entities.Select(MapEntry).ToList();
}
public async Task<AuditEntry?> GetLatestAsync(
@@ -264,17 +260,16 @@ public sealed class PostgresAuditRepository : IAuditRepository
CancellationToken cancellationToken = default)
{
await using var connection = await _dataSource.OpenConnectionAsync(tenantId, "reader", cancellationToken).ConfigureAwait(false);
await using var command = new NpgsqlCommand(SelectLatestSql, connection);
command.CommandTimeout = _dataSource.CommandTimeoutSeconds;
command.Parameters.AddWithValue("tenant_id", tenantId);
await using var ctx = OrchestratorDbContextFactory.Create(connection, _dataSource.CommandTimeoutSeconds, DefaultSchema);
await using var reader = await command.ExecuteReaderAsync(cancellationToken).ConfigureAwait(false);
if (!await reader.ReadAsync(cancellationToken).ConfigureAwait(false))
{
return null;
}
var entity = await ctx.AuditEntries
.AsNoTracking()
.Where(e => e.TenantId == tenantId)
.OrderByDescending(e => e.SequenceNumber)
.FirstOrDefaultAsync(cancellationToken)
.ConfigureAwait(false);
return MapEntry(reader);
return entity is null ? null : MapEntry(entity);
}
public async Task<IReadOnlyList<AuditEntry>> GetByResourceAsync(
@@ -284,31 +279,18 @@ public sealed class PostgresAuditRepository : IAuditRepository
int limit = 100,
CancellationToken cancellationToken = default)
{
var sql = $"""
SELECT {SelectAuditColumns}
FROM audit_entries
WHERE tenant_id = @tenant_id
AND resource_type = @resource_type
AND resource_id = @resource_id
ORDER BY occurred_at DESC
LIMIT @limit
""";
await using var connection = await _dataSource.OpenConnectionAsync(tenantId, "reader", cancellationToken).ConfigureAwait(false);
await using var command = new NpgsqlCommand(sql, connection);
command.CommandTimeout = _dataSource.CommandTimeoutSeconds;
command.Parameters.AddWithValue("tenant_id", tenantId);
command.Parameters.AddWithValue("resource_type", resourceType);
command.Parameters.AddWithValue("resource_id", resourceId);
command.Parameters.AddWithValue("limit", limit);
await using var ctx = OrchestratorDbContextFactory.Create(connection, _dataSource.CommandTimeoutSeconds, DefaultSchema);
await using var reader = await command.ExecuteReaderAsync(cancellationToken).ConfigureAwait(false);
var entries = new List<AuditEntry>();
while (await reader.ReadAsync(cancellationToken).ConfigureAwait(false))
{
entries.Add(MapEntry(reader));
}
return entries;
var entities = await ctx.AuditEntries
.AsNoTracking()
.Where(e => e.TenantId == tenantId && e.ResourceType == resourceType && e.ResourceId == resourceId)
.OrderByDescending(e => e.OccurredAt)
.Take(limit)
.ToListAsync(cancellationToken)
.ConfigureAwait(false);
return entities.Select(MapEntry).ToList();
}
public async Task<long> GetCountAsync(
@@ -318,38 +300,30 @@ public sealed class PostgresAuditRepository : IAuditRepository
DateTimeOffset? endTime = null,
CancellationToken cancellationToken = default)
{
var sb = new StringBuilder("SELECT COUNT(*) FROM audit_entries WHERE tenant_id = @tenant_id");
var parameters = new List<(string, object)> { ("tenant_id", tenantId) };
await using var connection = await _dataSource.OpenConnectionAsync(tenantId, "reader", cancellationToken).ConfigureAwait(false);
await using var ctx = OrchestratorDbContextFactory.Create(connection, _dataSource.CommandTimeoutSeconds, DefaultSchema);
IQueryable<AuditEntryEntity> query = ctx.AuditEntries
.AsNoTracking()
.Where(e => e.TenantId == tenantId);
if (eventType.HasValue)
{
sb.Append(" AND event_type = @event_type");
parameters.Add(("event_type", (int)eventType.Value));
var eventTypeValue = (int)eventType.Value;
query = query.Where(e => e.EventType == eventTypeValue);
}
if (startTime.HasValue)
{
sb.Append(" AND occurred_at >= @start_time");
parameters.Add(("start_time", startTime.Value));
query = query.Where(e => e.OccurredAt >= startTime.Value);
}
if (endTime.HasValue)
{
sb.Append(" AND occurred_at <= @end_time");
parameters.Add(("end_time", endTime.Value));
query = query.Where(e => e.OccurredAt <= endTime.Value);
}
await using var connection = await _dataSource.OpenConnectionAsync(tenantId, "reader", cancellationToken).ConfigureAwait(false);
await using var command = new NpgsqlCommand(sb.ToString(), connection);
command.CommandTimeout = _dataSource.CommandTimeoutSeconds;
foreach (var (name, value) in parameters)
{
command.Parameters.AddWithValue(name, value);
}
var result = await command.ExecuteScalarAsync(cancellationToken).ConfigureAwait(false);
return Convert.ToInt64(result);
return await query.LongCountAsync(cancellationToken).ConfigureAwait(false);
}
public async Task<ChainVerificationResult> VerifyChainAsync(
@@ -429,87 +403,25 @@ public sealed class PostgresAuditRepository : IAuditRepository
command.Parameters.AddWithValue("metadata", (object?)entry.Metadata ?? DBNull.Value);
}
private static AuditEntry MapEntry(NpgsqlDataReader reader)
{
return new AuditEntry(
EntryId: reader.GetGuid(0),
TenantId: reader.GetString(1),
EventType: (AuditEventType)reader.GetInt32(2),
ResourceType: reader.GetString(3),
ResourceId: reader.GetGuid(4),
ActorId: reader.GetString(5),
ActorType: (ActorType)reader.GetInt32(6),
ActorIp: reader.IsDBNull(7) ? null : reader.GetString(7),
UserAgent: reader.IsDBNull(8) ? null : reader.GetString(8),
HttpMethod: reader.IsDBNull(9) ? null : reader.GetString(9),
RequestPath: reader.IsDBNull(10) ? null : reader.GetString(10),
OldState: reader.IsDBNull(11) ? null : reader.GetString(11),
NewState: reader.IsDBNull(12) ? null : reader.GetString(12),
Description: reader.GetString(13),
CorrelationId: reader.IsDBNull(14) ? null : reader.GetString(14),
PreviousEntryHash: reader.IsDBNull(15) ? null : reader.GetString(15),
ContentHash: reader.GetString(16),
SequenceNumber: reader.GetInt64(17),
OccurredAt: reader.GetFieldValue<DateTimeOffset>(18),
Metadata: reader.IsDBNull(19) ? null : reader.GetString(19));
}
private static (string sql, List<(string name, object value)> parameters) BuildListQuery(
string tenantId,
AuditEventType? eventType,
string? resourceType,
Guid? resourceId,
string? actorId,
DateTimeOffset? startTime,
DateTimeOffset? endTime,
int limit,
int offset)
{
var sb = new StringBuilder();
sb.Append($"SELECT {SelectAuditColumns} FROM audit_entries WHERE tenant_id = @tenant_id");
var parameters = new List<(string, object)> { ("tenant_id", tenantId) };
if (eventType.HasValue)
{
sb.Append(" AND event_type = @event_type");
parameters.Add(("event_type", (int)eventType.Value));
}
if (resourceType is not null)
{
sb.Append(" AND resource_type = @resource_type");
parameters.Add(("resource_type", resourceType));
}
if (resourceId.HasValue)
{
sb.Append(" AND resource_id = @resource_id");
parameters.Add(("resource_id", resourceId.Value));
}
if (actorId is not null)
{
sb.Append(" AND actor_id = @actor_id");
parameters.Add(("actor_id", actorId));
}
if (startTime.HasValue)
{
sb.Append(" AND occurred_at >= @start_time");
parameters.Add(("start_time", startTime.Value));
}
if (endTime.HasValue)
{
sb.Append(" AND occurred_at <= @end_time");
parameters.Add(("end_time", endTime.Value));
}
sb.Append(" ORDER BY occurred_at DESC LIMIT @limit OFFSET @offset");
parameters.Add(("limit", limit));
parameters.Add(("offset", offset));
return (sb.ToString(), parameters);
}
private static AuditEntry MapEntry(AuditEntryEntity e) => new(
EntryId: e.EntryId,
TenantId: e.TenantId,
EventType: (AuditEventType)e.EventType,
ResourceType: e.ResourceType,
ResourceId: e.ResourceId,
ActorId: e.ActorId,
ActorType: (ActorType)e.ActorType,
ActorIp: e.ActorIp,
UserAgent: e.UserAgent,
HttpMethod: e.HttpMethod,
RequestPath: e.RequestPath,
OldState: e.OldState,
NewState: e.NewState,
Description: e.Description,
CorrelationId: e.CorrelationId,
PreviousEntryHash: e.PreviousEntryHash,
ContentHash: e.ContentHash,
SequenceNumber: e.SequenceNumber,
OccurredAt: e.OccurredAt,
Metadata: e.Metadata);
}

View File

@@ -1,7 +1,9 @@
using Microsoft.EntityFrameworkCore;
using Microsoft.Extensions.Logging;
using Npgsql;
using StellaOps.Orchestrator.Core.Domain;
using StellaOps.Orchestrator.Infrastructure.EfCore.Models;
using StellaOps.Orchestrator.Infrastructure.Repositories;
using System.Text;
@@ -9,27 +11,11 @@ namespace StellaOps.Orchestrator.Infrastructure.Postgres;
/// <summary>
/// PostgreSQL implementation of the ledger repository.
/// Read methods use EF Core LINQ; write/PL/pgSQL methods remain as raw SQL.
/// </summary>
public sealed class PostgresLedgerRepository : ILedgerRepository
{
private const string SelectLedgerColumns = """
ledger_id, tenant_id, run_id, source_id, run_type, final_status, total_jobs,
succeeded_jobs, failed_jobs, run_created_at, run_started_at, run_completed_at,
execution_duration_ms, initiated_by, input_digest, output_digest, artifact_manifest,
sequence_number, previous_entry_hash, content_hash, ledger_created_at, correlation_id, metadata
""";
private const string SelectByIdSql = $"""
SELECT {SelectLedgerColumns}
FROM run_ledger_entries
WHERE tenant_id = @tenant_id AND ledger_id = @ledger_id
""";
private const string SelectByRunIdSql = $"""
SELECT {SelectLedgerColumns}
FROM run_ledger_entries
WHERE tenant_id = @tenant_id AND run_id = @run_id
""";
private const string DefaultSchema = OrchestratorDbContextFactory.DefaultSchemaName;
private const string InsertEntrySql = """
INSERT INTO run_ledger_entries (
@@ -44,14 +30,6 @@ public sealed class PostgresLedgerRepository : ILedgerRepository
@sequence_number, @previous_entry_hash, @content_hash, @ledger_created_at, @correlation_id, @metadata::jsonb)
""";
private const string SelectLatestSql = $"""
SELECT {SelectLedgerColumns}
FROM run_ledger_entries
WHERE tenant_id = @tenant_id
ORDER BY sequence_number DESC
LIMIT 1
""";
private const string GetSequenceSql = """
SELECT next_seq, prev_hash FROM next_ledger_sequence(@tenant_id)
""";
@@ -170,18 +148,14 @@ public sealed class PostgresLedgerRepository : ILedgerRepository
CancellationToken cancellationToken = default)
{
await using var connection = await _dataSource.OpenConnectionAsync(tenantId, "reader", cancellationToken).ConfigureAwait(false);
await using var command = new NpgsqlCommand(SelectByIdSql, connection);
command.CommandTimeout = _dataSource.CommandTimeoutSeconds;
command.Parameters.AddWithValue("tenant_id", tenantId);
command.Parameters.AddWithValue("ledger_id", ledgerId);
await using var ctx = OrchestratorDbContextFactory.Create(connection, _dataSource.CommandTimeoutSeconds, DefaultSchema);
await using var reader = await command.ExecuteReaderAsync(cancellationToken).ConfigureAwait(false);
if (!await reader.ReadAsync(cancellationToken).ConfigureAwait(false))
{
return null;
}
var entity = await ctx.RunLedgerEntries
.AsNoTracking()
.FirstOrDefaultAsync(e => e.TenantId == tenantId && e.LedgerId == ledgerId, cancellationToken)
.ConfigureAwait(false);
return MapEntry(reader);
return entity is null ? null : MapEntry(entity);
}
public async Task<RunLedgerEntry?> GetByRunIdAsync(
@@ -190,18 +164,14 @@ public sealed class PostgresLedgerRepository : ILedgerRepository
CancellationToken cancellationToken = default)
{
await using var connection = await _dataSource.OpenConnectionAsync(tenantId, "reader", cancellationToken).ConfigureAwait(false);
await using var command = new NpgsqlCommand(SelectByRunIdSql, connection);
command.CommandTimeout = _dataSource.CommandTimeoutSeconds;
command.Parameters.AddWithValue("tenant_id", tenantId);
command.Parameters.AddWithValue("run_id", runId);
await using var ctx = OrchestratorDbContextFactory.Create(connection, _dataSource.CommandTimeoutSeconds, DefaultSchema);
await using var reader = await command.ExecuteReaderAsync(cancellationToken).ConfigureAwait(false);
if (!await reader.ReadAsync(cancellationToken).ConfigureAwait(false))
{
return null;
}
var entity = await ctx.RunLedgerEntries
.AsNoTracking()
.FirstOrDefaultAsync(e => e.TenantId == tenantId && e.RunId == runId, cancellationToken)
.ConfigureAwait(false);
return MapEntry(reader);
return entity is null ? null : MapEntry(entity);
}
public async Task<IReadOnlyList<RunLedgerEntry>> ListAsync(
@@ -215,24 +185,47 @@ public sealed class PostgresLedgerRepository : ILedgerRepository
int offset = 0,
CancellationToken cancellationToken = default)
{
var (sql, parameters) = BuildListQuery(tenantId, runType, sourceId, finalStatus, startTime, endTime, limit, offset);
await using var connection = await _dataSource.OpenConnectionAsync(tenantId, "reader", cancellationToken).ConfigureAwait(false);
await using var command = new NpgsqlCommand(sql, connection);
command.CommandTimeout = _dataSource.CommandTimeoutSeconds;
await using var ctx = OrchestratorDbContextFactory.Create(connection, _dataSource.CommandTimeoutSeconds, DefaultSchema);
foreach (var (name, value) in parameters)
IQueryable<RunLedgerEntryEntity> query = ctx.RunLedgerEntries
.AsNoTracking()
.Where(e => e.TenantId == tenantId);
if (runType is not null)
{
command.Parameters.AddWithValue(name, value);
query = query.Where(e => e.RunType == runType);
}
await using var reader = await command.ExecuteReaderAsync(cancellationToken).ConfigureAwait(false);
var entries = new List<RunLedgerEntry>();
while (await reader.ReadAsync(cancellationToken).ConfigureAwait(false))
if (sourceId.HasValue)
{
entries.Add(MapEntry(reader));
query = query.Where(e => e.SourceId == sourceId.Value);
}
return entries;
if (finalStatus.HasValue)
{
var statusValue = (int)finalStatus.Value;
query = query.Where(e => e.FinalStatus == statusValue);
}
if (startTime.HasValue)
{
query = query.Where(e => e.LedgerCreatedAt >= startTime.Value);
}
if (endTime.HasValue)
{
query = query.Where(e => e.LedgerCreatedAt <= endTime.Value);
}
var entities = await query
.OrderByDescending(e => e.LedgerCreatedAt)
.Skip(offset)
.Take(limit)
.ToListAsync(cancellationToken)
.ConfigureAwait(false);
return entities.Select(MapEntry).ToList();
}
public async Task<IReadOnlyList<RunLedgerEntry>> GetBySequenceRangeAsync(
@@ -241,29 +234,17 @@ public sealed class PostgresLedgerRepository : ILedgerRepository
long endSequence,
CancellationToken cancellationToken = default)
{
var sql = $"""
SELECT {SelectLedgerColumns}
FROM run_ledger_entries
WHERE tenant_id = @tenant_id
AND sequence_number >= @start_seq
AND sequence_number <= @end_seq
ORDER BY sequence_number ASC
""";
await using var connection = await _dataSource.OpenConnectionAsync(tenantId, "reader", cancellationToken).ConfigureAwait(false);
await using var command = new NpgsqlCommand(sql, connection);
command.CommandTimeout = _dataSource.CommandTimeoutSeconds;
command.Parameters.AddWithValue("tenant_id", tenantId);
command.Parameters.AddWithValue("start_seq", startSequence);
command.Parameters.AddWithValue("end_seq", endSequence);
await using var ctx = OrchestratorDbContextFactory.Create(connection, _dataSource.CommandTimeoutSeconds, DefaultSchema);
await using var reader = await command.ExecuteReaderAsync(cancellationToken).ConfigureAwait(false);
var entries = new List<RunLedgerEntry>();
while (await reader.ReadAsync(cancellationToken).ConfigureAwait(false))
{
entries.Add(MapEntry(reader));
}
return entries;
var entities = await ctx.RunLedgerEntries
.AsNoTracking()
.Where(e => e.TenantId == tenantId && e.SequenceNumber >= startSequence && e.SequenceNumber <= endSequence)
.OrderBy(e => e.SequenceNumber)
.ToListAsync(cancellationToken)
.ConfigureAwait(false);
return entities.Select(MapEntry).ToList();
}
public async Task<RunLedgerEntry?> GetLatestAsync(
@@ -271,17 +252,16 @@ public sealed class PostgresLedgerRepository : ILedgerRepository
CancellationToken cancellationToken = default)
{
await using var connection = await _dataSource.OpenConnectionAsync(tenantId, "reader", cancellationToken).ConfigureAwait(false);
await using var command = new NpgsqlCommand(SelectLatestSql, connection);
command.CommandTimeout = _dataSource.CommandTimeoutSeconds;
command.Parameters.AddWithValue("tenant_id", tenantId);
await using var ctx = OrchestratorDbContextFactory.Create(connection, _dataSource.CommandTimeoutSeconds, DefaultSchema);
await using var reader = await command.ExecuteReaderAsync(cancellationToken).ConfigureAwait(false);
if (!await reader.ReadAsync(cancellationToken).ConfigureAwait(false))
{
return null;
}
var entity = await ctx.RunLedgerEntries
.AsNoTracking()
.Where(e => e.TenantId == tenantId)
.OrderByDescending(e => e.SequenceNumber)
.FirstOrDefaultAsync(cancellationToken)
.ConfigureAwait(false);
return MapEntry(reader);
return entity is null ? null : MapEntry(entity);
}
public async Task<IReadOnlyList<RunLedgerEntry>> GetBySourceAsync(
@@ -290,29 +270,18 @@ public sealed class PostgresLedgerRepository : ILedgerRepository
int limit = 100,
CancellationToken cancellationToken = default)
{
var sql = $"""
SELECT {SelectLedgerColumns}
FROM run_ledger_entries
WHERE tenant_id = @tenant_id
AND source_id = @source_id
ORDER BY ledger_created_at DESC
LIMIT @limit
""";
await using var connection = await _dataSource.OpenConnectionAsync(tenantId, "reader", cancellationToken).ConfigureAwait(false);
await using var command = new NpgsqlCommand(sql, connection);
command.CommandTimeout = _dataSource.CommandTimeoutSeconds;
command.Parameters.AddWithValue("tenant_id", tenantId);
command.Parameters.AddWithValue("source_id", sourceId);
command.Parameters.AddWithValue("limit", limit);
await using var ctx = OrchestratorDbContextFactory.Create(connection, _dataSource.CommandTimeoutSeconds, DefaultSchema);
await using var reader = await command.ExecuteReaderAsync(cancellationToken).ConfigureAwait(false);
var entries = new List<RunLedgerEntry>();
while (await reader.ReadAsync(cancellationToken).ConfigureAwait(false))
{
entries.Add(MapEntry(reader));
}
return entries;
var entities = await ctx.RunLedgerEntries
.AsNoTracking()
.Where(e => e.TenantId == tenantId && e.SourceId == sourceId)
.OrderByDescending(e => e.LedgerCreatedAt)
.Take(limit)
.ToListAsync(cancellationToken)
.ConfigureAwait(false);
return entities.Select(MapEntry).ToList();
}
public async Task<long> GetCountAsync(
@@ -323,44 +292,34 @@ public sealed class PostgresLedgerRepository : ILedgerRepository
DateTimeOffset? endTime = null,
CancellationToken cancellationToken = default)
{
var sb = new StringBuilder("SELECT COUNT(*) FROM run_ledger_entries WHERE tenant_id = @tenant_id");
var parameters = new List<(string, object)> { ("tenant_id", tenantId) };
await using var connection = await _dataSource.OpenConnectionAsync(tenantId, "reader", cancellationToken).ConfigureAwait(false);
await using var ctx = OrchestratorDbContextFactory.Create(connection, _dataSource.CommandTimeoutSeconds, DefaultSchema);
IQueryable<RunLedgerEntryEntity> query = ctx.RunLedgerEntries
.AsNoTracking()
.Where(e => e.TenantId == tenantId);
if (runType is not null)
{
sb.Append(" AND run_type = @run_type");
parameters.Add(("run_type", runType));
query = query.Where(e => e.RunType == runType);
}
if (sourceId.HasValue)
{
sb.Append(" AND source_id = @source_id");
parameters.Add(("source_id", sourceId.Value));
query = query.Where(e => e.SourceId == sourceId.Value);
}
if (startTime.HasValue)
{
sb.Append(" AND ledger_created_at >= @start_time");
parameters.Add(("start_time", startTime.Value));
query = query.Where(e => e.LedgerCreatedAt >= startTime.Value);
}
if (endTime.HasValue)
{
sb.Append(" AND ledger_created_at <= @end_time");
parameters.Add(("end_time", endTime.Value));
query = query.Where(e => e.LedgerCreatedAt <= endTime.Value);
}
await using var connection = await _dataSource.OpenConnectionAsync(tenantId, "reader", cancellationToken).ConfigureAwait(false);
await using var command = new NpgsqlCommand(sb.ToString(), connection);
command.CommandTimeout = _dataSource.CommandTimeoutSeconds;
foreach (var (name, value) in parameters)
{
command.Parameters.AddWithValue(name, value);
}
var result = await command.ExecuteScalarAsync(cancellationToken).ConfigureAwait(false);
return Convert.ToInt64(result);
return await query.LongCountAsync(cancellationToken).ConfigureAwait(false);
}
public async Task<ChainVerificationResult> VerifyChainAsync(
@@ -446,85 +405,30 @@ public sealed class PostgresLedgerRepository : ILedgerRepository
command.Parameters.AddWithValue("metadata", (object?)entry.Metadata ?? DBNull.Value);
}
private static RunLedgerEntry MapEntry(NpgsqlDataReader reader)
{
return new RunLedgerEntry(
LedgerId: reader.GetGuid(0),
TenantId: reader.GetString(1),
RunId: reader.GetGuid(2),
SourceId: reader.GetGuid(3),
RunType: reader.GetString(4),
FinalStatus: (RunStatus)reader.GetInt32(5),
TotalJobs: reader.GetInt32(6),
SucceededJobs: reader.GetInt32(7),
FailedJobs: reader.GetInt32(8),
RunCreatedAt: reader.GetFieldValue<DateTimeOffset>(9),
RunStartedAt: reader.IsDBNull(10) ? null : reader.GetFieldValue<DateTimeOffset>(10),
RunCompletedAt: reader.GetFieldValue<DateTimeOffset>(11),
ExecutionDuration: TimeSpan.FromMilliseconds(reader.GetInt64(12)),
InitiatedBy: reader.GetString(13),
InputDigest: reader.GetString(14),
OutputDigest: reader.GetString(15),
ArtifactManifest: reader.GetString(16),
SequenceNumber: reader.GetInt64(17),
PreviousEntryHash: reader.IsDBNull(18) ? null : reader.GetString(18),
ContentHash: reader.GetString(19),
LedgerCreatedAt: reader.GetFieldValue<DateTimeOffset>(20),
CorrelationId: reader.IsDBNull(21) ? null : reader.GetString(21),
Metadata: reader.IsDBNull(22) ? null : reader.GetString(22));
}
private static (string sql, List<(string name, object value)> parameters) BuildListQuery(
string tenantId,
string? runType,
Guid? sourceId,
RunStatus? finalStatus,
DateTimeOffset? startTime,
DateTimeOffset? endTime,
int limit,
int offset)
{
var sb = new StringBuilder();
sb.Append($"SELECT {SelectLedgerColumns} FROM run_ledger_entries WHERE tenant_id = @tenant_id");
var parameters = new List<(string, object)> { ("tenant_id", tenantId) };
if (runType is not null)
{
sb.Append(" AND run_type = @run_type");
parameters.Add(("run_type", runType));
}
if (sourceId.HasValue)
{
sb.Append(" AND source_id = @source_id");
parameters.Add(("source_id", sourceId.Value));
}
if (finalStatus.HasValue)
{
sb.Append(" AND final_status = @final_status");
parameters.Add(("final_status", (int)finalStatus.Value));
}
if (startTime.HasValue)
{
sb.Append(" AND ledger_created_at >= @start_time");
parameters.Add(("start_time", startTime.Value));
}
if (endTime.HasValue)
{
sb.Append(" AND ledger_created_at <= @end_time");
parameters.Add(("end_time", endTime.Value));
}
sb.Append(" ORDER BY ledger_created_at DESC LIMIT @limit OFFSET @offset");
parameters.Add(("limit", limit));
parameters.Add(("offset", offset));
return (sb.ToString(), parameters);
}
private static RunLedgerEntry MapEntry(RunLedgerEntryEntity e) => new(
LedgerId: e.LedgerId,
TenantId: e.TenantId,
RunId: e.RunId,
SourceId: e.SourceId,
RunType: e.RunType,
FinalStatus: (RunStatus)e.FinalStatus,
TotalJobs: e.TotalJobs,
SucceededJobs: e.SucceededJobs,
FailedJobs: e.FailedJobs,
RunCreatedAt: e.RunCreatedAt,
RunStartedAt: e.RunStartedAt,
RunCompletedAt: e.RunCompletedAt,
ExecutionDuration: TimeSpan.FromMilliseconds(e.ExecutionDurationMs),
InitiatedBy: e.InitiatedBy,
InputDigest: e.InputDigest,
OutputDigest: e.OutputDigest,
ArtifactManifest: e.ArtifactManifest,
SequenceNumber: e.SequenceNumber,
PreviousEntryHash: e.PreviousEntryHash,
ContentHash: e.ContentHash,
LedgerCreatedAt: e.LedgerCreatedAt,
CorrelationId: e.CorrelationId,
Metadata: e.Metadata);
}
/// <summary>

View File

@@ -1,59 +1,20 @@
using Microsoft.EntityFrameworkCore;
using Microsoft.Extensions.Logging;
using Npgsql;
using NpgsqlTypes;
using PackLogLevel = StellaOps.Orchestrator.Core.Domain.LogLevel;
using StellaOps.Orchestrator.Core.Domain;
using StellaOps.Orchestrator.Infrastructure.EfCore.Models;
using StellaOps.Orchestrator.Infrastructure.Repositories;
using PackLogLevel = StellaOps.Orchestrator.Core.Domain.LogLevel;
namespace StellaOps.Orchestrator.Infrastructure.Postgres;
/// <summary>
/// PostgreSQL implementation for pack run logs.
/// Uses EF Core for all CRUD operations.
/// </summary>
public sealed class PostgresPackRunLogRepository : IPackRunLogRepository
{
private const string Columns = "log_id, pack_run_id, tenant_id, sequence, log_level, source, message, data, created_at, digest, size_bytes";
private const string InsertSql = """
INSERT INTO pack_run_logs (log_id, tenant_id, pack_run_id, sequence, log_level, source, message, data, created_at, digest, size_bytes)
VALUES (@log_id, @tenant_id, @pack_run_id, @sequence, @log_level, @source, @message, @data, @created_at, @digest, @size_bytes)
""";
private const string SelectLogsSql = $"""
SELECT {Columns}
FROM pack_run_logs
WHERE tenant_id = @tenant_id AND pack_run_id = @pack_run_id AND sequence > @after
ORDER BY sequence
LIMIT @limit
""";
private const string SelectLogsByLevelSql = $"""
SELECT {Columns}
FROM pack_run_logs
WHERE tenant_id = @tenant_id AND pack_run_id = @pack_run_id AND sequence > @after AND log_level >= @min_level
ORDER BY sequence
LIMIT @limit
""";
private const string SearchLogsSql = $"""
SELECT {Columns}
FROM pack_run_logs
WHERE tenant_id = @tenant_id AND pack_run_id = @pack_run_id AND sequence > @after AND message ILIKE @pattern
ORDER BY sequence
LIMIT @limit
""";
private const string StatsSql = """
SELECT COUNT(*)::BIGINT, COALESCE(MAX(sequence), -1)
FROM pack_run_logs
WHERE tenant_id = @tenant_id AND pack_run_id = @pack_run_id
""";
private const string DeleteSql = """
DELETE FROM pack_run_logs
WHERE tenant_id = @tenant_id AND pack_run_id = @pack_run_id
""";
private const string DefaultSchema = OrchestratorDbContextFactory.DefaultSchemaName;
private readonly OrchestratorDataSource _dataSource;
private readonly ILogger<PostgresPackRunLogRepository> _logger;
@@ -67,12 +28,10 @@ public sealed class PostgresPackRunLogRepository : IPackRunLogRepository
public async Task AppendAsync(PackRunLog log, CancellationToken cancellationToken)
{
await using var connection = await _dataSource.OpenConnectionAsync(log.TenantId, "writer", cancellationToken).ConfigureAwait(false);
await using var command = new NpgsqlCommand(InsertSql, connection);
command.CommandTimeout = _dataSource.CommandTimeoutSeconds;
await using var ctx = OrchestratorDbContextFactory.Create(connection, _dataSource.CommandTimeoutSeconds, DefaultSchema);
AddParameters(command.Parameters, log);
await command.ExecuteNonQueryAsync(cancellationToken).ConfigureAwait(false);
ctx.PackRunLogs.Add(ToEntity(log));
await ctx.SaveChangesAsync(cancellationToken).ConfigureAwait(false);
}
public async Task AppendBatchAsync(IReadOnlyList<PackRunLog> logs, CancellationToken cancellationToken)
@@ -84,140 +43,129 @@ public sealed class PostgresPackRunLogRepository : IPackRunLogRepository
var tenantId = logs[0].TenantId;
await using var connection = await _dataSource.OpenConnectionAsync(tenantId, "writer", cancellationToken).ConfigureAwait(false);
await using var batch = new NpgsqlBatch(connection);
foreach (var log in logs)
{
var cmd = new NpgsqlBatchCommand(InsertSql);
AddParameters(cmd.Parameters, log);
batch.BatchCommands.Add(cmd);
}
await using var ctx = OrchestratorDbContextFactory.Create(connection, _dataSource.CommandTimeoutSeconds, DefaultSchema);
await batch.ExecuteNonQueryAsync(cancellationToken).ConfigureAwait(false);
ctx.PackRunLogs.AddRange(logs.Select(ToEntity));
await ctx.SaveChangesAsync(cancellationToken).ConfigureAwait(false);
}
public async Task<PackRunLogBatch> GetLogsAsync(string tenantId, Guid packRunId, long afterSequence, int limit, CancellationToken cancellationToken)
{
await using var connection = await _dataSource.OpenConnectionAsync(tenantId, "reader", cancellationToken).ConfigureAwait(false);
await using var command = new NpgsqlCommand(SelectLogsSql, connection);
command.CommandTimeout = _dataSource.CommandTimeoutSeconds;
command.Parameters.AddWithValue("tenant_id", tenantId);
command.Parameters.AddWithValue("pack_run_id", packRunId);
command.Parameters.AddWithValue("after", afterSequence);
command.Parameters.AddWithValue("limit", limit);
await using var ctx = OrchestratorDbContextFactory.Create(connection, _dataSource.CommandTimeoutSeconds, DefaultSchema);
await using var reader = await command.ExecuteReaderAsync(cancellationToken).ConfigureAwait(false);
return await ReadBatchAsync(reader, tenantId, packRunId, cancellationToken).ConfigureAwait(false);
var entities = await ctx.PackRunLogs
.AsNoTracking()
.Where(l => l.TenantId == tenantId && l.PackRunId == packRunId && l.Sequence > afterSequence)
.OrderBy(l => l.Sequence)
.Take(limit)
.ToListAsync(cancellationToken)
.ConfigureAwait(false);
return ToBatch(entities, tenantId, packRunId);
}
public async Task<(long Count, long LatestSequence)> GetLogStatsAsync(string tenantId, Guid packRunId, CancellationToken cancellationToken)
{
await using var connection = await _dataSource.OpenConnectionAsync(tenantId, "reader", cancellationToken).ConfigureAwait(false);
await using var command = new NpgsqlCommand(StatsSql, connection);
command.CommandTimeout = _dataSource.CommandTimeoutSeconds;
command.Parameters.AddWithValue("tenant_id", tenantId);
command.Parameters.AddWithValue("pack_run_id", packRunId);
await using var ctx = OrchestratorDbContextFactory.Create(connection, _dataSource.CommandTimeoutSeconds, DefaultSchema);
await using var reader = await command.ExecuteReaderAsync(cancellationToken).ConfigureAwait(false);
if (!await reader.ReadAsync(cancellationToken).ConfigureAwait(false))
var query = ctx.PackRunLogs
.AsNoTracking()
.Where(l => l.TenantId == tenantId && l.PackRunId == packRunId);
var count = await query.LongCountAsync(cancellationToken).ConfigureAwait(false);
if (count == 0)
{
return (0, -1);
}
var count = reader.GetInt64(0);
var latest = reader.GetInt64(1);
return (count, latest);
var latestSequence = await query.MaxAsync(l => l.Sequence, cancellationToken).ConfigureAwait(false);
return (count, latestSequence);
}
public async Task<PackRunLogBatch> GetLogsByLevelAsync(string tenantId, Guid packRunId, PackLogLevel minLevel, long afterSequence, int limit, CancellationToken cancellationToken)
{
await using var connection = await _dataSource.OpenConnectionAsync(tenantId, "reader", cancellationToken).ConfigureAwait(false);
await using var command = new NpgsqlCommand(SelectLogsByLevelSql, connection);
command.CommandTimeout = _dataSource.CommandTimeoutSeconds;
command.Parameters.AddWithValue("tenant_id", tenantId);
command.Parameters.AddWithValue("pack_run_id", packRunId);
command.Parameters.AddWithValue("after", afterSequence);
command.Parameters.AddWithValue("limit", limit);
command.Parameters.AddWithValue("min_level", (int)minLevel);
await using var ctx = OrchestratorDbContextFactory.Create(connection, _dataSource.CommandTimeoutSeconds, DefaultSchema);
await using var reader = await command.ExecuteReaderAsync(cancellationToken).ConfigureAwait(false);
return await ReadBatchAsync(reader, tenantId, packRunId, cancellationToken).ConfigureAwait(false);
var minLevelValue = (short)minLevel;
var entities = await ctx.PackRunLogs
.AsNoTracking()
.Where(l => l.TenantId == tenantId && l.PackRunId == packRunId && l.Sequence > afterSequence && l.LogLevel >= minLevelValue)
.OrderBy(l => l.Sequence)
.Take(limit)
.ToListAsync(cancellationToken)
.ConfigureAwait(false);
return ToBatch(entities, tenantId, packRunId);
}
public async Task<PackRunLogBatch> SearchLogsAsync(string tenantId, Guid packRunId, string pattern, long afterSequence, int limit, CancellationToken cancellationToken)
{
await using var connection = await _dataSource.OpenConnectionAsync(tenantId, "reader", cancellationToken).ConfigureAwait(false);
await using var command = new NpgsqlCommand(SearchLogsSql, connection);
command.CommandTimeout = _dataSource.CommandTimeoutSeconds;
command.Parameters.AddWithValue("tenant_id", tenantId);
command.Parameters.AddWithValue("pack_run_id", packRunId);
command.Parameters.AddWithValue("after", afterSequence);
command.Parameters.AddWithValue("limit", limit);
command.Parameters.AddWithValue("pattern", $"%{pattern}%");
await using var ctx = OrchestratorDbContextFactory.Create(connection, _dataSource.CommandTimeoutSeconds, DefaultSchema);
await using var reader = await command.ExecuteReaderAsync(cancellationToken).ConfigureAwait(false);
return await ReadBatchAsync(reader, tenantId, packRunId, cancellationToken).ConfigureAwait(false);
var iLikePattern = $"%{pattern}%";
var entities = await ctx.PackRunLogs
.AsNoTracking()
.Where(l => l.TenantId == tenantId && l.PackRunId == packRunId && l.Sequence > afterSequence && EF.Functions.ILike(l.Message, iLikePattern))
.OrderBy(l => l.Sequence)
.Take(limit)
.ToListAsync(cancellationToken)
.ConfigureAwait(false);
return ToBatch(entities, tenantId, packRunId);
}
public async Task<long> DeleteLogsAsync(string tenantId, Guid packRunId, CancellationToken cancellationToken)
{
await using var connection = await _dataSource.OpenConnectionAsync(tenantId, "writer", cancellationToken).ConfigureAwait(false);
await using var command = new NpgsqlCommand(DeleteSql, connection);
command.CommandTimeout = _dataSource.CommandTimeoutSeconds;
command.Parameters.AddWithValue("tenant_id", tenantId);
command.Parameters.AddWithValue("pack_run_id", packRunId);
await using var ctx = OrchestratorDbContextFactory.Create(connection, _dataSource.CommandTimeoutSeconds, DefaultSchema);
var rows = await ctx.PackRunLogs
.Where(l => l.TenantId == tenantId && l.PackRunId == packRunId)
.ExecuteDeleteAsync(cancellationToken)
.ConfigureAwait(false);
var rows = await command.ExecuteNonQueryAsync(cancellationToken).ConfigureAwait(false);
return rows;
}
private static void AddParameters(NpgsqlParameterCollection parameters, PackRunLog log)
private static PackRunLogEntity ToEntity(PackRunLog log) => new()
{
parameters.AddWithValue("log_id", log.LogId);
parameters.AddWithValue("tenant_id", log.TenantId);
parameters.AddWithValue("pack_run_id", log.PackRunId);
parameters.AddWithValue("sequence", log.Sequence);
parameters.AddWithValue("log_level", (int)log.Level);
parameters.AddWithValue("source", (object?)log.Source ?? DBNull.Value);
parameters.AddWithValue("message", log.Message);
parameters.Add(new NpgsqlParameter("data", NpgsqlDbType.Jsonb) { Value = (object?)log.Data ?? DBNull.Value });
parameters.AddWithValue("created_at", log.Timestamp);
parameters.AddWithValue("digest", log.Digest);
parameters.AddWithValue("size_bytes", log.SizeBytes);
}
LogId = log.LogId,
TenantId = log.TenantId,
PackRunId = log.PackRunId,
Sequence = log.Sequence,
LogLevel = (short)log.Level,
Source = log.Source,
Message = log.Message,
Data = log.Data,
CreatedAt = log.Timestamp,
Digest = log.Digest,
SizeBytes = log.SizeBytes
};
private static async Task<PackRunLogBatch> ReadBatchAsync(NpgsqlDataReader reader, string tenantId, Guid packRunId, CancellationToken cancellationToken)
private static PackRunLog MapLog(PackRunLogEntity e) => new(
LogId: e.LogId,
TenantId: e.TenantId,
PackRunId: e.PackRunId,
Sequence: e.Sequence,
Level: (PackLogLevel)e.LogLevel,
Source: e.Source ?? "unknown",
Message: e.Message,
Digest: e.Digest,
SizeBytes: e.SizeBytes,
Timestamp: e.CreatedAt,
Data: e.Data);
private static PackRunLogBatch ToBatch(List<PackRunLogEntity> entities, string tenantId, Guid packRunId)
{
var logs = new List<PackRunLog>();
long startSequence = -1;
while (await reader.ReadAsync(cancellationToken).ConfigureAwait(false))
{
var log = new PackRunLog(
LogId: reader.GetGuid(0),
TenantId: reader.GetString(2),
PackRunId: reader.GetGuid(1),
Sequence: reader.GetInt64(3),
Level: (PackLogLevel)reader.GetInt32(4),
Source: reader.IsDBNull(5) ? "unknown" : reader.GetString(5),
Message: reader.GetString(6),
Digest: reader.GetString(9),
SizeBytes: reader.GetInt64(10),
Timestamp: reader.GetFieldValue<DateTimeOffset>(8),
Data: reader.IsDBNull(7) ? null : reader.GetString(7));
if (startSequence < 0)
{
startSequence = log.Sequence;
}
logs.Add(log);
}
if (startSequence < 0)
{
startSequence = 0;
}
var logs = entities.Select(MapLog).ToList();
var startSequence = logs.Count > 0 ? logs[0].Sequence : 0;
return new PackRunLogBatch(packRunId, tenantId, startSequence, logs);
}
}

View File

@@ -1,3 +1,4 @@
using Microsoft.EntityFrameworkCore;
using Microsoft.Extensions.Logging;
using Npgsql;
using StellaOps.Infrastructure.Postgres.Repositories;
@@ -7,6 +8,8 @@ namespace StellaOps.Policy.Persistence.Postgres.Repositories;
/// <summary>
/// PostgreSQL repository for policy exception operations.
/// Simple reads (GetById, GetByName, GetAll, Delete) use EF Core.
/// Complex writes/queries (Create, Update, Approve, Revoke, Expire, regex-based) use raw SQL.
/// </summary>
public sealed class ExceptionRepository : RepositoryBase<PolicyDataSource>, IExceptionRepository
{
@@ -48,35 +51,27 @@ public sealed class ExceptionRepository : RepositoryBase<PolicyDataSource>, IExc
/// <inheritdoc />
public async Task<ExceptionEntity?> GetByIdAsync(string tenantId, Guid id, CancellationToken cancellationToken = default)
{
    // Fix: removed the leftover raw-SQL path (const sql + QuerySingleOrDefaultAsync)
    // that was interleaved with the EF implementation; only the EF path remains.
    await using var connection = await DataSource.OpenConnectionAsync(tenantId, "reader", cancellationToken)
        .ConfigureAwait(false);
    await using var dbContext = PolicyDbContextFactory.Create(connection, CommandTimeoutSeconds, DataSource.SchemaName);

    // Read-only lookup; AsNoTracking avoids change-tracker overhead.
    return await dbContext.Exceptions
        .AsNoTracking()
        .FirstOrDefaultAsync(e => e.TenantId == tenantId && e.Id == id, cancellationToken)
        .ConfigureAwait(false);
}
/// <inheritdoc />
public async Task<ExceptionEntity?> GetByNameAsync(string tenantId, string name, CancellationToken cancellationToken = default)
{
    // Fix: removed the leftover raw-SQL path (const sql + QuerySingleOrDefaultAsync)
    // that was interleaved with the EF implementation; only the EF path remains.
    await using var connection = await DataSource.OpenConnectionAsync(tenantId, "reader", cancellationToken)
        .ConfigureAwait(false);
    await using var dbContext = PolicyDbContextFactory.Create(connection, CommandTimeoutSeconds, DataSource.SchemaName);

    // Read-only lookup by tenant-scoped exception name.
    return await dbContext.Exceptions
        .AsNoTracking()
        .FirstOrDefaultAsync(e => e.TenantId == tenantId && e.Name == name, cancellationToken)
        .ConfigureAwait(false);
}
/// <inheritdoc />
@@ -87,30 +82,26 @@ public sealed class ExceptionRepository : RepositoryBase<PolicyDataSource>, IExc
int offset = 0,
CancellationToken cancellationToken = default)
{
var sql = "SELECT * FROM policy.exceptions WHERE tenant_id = @tenant_id";
await using var connection = await DataSource.OpenConnectionAsync(tenantId, "reader", cancellationToken)
.ConfigureAwait(false);
await using var dbContext = PolicyDbContextFactory.Create(connection, CommandTimeoutSeconds, DataSource.SchemaName);
var q = dbContext.Exceptions
.AsNoTracking()
.Where(e => e.TenantId == tenantId);
if (status.HasValue)
{
sql += " AND status = @status";
q = q.Where(e => e.Status == status.Value);
}
sql += " ORDER BY name, id LIMIT @limit OFFSET @offset";
return await QueryAsync(
tenantId,
sql,
cmd =>
{
AddParameter(cmd, "tenant_id", tenantId);
if (status.HasValue)
{
AddParameter(cmd, "status", StatusToString(status.Value));
}
AddParameter(cmd, "limit", limit);
AddParameter(cmd, "offset", offset);
},
MapException,
cancellationToken).ConfigureAwait(false);
return await q
.OrderBy(e => e.Name)
.ThenBy(e => e.Id)
.Skip(offset)
.Take(limit)
.ToListAsync(cancellationToken)
.ConfigureAwait(false);
}
/// <inheritdoc />
@@ -277,17 +268,14 @@ public sealed class ExceptionRepository : RepositoryBase<PolicyDataSource>, IExc
/// <inheritdoc />
public async Task<bool> DeleteAsync(string tenantId, Guid id, CancellationToken cancellationToken = default)
{
    // Fix: removed the leftover raw-SQL DELETE path (const sql + ExecuteAsync)
    // that was interleaved with the EF implementation; only the EF path remains.
    await using var connection = await DataSource.OpenConnectionAsync(tenantId, "writer", cancellationToken)
        .ConfigureAwait(false);
    await using var dbContext = PolicyDbContextFactory.Create(connection, CommandTimeoutSeconds, DataSource.SchemaName);

    // ExecuteDeleteAsync issues a single set-based DELETE without loading entities.
    var rows = await dbContext.Exceptions
        .Where(e => e.TenantId == tenantId && e.Id == id)
        .ExecuteDeleteAsync(cancellationToken)
        .ConfigureAwait(false);

    // True when the row existed and was removed.
    return rows > 0;
}

View File

@@ -1,26 +1,26 @@
// -----------------------------------------------------------------------------
// GateDecisionHistoryRepository.cs
// Sprint: SPRINT_20260118_019_Policy_gate_replay_api_exposure
// Task: GR-005 - Add gate decision history endpoint
// Description: Repository for querying historical gate decisions
// Sprint: SPRINT_20260225_115_Policy_dal_ef_wrapper_removal_crud_migration
// Task: T1 - Migrate GateDecisionHistoryRepository to EF Core
// Description: Repository for querying historical gate decisions (EF Core)
// -----------------------------------------------------------------------------
using Npgsql;
using System.Data;
using Microsoft.EntityFrameworkCore;
using Microsoft.Extensions.Logging;
using StellaOps.Infrastructure.Postgres.Repositories;
using StellaOps.Policy.Persistence.Postgres.Models;
namespace StellaOps.Policy.Persistence.Postgres.Repositories;
/// <summary>
/// Repository for querying historical gate decisions.
/// Migrated from raw Npgsql to EF Core.
/// </summary>
public sealed class GateDecisionHistoryRepository : IGateDecisionHistoryRepository
public sealed class GateDecisionHistoryRepository : RepositoryBase<PolicyDataSource>, IGateDecisionHistoryRepository
{
/// <summary>
/// Creates the repository over the shared policy data source (EF Core path).
/// Fix: removed the duplicated legacy connection-string constructor and its
/// <c>_connectionString</c> field that were left interleaved with this one.
/// </summary>
public GateDecisionHistoryRepository(PolicyDataSource dataSource, ILogger<GateDecisionHistoryRepository> logger)
    : base(dataSource, logger)
{
}
/// <inheritdoc />
@@ -28,116 +28,52 @@ public sealed class GateDecisionHistoryRepository : IGateDecisionHistoryReposito
GateDecisionHistoryQuery query,
CancellationToken ct = default)
{
await using var conn = new NpgsqlConnection(_connectionString);
await conn.OpenAsync(ct);
await using var connection = await DataSource.OpenConnectionAsync(
query.TenantId.ToString(), "reader", ct).ConfigureAwait(false);
await using var dbContext = PolicyDbContextFactory.Create(connection, CommandTimeoutSeconds, DataSource.SchemaName);
// Build query with filters
var sql = """
SELECT
decision_id,
bom_ref,
image_digest,
gate_status,
verdict_hash,
policy_bundle_id,
policy_bundle_hash,
evaluated_at,
ci_context,
actor,
blocking_unknown_ids,
warnings
FROM policy.gate_decisions
WHERE tenant_id = @tenant_id
""";
var parameters = new List<NpgsqlParameter>
{
new("tenant_id", query.TenantId)
};
var q = dbContext.GateDecisions
.AsNoTracking()
.Where(d => d.TenantId == query.TenantId);
if (!string.IsNullOrEmpty(query.GateId))
{
sql += " AND gate_id = @gate_id";
parameters.Add(new NpgsqlParameter("gate_id", query.GateId));
}
if (query.FromDate.HasValue)
{
sql += " AND evaluated_at >= @from_date";
parameters.Add(new NpgsqlParameter("from_date", query.FromDate.Value));
}
if (query.ToDate.HasValue)
{
sql += " AND evaluated_at <= @to_date";
parameters.Add(new NpgsqlParameter("to_date", query.ToDate.Value));
}
if (!string.IsNullOrEmpty(query.Status))
{
sql += " AND gate_status = @status";
parameters.Add(new NpgsqlParameter("status", query.Status));
}
if (!string.IsNullOrEmpty(query.Actor))
{
sql += " AND actor = @actor";
parameters.Add(new NpgsqlParameter("actor", query.Actor));
}
q = q.Where(d => d.GateId == query.GateId);
if (!string.IsNullOrEmpty(query.BomRef))
{
sql += " AND bom_ref = @bom_ref";
parameters.Add(new NpgsqlParameter("bom_ref", query.BomRef));
}
q = q.Where(d => d.BomRef == query.BomRef);
// Get total count first
var countSql = $"SELECT COUNT(*) FROM ({sql}) AS filtered";
await using var countCmd = new NpgsqlCommand(countSql, conn);
countCmd.Parameters.AddRange(parameters.ToArray());
var totalCount = Convert.ToInt64(await countCmd.ExecuteScalarAsync(ct));
if (query.FromDate.HasValue)
q = q.Where(d => d.EvaluatedAt >= query.FromDate.Value);
// Apply pagination
sql += " ORDER BY evaluated_at DESC";
if (query.ToDate.HasValue)
q = q.Where(d => d.EvaluatedAt <= query.ToDate.Value);
if (!string.IsNullOrEmpty(query.Status))
q = q.Where(d => d.GateStatus == query.Status);
if (!string.IsNullOrEmpty(query.Actor))
q = q.Where(d => d.Actor == query.Actor);
var totalCount = await q.LongCountAsync(ct).ConfigureAwait(false);
var ordered = q.OrderByDescending(d => d.EvaluatedAt)
.ThenByDescending(d => d.DecisionId);
if (!string.IsNullOrEmpty(query.ContinuationToken))
{
var offset = DecodeContinuationToken(query.ContinuationToken);
sql += $" OFFSET {offset}";
ordered = (IOrderedQueryable<GateDecisionEntity>)ordered.Skip((int)offset);
}
sql += $" LIMIT {query.Limit + 1}"; // +1 to detect if there are more results
var entities = await ordered
.Take(query.Limit + 1)
.ToListAsync(ct)
.ConfigureAwait(false);
await using var cmd = new NpgsqlCommand(sql, conn);
cmd.Parameters.AddRange(parameters.Select(p => p.Clone()).Cast<NpgsqlParameter>().ToArray());
var decisions = new List<GateDecisionRecord>();
await using var reader = await cmd.ExecuteReaderAsync(ct);
while (await reader.ReadAsync(ct))
{
decisions.Add(new GateDecisionRecord
{
DecisionId = reader.GetGuid(0),
BomRef = reader.GetString(1),
ImageDigest = reader.IsDBNull(2) ? null : reader.GetString(2),
GateStatus = reader.GetString(3),
VerdictHash = reader.IsDBNull(4) ? null : reader.GetString(4),
PolicyBundleId = reader.IsDBNull(5) ? null : reader.GetString(5),
PolicyBundleHash = reader.IsDBNull(6) ? null : reader.GetString(6),
EvaluatedAt = reader.GetDateTime(7),
CiContext = reader.IsDBNull(8) ? null : reader.GetString(8),
Actor = reader.IsDBNull(9) ? null : reader.GetString(9),
BlockingUnknownIds = reader.IsDBNull(10) ? [] : ParseGuidArray(reader.GetString(10)),
Warnings = reader.IsDBNull(11) ? [] : ParseStringArray(reader.GetString(11))
});
}
// Check if there are more results
var hasMore = decisions.Count > query.Limit;
var hasMore = entities.Count > query.Limit;
if (hasMore)
{
decisions.RemoveAt(decisions.Count - 1);
entities.RemoveAt(entities.Count - 1);
}
string? nextToken = null;
@@ -151,7 +87,7 @@ public sealed class GateDecisionHistoryRepository : IGateDecisionHistoryReposito
return new GateDecisionHistoryResult
{
Decisions = decisions,
Decisions = entities.Select(MapToRecord).ToList(),
Total = totalCount,
ContinuationToken = nextToken
};
@@ -163,90 +99,63 @@ public sealed class GateDecisionHistoryRepository : IGateDecisionHistoryReposito
Guid tenantId,
CancellationToken ct = default)
{
await using var conn = new NpgsqlConnection(_connectionString);
await conn.OpenAsync(ct);
await using var connection = await DataSource.OpenConnectionAsync(
tenantId.ToString(), "reader", ct).ConfigureAwait(false);
await using var dbContext = PolicyDbContextFactory.Create(connection, CommandTimeoutSeconds, DataSource.SchemaName);
const string sql = """
SELECT
decision_id,
bom_ref,
image_digest,
gate_status,
verdict_hash,
policy_bundle_id,
policy_bundle_hash,
evaluated_at,
ci_context,
actor,
blocking_unknown_ids,
warnings
FROM policy.gate_decisions
WHERE decision_id = @decision_id AND tenant_id = @tenant_id
""";
var entity = await dbContext.GateDecisions
.AsNoTracking()
.FirstOrDefaultAsync(d => d.DecisionId == decisionId && d.TenantId == tenantId, ct)
.ConfigureAwait(false);
await using var cmd = new NpgsqlCommand(sql, conn);
cmd.Parameters.AddWithValue("decision_id", decisionId);
cmd.Parameters.AddWithValue("tenant_id", tenantId);
await using var reader = await cmd.ExecuteReaderAsync(ct);
if (!await reader.ReadAsync(ct))
{
return null;
}
return new GateDecisionRecord
{
DecisionId = reader.GetGuid(0),
BomRef = reader.GetString(1),
ImageDigest = reader.IsDBNull(2) ? null : reader.GetString(2),
GateStatus = reader.GetString(3),
VerdictHash = reader.IsDBNull(4) ? null : reader.GetString(4),
PolicyBundleId = reader.IsDBNull(5) ? null : reader.GetString(5),
PolicyBundleHash = reader.IsDBNull(6) ? null : reader.GetString(6),
EvaluatedAt = reader.GetDateTime(7),
CiContext = reader.IsDBNull(8) ? null : reader.GetString(8),
Actor = reader.IsDBNull(9) ? null : reader.GetString(9),
BlockingUnknownIds = reader.IsDBNull(10) ? [] : ParseGuidArray(reader.GetString(10)),
Warnings = reader.IsDBNull(11) ? [] : ParseStringArray(reader.GetString(11))
};
return entity is null ? null : MapToRecord(entity);
}
/// <inheritdoc />
public async Task RecordDecisionAsync(GateDecisionRecord decision, Guid tenantId, CancellationToken ct = default)
{
    // Fix: removed the leftover raw-SQL INSERT path (const sql + NpgsqlCommand with
    // AddWithValue parameters) interleaved with the EF implementation.
    await using var connection = await DataSource.OpenConnectionAsync(
        tenantId.ToString(), "writer", ct).ConfigureAwait(false);
    await using var dbContext = PolicyDbContextFactory.Create(connection, CommandTimeoutSeconds, DataSource.SchemaName);

    var entity = new GateDecisionEntity
    {
        DecisionId = decision.DecisionId,
        TenantId = tenantId,
        GateId = string.Empty, // gate_id not provided by GateDecisionRecord; matches original INSERT
        BomRef = decision.BomRef,
        ImageDigest = decision.ImageDigest,
        GateStatus = decision.GateStatus,
        VerdictHash = decision.VerdictHash,
        PolicyBundleId = decision.PolicyBundleId,
        PolicyBundleHash = decision.PolicyBundleHash,
        EvaluatedAt = decision.EvaluatedAt,
        CiContext = decision.CiContext,
        Actor = decision.Actor,
        // Guid/string lists are persisted as JSON text columns.
        BlockingUnknownIds = SerializeGuidArray(decision.BlockingUnknownIds),
        Warnings = SerializeStringArray(decision.Warnings)
    };

    dbContext.GateDecisions.Add(entity);
    await dbContext.SaveChangesAsync(ct).ConfigureAwait(false);
}
/// <summary>
/// Maps a persisted <see cref="GateDecisionEntity"/> to the public
/// <see cref="GateDecisionRecord"/> shape. The JSON-text columns for blocking
/// unknown ids and warnings are parsed back into lists; timestamps are exposed
/// as UTC <c>DateTime</c> values.
/// </summary>
private static GateDecisionRecord MapToRecord(GateDecisionEntity entity) => new()
{
DecisionId = entity.DecisionId,
BomRef = entity.BomRef,
ImageDigest = entity.ImageDigest,
GateStatus = entity.GateStatus,
VerdictHash = entity.VerdictHash,
PolicyBundleId = entity.PolicyBundleId,
PolicyBundleHash = entity.PolicyBundleHash,
// Entity stores DateTimeOffset; record exposes the UTC instant.
EvaluatedAt = entity.EvaluatedAt.UtcDateTime,
CiContext = entity.CiContext,
Actor = entity.Actor,
// Stored as JSON strings; parsers return [] on null/empty or malformed input.
BlockingUnknownIds = ParseGuidArray(entity.BlockingUnknownIds),
Warnings = ParseStringArray(entity.Warnings)
};
/// <summary>Encodes a pagination offset as an opaque base64 continuation token.</summary>
private static string EncodeContinuationToken(long offset)
{
    var offsetBytes = BitConverter.GetBytes(offset);
    return Convert.ToBase64String(offsetBytes);
}
@@ -263,8 +172,9 @@ public sealed class GateDecisionHistoryRepository : IGateDecisionHistoryReposito
}
}
private static List<Guid> ParseGuidArray(string json)
private static List<Guid> ParseGuidArray(string? json)
{
if (string.IsNullOrEmpty(json)) return [];
try
{
return System.Text.Json.JsonSerializer.Deserialize<List<Guid>>(json) ?? [];
@@ -275,8 +185,9 @@ public sealed class GateDecisionHistoryRepository : IGateDecisionHistoryReposito
}
}
private static List<string> ParseStringArray(string json)
private static List<string> ParseStringArray(string? json)
{
if (string.IsNullOrEmpty(json)) return [];
try
{
return System.Text.Json.JsonSerializer.Deserialize<List<string>>(json) ?? [];

View File

@@ -1,184 +1,112 @@
// -----------------------------------------------------------------------------
// ReplayAuditRepository.cs
// Sprint: SPRINT_20260118_019_Policy_gate_replay_api_exposure
// Task: GR-007 - Create replay audit trail
// Description: Repository for recording and querying replay audit records
// Sprint: SPRINT_20260225_115_Policy_dal_ef_wrapper_removal_crud_migration
// Task: T2 - Migrate ReplayAuditRepository to EF Core
// Description: Repository for recording and querying replay audit records (EF Core)
// -----------------------------------------------------------------------------
using Npgsql;
using System.Text.Json;
using Microsoft.EntityFrameworkCore;
using Microsoft.Extensions.Logging;
using StellaOps.Infrastructure.Postgres.Repositories;
using StellaOps.Policy.Persistence.Postgres.Models;
namespace StellaOps.Policy.Persistence.Postgres.Repositories;
/// <summary>
/// Repository for recording and querying replay audit records.
/// Migrated from raw Npgsql to EF Core.
/// </summary>
public sealed class ReplayAuditRepository : IReplayAuditRepository
public sealed class ReplayAuditRepository : RepositoryBase<PolicyDataSource>, IReplayAuditRepository
{
/// <summary>
/// Creates the repository over the shared policy data source (EF Core path).
/// Fix: removed the duplicated legacy connection-string constructor and its
/// <c>_connectionString</c> field that were left interleaved with this one.
/// </summary>
public ReplayAuditRepository(PolicyDataSource dataSource, ILogger<ReplayAuditRepository> logger)
    : base(dataSource, logger)
{
}
/// <inheritdoc />
public async Task RecordReplayAsync(ReplayAuditRecord record, CancellationToken ct = default)
{
    // Fix: removed the leftover raw-SQL INSERT path (const sql + NpgsqlCommand with
    // AddWithValue parameters) interleaved with the EF implementation.
    await using var connection = await DataSource.OpenConnectionAsync(
        record.TenantId.ToString(), "writer", ct).ConfigureAwait(false);
    await using var dbContext = PolicyDbContextFactory.Create(connection, CommandTimeoutSeconds, DataSource.SchemaName);

    // Straight field-for-field copy of the audit record into the persistence entity.
    var entity = new ReplayAuditEntity
    {
        ReplayId = record.ReplayId,
        TenantId = record.TenantId,
        BomRef = record.BomRef,
        VerdictHash = record.VerdictHash,
        RekorUuid = record.RekorUuid,
        ReplayedAt = record.ReplayedAt,
        Match = record.Match,
        OriginalHash = record.OriginalHash,
        ReplayedHash = record.ReplayedHash,
        MismatchReason = record.MismatchReason,
        PolicyBundleId = record.PolicyBundleId,
        PolicyBundleHash = record.PolicyBundleHash,
        VerifierDigest = record.VerifierDigest,
        DurationMs = record.DurationMs,
        Actor = record.Actor,
        Source = record.Source,
        RequestContext = record.RequestContext
    };

    dbContext.ReplayAudit.Add(entity);
    await dbContext.SaveChangesAsync(ct).ConfigureAwait(false);
}
/// <inheritdoc />
public async Task<ReplayAuditResult> QueryAsync(ReplayAuditQuery query, CancellationToken ct = default)
{
await using var conn = new NpgsqlConnection(_connectionString);
await conn.OpenAsync(ct);
await using var connection = await DataSource.OpenConnectionAsync(
query.TenantId.ToString(), "reader", ct).ConfigureAwait(false);
await using var dbContext = PolicyDbContextFactory.Create(connection, CommandTimeoutSeconds, DataSource.SchemaName);
var sql = """
SELECT
replay_id, tenant_id, bom_ref, verdict_hash, rekor_uuid, replayed_at,
match, original_hash, replayed_hash, mismatch_reason,
policy_bundle_id, policy_bundle_hash, verifier_digest,
duration_ms, actor, source, request_context
FROM policy.replay_audit
WHERE tenant_id = @tenant_id
""";
var parameters = new List<NpgsqlParameter>
{
new("tenant_id", query.TenantId)
};
var q = dbContext.ReplayAudit
.AsNoTracking()
.Where(r => r.TenantId == query.TenantId);
if (!string.IsNullOrEmpty(query.BomRef))
{
sql += " AND bom_ref = @bom_ref";
parameters.Add(new NpgsqlParameter("bom_ref", query.BomRef));
}
q = q.Where(r => r.BomRef == query.BomRef);
if (!string.IsNullOrEmpty(query.VerdictHash))
{
sql += " AND verdict_hash = @verdict_hash";
parameters.Add(new NpgsqlParameter("verdict_hash", query.VerdictHash));
}
q = q.Where(r => r.VerdictHash == query.VerdictHash);
if (!string.IsNullOrEmpty(query.RekorUuid))
{
sql += " AND rekor_uuid = @rekor_uuid";
parameters.Add(new NpgsqlParameter("rekor_uuid", query.RekorUuid));
}
q = q.Where(r => r.RekorUuid == query.RekorUuid);
if (query.FromDate.HasValue)
{
sql += " AND replayed_at >= @from_date";
parameters.Add(new NpgsqlParameter("from_date", query.FromDate.Value));
}
q = q.Where(r => r.ReplayedAt >= query.FromDate.Value);
if (query.ToDate.HasValue)
{
sql += " AND replayed_at <= @to_date";
parameters.Add(new NpgsqlParameter("to_date", query.ToDate.Value));
}
q = q.Where(r => r.ReplayedAt <= query.ToDate.Value);
if (query.MatchOnly.HasValue)
{
sql += " AND match = @match";
parameters.Add(new NpgsqlParameter("match", query.MatchOnly.Value));
}
q = q.Where(r => r.Match == query.MatchOnly.Value);
if (!string.IsNullOrEmpty(query.Actor))
{
sql += " AND actor = @actor";
parameters.Add(new NpgsqlParameter("actor", query.Actor));
}
q = q.Where(r => r.Actor == query.Actor);
// Get total count
var countSql = $"SELECT COUNT(*) FROM ({sql}) AS filtered";
await using var countCmd = new NpgsqlCommand(countSql, conn);
countCmd.Parameters.AddRange(parameters.ToArray());
var totalCount = Convert.ToInt64(await countCmd.ExecuteScalarAsync(ct));
var totalCount = await q.LongCountAsync(ct).ConfigureAwait(false);
// Apply pagination
sql += " ORDER BY replayed_at DESC";
var ordered = q.OrderByDescending(r => r.ReplayedAt)
.ThenByDescending(r => r.ReplayId);
if (!string.IsNullOrEmpty(query.ContinuationToken))
{
var offset = DecodeContinuationToken(query.ContinuationToken);
sql += $" OFFSET {offset}";
ordered = (IOrderedQueryable<ReplayAuditEntity>)ordered.Skip((int)offset);
}
sql += $" LIMIT {query.Limit + 1}";
var entities = await ordered
.Take(query.Limit + 1)
.ToListAsync(ct)
.ConfigureAwait(false);
await using var cmd = new NpgsqlCommand(sql, conn);
cmd.Parameters.AddRange(parameters.Select(p => p.Clone()).Cast<NpgsqlParameter>().ToArray());
var records = new List<ReplayAuditRecord>();
await using var reader = await cmd.ExecuteReaderAsync(ct);
while (await reader.ReadAsync(ct))
{
records.Add(new ReplayAuditRecord
{
ReplayId = reader.GetGuid(0),
TenantId = reader.GetGuid(1),
BomRef = reader.GetString(2),
VerdictHash = reader.GetString(3),
RekorUuid = reader.IsDBNull(4) ? null : reader.GetString(4),
ReplayedAt = reader.GetDateTime(5),
Match = reader.GetBoolean(6),
OriginalHash = reader.IsDBNull(7) ? null : reader.GetString(7),
ReplayedHash = reader.IsDBNull(8) ? null : reader.GetString(8),
MismatchReason = reader.IsDBNull(9) ? null : reader.GetString(9),
PolicyBundleId = reader.IsDBNull(10) ? null : reader.GetString(10),
PolicyBundleHash = reader.IsDBNull(11) ? null : reader.GetString(11),
VerifierDigest = reader.IsDBNull(12) ? null : reader.GetString(12),
DurationMs = reader.IsDBNull(13) ? null : reader.GetInt32(13),
Actor = reader.IsDBNull(14) ? null : reader.GetString(14),
Source = reader.IsDBNull(15) ? null : reader.GetString(15),
RequestContext = reader.IsDBNull(16) ? null : reader.GetString(16)
});
}
var hasMore = records.Count > query.Limit;
var hasMore = entities.Count > query.Limit;
if (hasMore)
{
records.RemoveAt(records.Count - 1);
entities.RemoveAt(entities.Count - 1);
}
string? nextToken = null;
@@ -192,7 +120,7 @@ public sealed class ReplayAuditRepository : IReplayAuditRepository
return new ReplayAuditResult
{
Records = records,
Records = entities.Select(MapToRecord).ToList(),
Total = totalCount,
ContinuationToken = nextToken
};
@@ -204,49 +132,16 @@ public sealed class ReplayAuditRepository : IReplayAuditRepository
Guid tenantId,
CancellationToken ct = default)
{
await using var conn = new NpgsqlConnection(_connectionString);
await conn.OpenAsync(ct);
await using var connection = await DataSource.OpenConnectionAsync(
tenantId.ToString(), "reader", ct).ConfigureAwait(false);
await using var dbContext = PolicyDbContextFactory.Create(connection, CommandTimeoutSeconds, DataSource.SchemaName);
const string sql = """
SELECT
replay_id, tenant_id, bom_ref, verdict_hash, rekor_uuid, replayed_at,
match, original_hash, replayed_hash, mismatch_reason,
policy_bundle_id, policy_bundle_hash, verifier_digest,
duration_ms, actor, source, request_context
FROM policy.replay_audit
WHERE replay_id = @replay_id AND tenant_id = @tenant_id
""";
var entity = await dbContext.ReplayAudit
.AsNoTracking()
.FirstOrDefaultAsync(r => r.ReplayId == replayId && r.TenantId == tenantId, ct)
.ConfigureAwait(false);
await using var cmd = new NpgsqlCommand(sql, conn);
cmd.Parameters.AddWithValue("replay_id", replayId);
cmd.Parameters.AddWithValue("tenant_id", tenantId);
await using var reader = await cmd.ExecuteReaderAsync(ct);
if (!await reader.ReadAsync(ct))
{
return null;
}
return new ReplayAuditRecord
{
ReplayId = reader.GetGuid(0),
TenantId = reader.GetGuid(1),
BomRef = reader.GetString(2),
VerdictHash = reader.GetString(3),
RekorUuid = reader.IsDBNull(4) ? null : reader.GetString(4),
ReplayedAt = reader.GetDateTime(5),
Match = reader.GetBoolean(6),
OriginalHash = reader.IsDBNull(7) ? null : reader.GetString(7),
ReplayedHash = reader.IsDBNull(8) ? null : reader.GetString(8),
MismatchReason = reader.IsDBNull(9) ? null : reader.GetString(9),
PolicyBundleId = reader.IsDBNull(10) ? null : reader.GetString(10),
PolicyBundleHash = reader.IsDBNull(11) ? null : reader.GetString(11),
VerifierDigest = reader.IsDBNull(12) ? null : reader.GetString(12),
DurationMs = reader.IsDBNull(13) ? null : reader.GetInt32(13),
Actor = reader.IsDBNull(14) ? null : reader.GetString(14),
Source = reader.IsDBNull(15) ? null : reader.GetString(15),
RequestContext = reader.IsDBNull(16) ? null : reader.GetString(16)
};
return entity is null ? null : MapToRecord(entity);
}
/// <inheritdoc />
@@ -256,47 +151,31 @@ public sealed class ReplayAuditRepository : IReplayAuditRepository
DateTimeOffset? toDate,
CancellationToken ct = default)
{
await using var conn = new NpgsqlConnection(_connectionString);
await conn.OpenAsync(ct);
await using var connection = await DataSource.OpenConnectionAsync(
tenantId.ToString(), "reader", ct).ConfigureAwait(false);
await using var dbContext = PolicyDbContextFactory.Create(connection, CommandTimeoutSeconds, DataSource.SchemaName);
var sql = """
SELECT
COUNT(*) AS total_attempts,
COUNT(*) FILTER (WHERE match = true) AS successful_matches,
COUNT(*) FILTER (WHERE match = false) AS mismatches,
AVG(duration_ms) AS avg_duration_ms
FROM policy.replay_audit
WHERE tenant_id = @tenant_id
""";
var parameters = new List<NpgsqlParameter>
{
new("tenant_id", tenantId)
};
var q = dbContext.ReplayAudit
.AsNoTracking()
.Where(r => r.TenantId == tenantId);
if (fromDate.HasValue)
{
sql += " AND replayed_at >= @from_date";
parameters.Add(new NpgsqlParameter("from_date", fromDate.Value));
}
q = q.Where(r => r.ReplayedAt >= fromDate.Value);
if (toDate.HasValue)
q = q.Where(r => r.ReplayedAt <= toDate.Value);
var totalAttempts = await q.LongCountAsync(ct).ConfigureAwait(false);
var successfulMatches = await q.LongCountAsync(r => r.Match, ct).ConfigureAwait(false);
var mismatches = await q.LongCountAsync(r => !r.Match, ct).ConfigureAwait(false);
double? avgDurationMs = null;
var withDuration = q.Where(r => r.DurationMs != null);
if (await withDuration.AnyAsync(ct).ConfigureAwait(false))
{
sql += " AND replayed_at <= @to_date";
parameters.Add(new NpgsqlParameter("to_date", toDate.Value));
avgDurationMs = await withDuration.AverageAsync(r => (double)r.DurationMs!, ct).ConfigureAwait(false);
}
await using var cmd = new NpgsqlCommand(sql, conn);
cmd.Parameters.AddRange(parameters.ToArray());
await using var reader = await cmd.ExecuteReaderAsync(ct);
await reader.ReadAsync(ct);
var totalAttempts = reader.GetInt64(0);
var successfulMatches = reader.GetInt64(1);
var mismatches = reader.GetInt64(2);
var avgDurationMs = reader.IsDBNull(3) ? null : (double?)reader.GetDouble(3);
return new ReplayMetrics
{
TotalAttempts = totalAttempts,
@@ -307,6 +186,27 @@ public sealed class ReplayAuditRepository : IReplayAuditRepository
};
}
/// <summary>
/// Maps a persisted <see cref="ReplayAuditEntity"/> to the public
/// <see cref="ReplayAuditRecord"/> shape. Timestamps are exposed as UTC
/// <c>DateTime</c> values; all other fields copy through unchanged.
/// </summary>
private static ReplayAuditRecord MapToRecord(ReplayAuditEntity entity) => new()
{
ReplayId = entity.ReplayId,
TenantId = entity.TenantId,
BomRef = entity.BomRef,
VerdictHash = entity.VerdictHash,
RekorUuid = entity.RekorUuid,
// Entity stores DateTimeOffset; record exposes the UTC instant.
ReplayedAt = entity.ReplayedAt.UtcDateTime,
Match = entity.Match,
OriginalHash = entity.OriginalHash,
ReplayedHash = entity.ReplayedHash,
MismatchReason = entity.MismatchReason,
PolicyBundleId = entity.PolicyBundleId,
PolicyBundleHash = entity.PolicyBundleHash,
VerifierDigest = entity.VerifierDigest,
DurationMs = entity.DurationMs,
Actor = entity.Actor,
Source = entity.Source,
RequestContext = entity.RequestContext
};
/// <summary>Turns a paging offset into an opaque base64 continuation token.</summary>
private static string EncodeContinuationToken(long offset)
{
    var raw = BitConverter.GetBytes(offset);
    return Convert.ToBase64String(raw);
}

View File

@@ -276,6 +276,7 @@
"scanner.epss.cve_ids_required": "At least one CVE ID is required.",
"scanner.epss.batch_size_exceeded": "Batch size exceeded",
"scanner.epss.batch_size_detail": "Maximum batch size is 1000 CVE IDs.",
"scanner.epss.batch_size_exceeded_detail": "Maximum batch size is 1000 CVE IDs.",
"scanner.epss.data_unavailable": "EPSS data is not available. Please ensure EPSS data has been ingested.",
"scanner.epss.invalid_cve_id": "Invalid CVE ID",
"scanner.epss.cve_id_required": "CVE ID is required.",
@@ -283,8 +284,11 @@
"scanner.epss.cve_not_found_detail": "No EPSS score found for {cveId}.",
"scanner.epss.invalid_date_format": "Invalid date format",
"scanner.epss.date_format_detail": "Dates must be in yyyy-MM-dd format.",
"scanner.epss.invalid_date_format_detail": "Dates must be in yyyy-MM-dd format.",
"scanner.epss.no_history": "No history found",
"scanner.epss.no_history_detail": "No EPSS history found for {cveId} in the specified date range.",
"scanner.epss.no_history_found": "No history found",
"scanner.epss.no_history_found_detail": "No EPSS history found for {cveId} in the specified date range.",
"scanner.webhook.invalid_tenant": "Invalid tenant context",
"scanner.webhook.source_not_found": "Source not found",

View File

@@ -27,6 +27,27 @@ public partial class ScannerDbContext : DbContext
public virtual DbSet<CallGraphSnapshotEntity> CallGraphSnapshots { get; set; }
public virtual DbSet<ReachabilityResultEntity> ReachabilityResults { get; set; }
// ----- Reachability drift tables (scanner schema) -----
public virtual DbSet<CodeChangeEntity> CodeChanges { get; set; }
public virtual DbSet<ReachabilityDriftResultEntity> ReachabilityDriftResults { get; set; }
public virtual DbSet<DriftedSinkEntity> DriftedSinks { get; set; }
// ----- EPSS tables (scanner schema) -----
public virtual DbSet<EpssRawEntity> EpssRaw { get; set; }
public virtual DbSet<EpssSignalEntity> EpssSignals { get; set; }
public virtual DbSet<EpssSignalConfigEntity> EpssSignalConfigs { get; set; }
// ----- VEX candidates (scanner schema) -----
public virtual DbSet<VexCandidateEntity> VexCandidates { get; set; }
// ----- Function-level proof tables (scanner schema) -----
public virtual DbSet<FuncProofEntity> FuncProofs { get; set; }
public virtual DbSet<FuncNodeEntity> FuncNodes { get; set; }
public virtual DbSet<FuncTraceEntity> FuncTraces { get; set; }
// ----- Facet seals (scanner schema) -----
public virtual DbSet<FacetSealEntity> FacetSeals { get; set; }
// ----- Public/default schema tables -----
public virtual DbSet<ScanManifestEntity> ScanManifests { get; set; }
public virtual DbSet<ProofBundleEntity> ProofBundles { get; set; }
@@ -390,6 +411,336 @@ public partial class ScannerDbContext : DbContext
entity.Property(e => e.RekorTileId).HasColumnName("rekor_tile_id");
});
// ======================================================================
// Reachability drift tables (scanner schema, migration 010)
// ======================================================================
modelBuilder.Entity<CodeChangeEntity>(entity =>
{
entity.ToTable("code_changes", schema);
entity.HasKey(e => e.Id);
entity.Property(e => e.Id).HasColumnName("id").HasDefaultValueSql("gen_random_uuid()");
entity.Property(e => e.TenantId).HasColumnName("tenant_id");
entity.Property(e => e.ScanId).HasColumnName("scan_id");
entity.Property(e => e.BaseScanId).HasColumnName("base_scan_id");
entity.Property(e => e.Language).HasColumnName("language");
entity.Property(e => e.NodeId).HasColumnName("node_id");
entity.Property(e => e.File).HasColumnName("file");
entity.Property(e => e.Symbol).HasColumnName("symbol");
entity.Property(e => e.ChangeKind).HasColumnName("change_kind");
entity.Property(e => e.Details).HasColumnName("details").HasColumnType("jsonb");
entity.Property(e => e.DetectedAt).HasColumnName("detected_at").HasDefaultValueSql("NOW()");
entity.HasIndex(e => new { e.TenantId, e.ScanId, e.BaseScanId, e.Language, e.Symbol, e.ChangeKind })
.IsUnique()
.HasDatabaseName("code_changes_unique");
entity.HasIndex(e => new { e.TenantId, e.ScanId, e.BaseScanId, e.Language })
.HasDatabaseName("idx_code_changes_tenant_scan");
entity.HasIndex(e => e.Symbol).HasDatabaseName("idx_code_changes_symbol");
entity.HasIndex(e => e.ChangeKind).HasDatabaseName("idx_code_changes_kind");
});
modelBuilder.Entity<ReachabilityDriftResultEntity>(entity =>
{
entity.ToTable("reachability_drift_results", schema);
entity.HasKey(e => e.Id);
entity.Property(e => e.Id).HasColumnName("id").HasDefaultValueSql("gen_random_uuid()");
entity.Property(e => e.TenantId).HasColumnName("tenant_id");
entity.Property(e => e.BaseScanId).HasColumnName("base_scan_id");
entity.Property(e => e.HeadScanId).HasColumnName("head_scan_id");
entity.Property(e => e.Language).HasColumnName("language");
entity.Property(e => e.NewlyReachableCount).HasColumnName("newly_reachable_count").HasDefaultValue(0);
entity.Property(e => e.NewlyUnreachableCount).HasColumnName("newly_unreachable_count").HasDefaultValue(0);
entity.Property(e => e.DetectedAt).HasColumnName("detected_at").HasDefaultValueSql("NOW()");
entity.Property(e => e.ResultDigest).HasColumnName("result_digest");
entity.HasIndex(e => new { e.TenantId, e.BaseScanId, e.HeadScanId, e.Language, e.ResultDigest })
.IsUnique()
.HasDatabaseName("reachability_drift_unique");
entity.HasIndex(e => new { e.TenantId, e.HeadScanId, e.Language })
.HasDatabaseName("idx_reachability_drift_head");
entity.HasMany(e => e.DriftedSinks)
.WithOne(d => d.DriftResult)
.HasForeignKey(d => d.DriftResultId)
.OnDelete(DeleteBehavior.Cascade);
});
modelBuilder.Entity<DriftedSinkEntity>(entity =>
{
entity.ToTable("drifted_sinks", schema);
entity.HasKey(e => e.Id);
entity.Property(e => e.Id).HasColumnName("id").HasDefaultValueSql("gen_random_uuid()");
entity.Property(e => e.TenantId).HasColumnName("tenant_id");
entity.Property(e => e.DriftResultId).HasColumnName("drift_result_id");
entity.Property(e => e.SinkNodeId).HasColumnName("sink_node_id");
entity.Property(e => e.Symbol).HasColumnName("symbol");
entity.Property(e => e.SinkCategory).HasColumnName("sink_category");
entity.Property(e => e.Direction).HasColumnName("direction");
entity.Property(e => e.CauseKind).HasColumnName("cause_kind");
entity.Property(e => e.CauseDescription).HasColumnName("cause_description");
entity.Property(e => e.CauseSymbol).HasColumnName("cause_symbol");
entity.Property(e => e.CauseFile).HasColumnName("cause_file");
entity.Property(e => e.CauseLine).HasColumnName("cause_line");
entity.Property(e => e.CodeChangeId).HasColumnName("code_change_id");
entity.Property(e => e.CompressedPath).HasColumnName("compressed_path").HasColumnType("jsonb");
entity.Property(e => e.AssociatedVulns).HasColumnName("associated_vulns").HasColumnType("jsonb");
entity.HasIndex(e => new { e.DriftResultId, e.SinkNodeId })
.IsUnique()
.HasDatabaseName("drifted_sinks_unique");
entity.HasIndex(e => e.DriftResultId).HasDatabaseName("idx_drifted_sinks_drift");
entity.HasIndex(e => e.Direction).HasDatabaseName("idx_drifted_sinks_direction");
entity.HasIndex(e => e.SinkCategory).HasDatabaseName("idx_drifted_sinks_category");
});
// ======================================================================
// EPSS tables (scanner schema, migrations 011/012)
// ======================================================================
modelBuilder.Entity<EpssRawEntity>(entity =>
{
entity.ToTable("epss_raw", schema);
entity.HasKey(e => e.RawId);
entity.Property(e => e.RawId).HasColumnName("raw_id").UseIdentityAlwaysColumn();
entity.Property(e => e.SourceUri).HasColumnName("source_uri");
entity.Property(e => e.AsOfDate).HasColumnName("asof_date").HasColumnType("date");
entity.Property(e => e.IngestionTs).HasColumnName("ingestion_ts").HasDefaultValueSql("now()");
entity.Property(e => e.Payload).HasColumnName("payload").HasColumnType("jsonb");
entity.Property(e => e.PayloadSha256).HasColumnName("payload_sha256");
entity.Property(e => e.HeaderComment).HasColumnName("header_comment");
entity.Property(e => e.ModelVersion).HasColumnName("model_version");
entity.Property(e => e.PublishedDate).HasColumnName("published_date").HasColumnType("date");
entity.Property(e => e.RowCount).HasColumnName("row_count");
entity.Property(e => e.CompressedSize).HasColumnName("compressed_size");
entity.Property(e => e.DecompressedSize).HasColumnName("decompressed_size");
entity.Property(e => e.ImportRunId).HasColumnName("import_run_id");
entity.HasIndex(e => new { e.SourceUri, e.AsOfDate, e.PayloadSha256 })
.IsUnique()
.HasDatabaseName("epss_raw_unique");
entity.HasIndex(e => e.AsOfDate).IsDescending().HasDatabaseName("idx_epss_raw_asof");
entity.HasIndex(e => e.ModelVersion).HasDatabaseName("idx_epss_raw_model");
entity.HasIndex(e => e.ImportRunId).HasDatabaseName("idx_epss_raw_import_run");
});
modelBuilder.Entity<EpssSignalEntity>(entity =>
{
entity.ToTable("epss_signal", schema);
entity.HasKey(e => e.SignalId);
entity.Property(e => e.SignalId).HasColumnName("signal_id").UseIdentityAlwaysColumn();
entity.Property(e => e.TenantId).HasColumnName("tenant_id");
entity.Property(e => e.ModelDate).HasColumnName("model_date").HasColumnType("date");
entity.Property(e => e.CveId).HasColumnName("cve_id");
entity.Property(e => e.EventType).HasColumnName("event_type");
entity.Property(e => e.RiskBand).HasColumnName("risk_band");
entity.Property(e => e.EpssScore).HasColumnName("epss_score");
entity.Property(e => e.EpssDelta).HasColumnName("epss_delta");
entity.Property(e => e.Percentile).HasColumnName("percentile");
entity.Property(e => e.PercentileDelta).HasColumnName("percentile_delta");
entity.Property(e => e.IsModelChange).HasColumnName("is_model_change").HasDefaultValue(false);
entity.Property(e => e.ModelVersion).HasColumnName("model_version");
entity.Property(e => e.DedupeKey).HasColumnName("dedupe_key");
entity.Property(e => e.ExplainHash).HasColumnName("explain_hash");
entity.Property(e => e.Payload).HasColumnName("payload").HasColumnType("jsonb");
entity.Property(e => e.CreatedAt).HasColumnName("created_at").HasDefaultValueSql("now()");
entity.HasIndex(e => new { e.TenantId, e.DedupeKey })
.IsUnique()
.HasDatabaseName("epss_signal_dedupe");
entity.HasIndex(e => new { e.TenantId, e.ModelDate })
.IsDescending(false, true)
.HasDatabaseName("idx_epss_signal_tenant_date");
entity.HasIndex(e => new { e.TenantId, e.CveId, e.ModelDate })
.IsDescending(false, false, true)
.HasDatabaseName("idx_epss_signal_tenant_cve");
entity.HasIndex(e => new { e.TenantId, e.EventType, e.ModelDate })
.IsDescending(false, false, true)
.HasDatabaseName("idx_epss_signal_event_type");
});
modelBuilder.Entity<EpssSignalConfigEntity>(entity =>
{
entity.ToTable("epss_signal_config", schema);
entity.HasKey(e => e.ConfigId);
entity.Property(e => e.ConfigId).HasColumnName("config_id").HasDefaultValueSql("gen_random_uuid()");
entity.Property(e => e.TenantId).HasColumnName("tenant_id");
entity.Property(e => e.CriticalPercentile).HasColumnName("critical_percentile").HasDefaultValue(0.995);
entity.Property(e => e.HighPercentile).HasColumnName("high_percentile").HasDefaultValue(0.99);
entity.Property(e => e.MediumPercentile).HasColumnName("medium_percentile").HasDefaultValue(0.90);
entity.Property(e => e.BigJumpDelta).HasColumnName("big_jump_delta").HasDefaultValue(0.10);
entity.Property(e => e.SuppressOnModelChange).HasColumnName("suppress_on_model_change").HasDefaultValue(true);
entity.Property(e => e.EnabledEventTypes).HasColumnName("enabled_event_types");
entity.Property(e => e.CreatedAt).HasColumnName("created_at").HasDefaultValueSql("now()");
entity.Property(e => e.UpdatedAt).HasColumnName("updated_at").HasDefaultValueSql("now()");
entity.HasIndex(e => e.TenantId)
.IsUnique()
.HasDatabaseName("epss_signal_config_tenant_unique");
});
// ======================================================================
// VEX candidates (scanner schema, migration 005)
// ======================================================================
modelBuilder.Entity<VexCandidateEntity>(entity =>
{
entity.ToTable("vex_candidates", schema);
entity.HasKey(e => e.Id);
entity.Property(e => e.Id).HasColumnName("id").HasDefaultValueSql("gen_random_uuid()");
entity.Property(e => e.CandidateId).HasColumnName("candidate_id");
entity.Property(e => e.TenantId).HasColumnName("tenant_id");
entity.Property(e => e.VulnId).HasColumnName("vuln_id");
entity.Property(e => e.Purl).HasColumnName("purl");
entity.Property(e => e.ImageDigest).HasColumnName("image_digest");
entity.Property(e => e.SuggestedStatus).HasColumnName("suggested_status");
entity.Property(e => e.Justification).HasColumnName("justification");
entity.Property(e => e.Rationale).HasColumnName("rationale");
entity.Property(e => e.EvidenceLinks).HasColumnName("evidence_links").HasColumnType("jsonb");
entity.Property(e => e.Confidence).HasColumnName("confidence").HasColumnType("numeric(4,3)");
entity.Property(e => e.GeneratedAt).HasColumnName("generated_at").HasDefaultValueSql("NOW()");
entity.Property(e => e.ExpiresAt).HasColumnName("expires_at");
entity.Property(e => e.RequiresReview).HasColumnName("requires_review").HasDefaultValue(true);
entity.Property(e => e.ReviewAction).HasColumnName("review_action");
entity.Property(e => e.ReviewedBy).HasColumnName("reviewed_by");
entity.Property(e => e.ReviewedAt).HasColumnName("reviewed_at");
entity.Property(e => e.ReviewComment).HasColumnName("review_comment");
entity.Property(e => e.CreatedAt).HasColumnName("created_at").HasDefaultValueSql("NOW()");
entity.HasIndex(e => e.CandidateId)
.IsUnique()
.HasDatabaseName("vex_candidates_candidate_id_key");
entity.HasIndex(e => new { e.TenantId, e.ImageDigest })
.HasDatabaseName("idx_vex_candidates_tenant_image");
entity.HasIndex(e => e.ExpiresAt).HasDatabaseName("idx_vex_candidates_expires");
});
// ======================================================================
// Function-level proof tables (scanner schema, migration 019)
// ======================================================================
modelBuilder.Entity<FuncProofEntity>(entity =>
{
entity.ToTable("func_proof", schema);
entity.HasKey(e => e.Id);
entity.Property(e => e.Id).HasColumnName("id").HasDefaultValueSql("gen_random_uuid()");
entity.Property(e => e.ScanId).HasColumnName("scan_id");
entity.Property(e => e.ProofId).HasColumnName("proof_id");
entity.Property(e => e.BuildId).HasColumnName("build_id");
entity.Property(e => e.BuildIdType).HasColumnName("build_id_type");
entity.Property(e => e.FileSha256).HasColumnName("file_sha256");
entity.Property(e => e.BinaryFormat).HasColumnName("binary_format");
entity.Property(e => e.Architecture).HasColumnName("architecture");
entity.Property(e => e.IsStripped).HasColumnName("is_stripped").HasDefaultValue(false);
entity.Property(e => e.FunctionCount).HasColumnName("function_count").HasDefaultValue(0);
entity.Property(e => e.TraceCount).HasColumnName("trace_count").HasDefaultValue(0);
entity.Property(e => e.ProofContent).HasColumnName("proof_content").HasColumnType("jsonb");
entity.Property(e => e.CompressedContent).HasColumnName("compressed_content");
entity.Property(e => e.DsseEnvelopeId).HasColumnName("dsse_envelope_id");
entity.Property(e => e.OciArtifactDigest).HasColumnName("oci_artifact_digest");
entity.Property(e => e.RekorEntryId).HasColumnName("rekor_entry_id");
entity.Property(e => e.GeneratorVersion).HasColumnName("generator_version");
entity.Property(e => e.GeneratedAtUtc).HasColumnName("generated_at_utc");
entity.Property(e => e.CreatedAtUtc).HasColumnName("created_at_utc").HasDefaultValueSql("NOW()");
entity.Property(e => e.UpdatedAtUtc).HasColumnName("updated_at_utc");
entity.HasIndex(e => e.ProofId).IsUnique().HasDatabaseName("idx_func_proof_proof_id");
entity.HasIndex(e => e.BuildId).HasDatabaseName("idx_func_proof_build_id");
entity.HasIndex(e => e.FileSha256).HasDatabaseName("idx_func_proof_file_sha256");
entity.HasIndex(e => e.ScanId).HasDatabaseName("idx_func_proof_scan_id");
entity.HasIndex(e => new { e.BuildId, e.Architecture }).HasDatabaseName("idx_func_proof_build_arch");
entity.HasMany(e => e.Nodes)
.WithOne(n => n.FuncProof)
.HasForeignKey(n => n.FuncProofId)
.OnDelete(DeleteBehavior.Cascade);
entity.HasMany(e => e.Traces)
.WithOne(t => t.FuncProof)
.HasForeignKey(t => t.FuncProofId)
.OnDelete(DeleteBehavior.Cascade);
});
modelBuilder.Entity<FuncNodeEntity>(entity =>
{
entity.ToTable("func_node", schema);
entity.HasKey(e => e.Id);
entity.Property(e => e.Id).HasColumnName("id").HasDefaultValueSql("gen_random_uuid()");
entity.Property(e => e.FuncProofId).HasColumnName("func_proof_id");
entity.Property(e => e.Symbol).HasColumnName("symbol");
entity.Property(e => e.SymbolDigest).HasColumnName("symbol_digest");
entity.Property(e => e.StartAddress).HasColumnName("start_address");
entity.Property(e => e.EndAddress).HasColumnName("end_address");
entity.Property(e => e.FunctionHash).HasColumnName("function_hash");
entity.Property(e => e.Confidence).HasColumnName("confidence").HasDefaultValue(1.0);
entity.Property(e => e.IsEntrypoint).HasColumnName("is_entrypoint").HasDefaultValue(false);
entity.Property(e => e.EntrypointType).HasColumnName("entrypoint_type");
entity.Property(e => e.IsSink).HasColumnName("is_sink").HasDefaultValue(false);
entity.Property(e => e.SinkVulnId).HasColumnName("sink_vuln_id");
entity.Property(e => e.SourceFile).HasColumnName("source_file");
entity.Property(e => e.SourceLine).HasColumnName("source_line");
entity.Property(e => e.CreatedAtUtc).HasColumnName("created_at_utc").HasDefaultValueSql("NOW()");
entity.HasIndex(e => e.SymbolDigest).HasDatabaseName("idx_func_node_symbol_digest");
entity.HasIndex(e => e.FuncProofId).HasDatabaseName("idx_func_node_proof_id");
entity.HasIndex(e => e.Symbol).HasDatabaseName("idx_func_node_symbol");
});
modelBuilder.Entity<FuncTraceEntity>(entity =>
{
entity.ToTable("func_trace", schema);
entity.HasKey(e => e.Id);
entity.Property(e => e.Id).HasColumnName("id").HasDefaultValueSql("gen_random_uuid()");
entity.Property(e => e.FuncProofId).HasColumnName("func_proof_id");
entity.Property(e => e.TraceId).HasColumnName("trace_id");
entity.Property(e => e.EdgeListHash).HasColumnName("edge_list_hash");
entity.Property(e => e.HopCount).HasColumnName("hop_count");
entity.Property(e => e.EntrySymbolDigest).HasColumnName("entry_symbol_digest");
entity.Property(e => e.SinkSymbolDigest).HasColumnName("sink_symbol_digest");
entity.Property(e => e.Path).HasColumnName("path");
entity.Property(e => e.Truncated).HasColumnName("truncated").HasDefaultValue(false);
entity.Property(e => e.CreatedAtUtc).HasColumnName("created_at_utc").HasDefaultValueSql("NOW()");
entity.HasIndex(e => e.FuncProofId).HasDatabaseName("idx_func_trace_proof_id");
entity.HasIndex(e => e.EntrySymbolDigest).HasDatabaseName("idx_func_trace_entry_digest");
entity.HasIndex(e => e.SinkSymbolDigest).HasDatabaseName("idx_func_trace_sink_digest");
entity.HasIndex(e => e.EdgeListHash).HasDatabaseName("idx_func_trace_edge_hash");
});
// ======================================================================
// Facet seals (scanner schema)
// ======================================================================
modelBuilder.Entity<FacetSealEntity>(entity =>
{
entity.ToTable("facet_seals", schema);
entity.HasKey(e => e.CombinedMerkleRoot);
entity.Property(e => e.CombinedMerkleRoot).HasColumnName("combined_merkle_root");
entity.Property(e => e.ImageDigest).HasColumnName("image_digest");
entity.Property(e => e.SchemaVersion).HasColumnName("schema_version");
entity.Property(e => e.CreatedAt).HasColumnName("created_at");
entity.Property(e => e.BuildAttestationRef).HasColumnName("build_attestation_ref");
entity.Property(e => e.Signature).HasColumnName("signature");
entity.Property(e => e.SigningKeyId).HasColumnName("signing_key_id");
entity.Property(e => e.SealContent).HasColumnName("seal_content").HasColumnType("jsonb");
entity.HasIndex(e => e.ImageDigest).HasDatabaseName("idx_facet_seals_image_digest");
entity.HasIndex(e => new { e.ImageDigest, e.CreatedAt })
.IsDescending(false, true)
.HasDatabaseName("idx_facet_seals_image_created");
});
OnModelCreatingPartial(modelBuilder);
}

View File

@@ -0,0 +1,16 @@
namespace StellaOps.Scanner.Storage.EfCore.Models;
/// <summary>
/// EF Core entity for the scanner-schema <c>code_changes</c> table: one source-level
/// change detected between a base scan and a head scan, used as input to
/// reachability drift analysis.
/// </summary>
public class CodeChangeEntity
{
/// <summary>Primary key; database default is <c>gen_random_uuid()</c>.</summary>
public Guid Id { get; set; }
/// <summary>Owning tenant; part of the table's unique key.</summary>
public Guid TenantId { get; set; }
/// <summary>Head scan in which the change was observed.</summary>
public string ScanId { get; set; } = null!;
/// <summary>Baseline scan the head scan was compared against.</summary>
public string BaseScanId { get; set; } = null!;
/// <summary>Language of the changed code; part of the unique key.</summary>
public string Language { get; set; } = null!;
/// <summary>Optional call-graph node identifier associated with the change.</summary>
public string? NodeId { get; set; }
/// <summary>File path containing the changed symbol.</summary>
public string File { get; set; } = null!;
/// <summary>Changed symbol; indexed and part of the unique key.</summary>
public string Symbol { get; set; } = null!;
/// <summary>Kind of change (e.g. added/removed/modified — values defined by the producer); indexed.</summary>
public string ChangeKind { get; set; } = null!;
/// <summary>Optional change details, stored as a <c>jsonb</c> column.</summary>
public string? Details { get; set; }
/// <summary>Detection timestamp; database default is <c>NOW()</c>.</summary>
public DateTimeOffset DetectedAt { get; set; }
}

View File

@@ -0,0 +1,22 @@
namespace StellaOps.Scanner.Storage.EfCore.Models;
/// <summary>
/// EF Core entity for the scanner-schema <c>drifted_sinks</c> table: a single sink whose
/// reachability changed in a drift result. Rows cascade-delete with their parent
/// <see cref="ReachabilityDriftResultEntity"/>.
/// </summary>
public class DriftedSinkEntity
{
/// <summary>Primary key; database default is <c>gen_random_uuid()</c>.</summary>
public Guid Id { get; set; }
/// <summary>Owning tenant.</summary>
public Guid TenantId { get; set; }
/// <summary>Foreign key to the parent drift result; unique together with <see cref="SinkNodeId"/>.</summary>
public Guid DriftResultId { get; set; }
/// <summary>Call-graph node identifier of the sink.</summary>
public string SinkNodeId { get; set; } = null!;
/// <summary>Symbol name of the sink function.</summary>
public string Symbol { get; set; } = null!;
/// <summary>Sink category; indexed.</summary>
public string SinkCategory { get; set; } = null!;
/// <summary>Drift direction (e.g. newly reachable vs. newly unreachable — values defined by the producer); indexed.</summary>
public string Direction { get; set; } = null!;
/// <summary>Kind of cause behind the drift.</summary>
public string CauseKind { get; set; } = null!;
/// <summary>Human-readable description of the cause.</summary>
public string CauseDescription { get; set; } = null!;
/// <summary>Symbol implicated as the cause, when known.</summary>
public string? CauseSymbol { get; set; }
/// <summary>File implicated as the cause, when known.</summary>
public string? CauseFile { get; set; }
/// <summary>Line implicated as the cause, when known.</summary>
public int? CauseLine { get; set; }
/// <summary>Optional link to the <see cref="CodeChangeEntity"/> that caused the drift.</summary>
public Guid? CodeChangeId { get; set; }
/// <summary>Compressed call path to the sink, stored as a <c>jsonb</c> column.</summary>
public string CompressedPath { get; set; } = null!;
/// <summary>Vulnerabilities associated with the sink, stored as a <c>jsonb</c> column.</summary>
public string? AssociatedVulns { get; set; }
/// <summary>Navigation to the parent drift result.</summary>
public ReachabilityDriftResultEntity DriftResult { get; set; } = null!;
}

View File

@@ -0,0 +1,18 @@
namespace StellaOps.Scanner.Storage.EfCore.Models;
/// <summary>
/// EF Core entity for the scanner-schema <c>epss_raw</c> table: one ingested raw EPSS
/// feed snapshot. Uniqueness is enforced on (source URI, as-of date, payload SHA-256).
/// </summary>
public class EpssRawEntity
{
/// <summary>Primary key; identity-always column generated by the database.</summary>
public long RawId { get; set; }
/// <summary>URI the feed snapshot was fetched from.</summary>
public string SourceUri { get; set; } = null!;
/// <summary>Feed "as of" date (<c>date</c> column); indexed descending.</summary>
public DateOnly AsOfDate { get; set; }
/// <summary>Ingestion timestamp; database default is <c>now()</c>.</summary>
public DateTimeOffset IngestionTs { get; set; }
/// <summary>Raw feed payload, stored as a <c>jsonb</c> column.</summary>
public string Payload { get; set; } = null!;
/// <summary>SHA-256 of the payload; part of the unique key for dedupe.</summary>
public byte[] PayloadSha256 { get; set; } = null!;
/// <summary>Header comment carried by the feed file, if any.</summary>
public string? HeaderComment { get; set; }
/// <summary>EPSS model version reported by the feed; indexed.</summary>
public string? ModelVersion { get; set; }
/// <summary>Publication date reported by the feed, if present (<c>date</c> column).</summary>
public DateOnly? PublishedDate { get; set; }
/// <summary>Number of rows in the payload.</summary>
public int RowCount { get; set; }
/// <summary>Compressed payload size in bytes, when recorded.</summary>
public long? CompressedSize { get; set; }
/// <summary>Decompressed payload size in bytes, when recorded.</summary>
public long? DecompressedSize { get; set; }
/// <summary>Import run that produced this row, when recorded; indexed.</summary>
public Guid? ImportRunId { get; set; }
}

View File

@@ -0,0 +1,15 @@
namespace StellaOps.Scanner.Storage.EfCore.Models;
/// <summary>
/// EF Core entity for the scanner-schema <c>epss_signal_config</c> table: per-tenant
/// EPSS signal thresholds. One row per tenant (unique index on <see cref="TenantId"/>).
/// </summary>
public class EpssSignalConfigEntity
{
/// <summary>Primary key; database default is <c>gen_random_uuid()</c>.</summary>
public Guid ConfigId { get; set; }
/// <summary>Owning tenant; unique across the table.</summary>
public Guid TenantId { get; set; }
/// <summary>Percentile threshold for the critical band; database default 0.995.</summary>
public double CriticalPercentile { get; set; }
/// <summary>Percentile threshold for the high band; database default 0.99.</summary>
public double HighPercentile { get; set; }
/// <summary>Percentile threshold for the medium band; database default 0.90.</summary>
public double MediumPercentile { get; set; }
/// <summary>Score delta treated as a "big jump"; database default 0.10.</summary>
public double BigJumpDelta { get; set; }
/// <summary>Whether to suppress signals triggered by an EPSS model change; database default true.</summary>
public bool SuppressOnModelChange { get; set; }
/// <summary>Event types for which signals are emitted.</summary>
public string[] EnabledEventTypes { get; set; } = [];
/// <summary>Creation timestamp; database default is <c>now()</c>.</summary>
public DateTimeOffset CreatedAt { get; set; }
/// <summary>Last-update timestamp; database default is <c>now()</c>.</summary>
public DateTimeOffset UpdatedAt { get; set; }
}

View File

@@ -0,0 +1,21 @@
namespace StellaOps.Scanner.Storage.EfCore.Models;
/// <summary>
/// EF Core entity for the scanner-schema <c>epss_signal</c> table: one derived EPSS
/// signal event for a CVE. Deduplicated per tenant via the unique
/// (<see cref="TenantId"/>, <see cref="DedupeKey"/>) index.
/// </summary>
public class EpssSignalEntity
{
/// <summary>Primary key; identity-always column generated by the database.</summary>
public long SignalId { get; set; }
/// <summary>Owning tenant.</summary>
public Guid TenantId { get; set; }
/// <summary>EPSS model date the signal was computed from (<c>date</c> column).</summary>
public DateOnly ModelDate { get; set; }
/// <summary>CVE identifier the signal refers to; indexed with tenant and model date.</summary>
public string CveId { get; set; } = null!;
/// <summary>Signal event type; indexed with tenant and model date.</summary>
public string EventType { get; set; } = null!;
/// <summary>Risk band assigned to the CVE, when applicable.</summary>
public string? RiskBand { get; set; }
/// <summary>EPSS score at the model date, when applicable.</summary>
public double? EpssScore { get; set; }
/// <summary>Change in EPSS score versus the previous model date, when applicable.</summary>
public double? EpssDelta { get; set; }
/// <summary>EPSS percentile at the model date, when applicable.</summary>
public double? Percentile { get; set; }
/// <summary>Change in percentile versus the previous model date, when applicable.</summary>
public double? PercentileDelta { get; set; }
/// <summary>True when the delta is attributable to an EPSS model change; database default false.</summary>
public bool IsModelChange { get; set; }
/// <summary>EPSS model version the signal was computed from.</summary>
public string? ModelVersion { get; set; }
/// <summary>Deduplication key; unique per tenant.</summary>
public string DedupeKey { get; set; } = null!;
/// <summary>Hash of the signal's explanation payload.</summary>
public byte[] ExplainHash { get; set; } = null!;
/// <summary>Signal payload, stored as a <c>jsonb</c> column.</summary>
public string Payload { get; set; } = null!;
/// <summary>Creation timestamp; database default is <c>now()</c>.</summary>
public DateTimeOffset CreatedAt { get; set; }
}

View File

@@ -0,0 +1,13 @@
namespace StellaOps.Scanner.Storage.EfCore.Models;
/// <summary>
/// EF Core entity for the scanner-schema <c>facet_seals</c> table: a signed seal over an
/// image's facets, keyed by the combined Merkle root of the sealed content.
/// </summary>
public class FacetSealEntity
{
/// <summary>Primary key: combined Merkle root over the sealed facets.</summary>
public string CombinedMerkleRoot { get; set; } = null!;
/// <summary>Digest of the sealed container image; indexed.</summary>
public string ImageDigest { get; set; } = null!;
/// <summary>Schema version of the seal content.</summary>
public int SchemaVersion { get; set; }
/// <summary>Seal creation timestamp; indexed (descending) together with <see cref="ImageDigest"/>.</summary>
public DateTimeOffset CreatedAt { get; set; }
/// <summary>Optional reference to the build attestation backing the seal.</summary>
public string? BuildAttestationRef { get; set; }
/// <summary>Seal signature, when signed.</summary>
public string? Signature { get; set; }
/// <summary>Identifier of the key that produced <see cref="Signature"/>.</summary>
public string? SigningKeyId { get; set; }
/// <summary>Full seal document, stored as a <c>jsonb</c> column.</summary>
public string SealContent { get; set; } = null!;
}

View File

@@ -0,0 +1,22 @@
namespace StellaOps.Scanner.Storage.EfCore.Models;
/// <summary>
/// EF Core entity for the scanner-schema <c>func_node</c> table: a single function
/// recorded in a function-level proof. Rows cascade-delete with their parent
/// <see cref="FuncProofEntity"/>.
/// </summary>
public class FuncNodeEntity
{
/// <summary>Primary key; database default is <c>gen_random_uuid()</c>.</summary>
public Guid Id { get; set; }
/// <summary>Foreign key to the owning proof; indexed.</summary>
public Guid FuncProofId { get; set; }
/// <summary>Function symbol name; indexed.</summary>
public string Symbol { get; set; } = null!;
/// <summary>Digest of the symbol; indexed and referenced by trace endpoints.</summary>
public string SymbolDigest { get; set; } = null!;
/// <summary>Start address of the function in the binary.</summary>
public long StartAddress { get; set; }
/// <summary>End address of the function in the binary.</summary>
public long EndAddress { get; set; }
/// <summary>Hash of the function body.</summary>
public string FunctionHash { get; set; } = null!;
/// <summary>Identification confidence; database default 1.0.</summary>
public double Confidence { get; set; }
/// <summary>True when the function is an entrypoint; database default false.</summary>
public bool IsEntrypoint { get; set; }
/// <summary>Entrypoint classification, when <see cref="IsEntrypoint"/> is true.</summary>
public string? EntrypointType { get; set; }
/// <summary>True when the function is a vulnerability sink; database default false.</summary>
public bool IsSink { get; set; }
/// <summary>Vulnerability identifier associated with the sink, when applicable.</summary>
public string? SinkVulnId { get; set; }
/// <summary>Source file of the function, when debug info is available.</summary>
public string? SourceFile { get; set; }
/// <summary>Source line of the function, when debug info is available.</summary>
public int? SourceLine { get; set; }
/// <summary>Creation timestamp (UTC); database default is <c>NOW()</c>.</summary>
public DateTimeOffset CreatedAtUtc { get; set; }
/// <summary>Navigation to the owning proof.</summary>
public FuncProofEntity FuncProof { get; set; } = null!;
}

View File

@@ -0,0 +1,28 @@
namespace StellaOps.Scanner.Storage.EfCore.Models;
/// <summary>
/// EF Core entity for the scanner-schema <c>func_proof</c> table: a function-level
/// reachability proof generated for one binary within a scan. Owns the
/// <see cref="Nodes"/> and <see cref="Traces"/> children, which cascade-delete with it.
/// </summary>
public class FuncProofEntity
{
    /// <summary>Primary key; database default is <c>gen_random_uuid()</c>.</summary>
    public Guid Id { get; set; }

    /// <summary>Scan that produced this proof; indexed.</summary>
    public Guid ScanId { get; set; }

    /// <summary>Stable proof identifier; unique across the table.</summary>
    public string ProofId { get; set; } = null!;

    /// <summary>Build identifier of the binary; indexed (also with <see cref="Architecture"/>).</summary>
    public string BuildId { get; set; } = null!;

    /// <summary>Type/scheme of <see cref="BuildId"/>.</summary>
    public string BuildIdType { get; set; } = null!;

    /// <summary>SHA-256 of the binary file; indexed.</summary>
    public string FileSha256 { get; set; } = null!;

    /// <summary>Binary format (e.g. ELF/PE — values defined by the producer).</summary>
    public string BinaryFormat { get; set; } = null!;

    /// <summary>Target architecture of the binary.</summary>
    public string Architecture { get; set; } = null!;

    /// <summary>True when the binary has been stripped of symbols; database default false.</summary>
    public bool IsStripped { get; set; }

    /// <summary>Number of functions in the proof; database default 0.</summary>
    public int FunctionCount { get; set; }

    /// <summary>Number of traces in the proof; database default 0.</summary>
    public int TraceCount { get; set; }

    /// <summary>Full proof document, stored as a <c>jsonb</c> column.</summary>
    public string ProofContent { get; set; } = null!;

    /// <summary>Optional compressed form of the proof content.</summary>
    public byte[]? CompressedContent { get; set; }

    /// <summary>Optional reference to the DSSE envelope signing the proof.</summary>
    public string? DsseEnvelopeId { get; set; }

    /// <summary>Optional digest of the OCI artifact carrying the proof.</summary>
    public string? OciArtifactDigest { get; set; }

    /// <summary>Optional Rekor transparency-log entry identifier.</summary>
    public string? RekorEntryId { get; set; }

    /// <summary>Version of the proof generator.</summary>
    public string GeneratorVersion { get; set; } = null!;

    /// <summary>When the proof was generated (UTC).</summary>
    public DateTimeOffset GeneratedAtUtc { get; set; }

    /// <summary>Row creation timestamp (UTC); database default is <c>NOW()</c>.</summary>
    public DateTimeOffset CreatedAtUtc { get; set; }

    /// <summary>Row last-update timestamp (UTC), when updated.</summary>
    public DateTimeOffset? UpdatedAtUtc { get; set; }

    /// <summary>Function nodes belonging to this proof (cascade-deleted with it).</summary>
    public ICollection<FuncNodeEntity> Nodes { get; set; } = [];

    /// <summary>Reachability traces belonging to this proof (cascade-deleted with it).</summary>
    public ICollection<FuncTraceEntity> Traces { get; set; } = [];
}

View File

@@ -0,0 +1,17 @@
namespace StellaOps.Scanner.Storage.EfCore.Models;
/// <summary>
/// EF Core entity for the scanner-schema <c>func_trace</c> table: one reachability trace
/// from an entry function to a sink within a function-level proof. Rows cascade-delete
/// with their parent <see cref="FuncProofEntity"/>.
/// </summary>
public class FuncTraceEntity
{
/// <summary>Primary key; database default is <c>gen_random_uuid()</c>.</summary>
public Guid Id { get; set; }
/// <summary>Foreign key to the owning proof; indexed.</summary>
public Guid FuncProofId { get; set; }
/// <summary>Trace identifier within the proof.</summary>
public string TraceId { get; set; } = null!;
/// <summary>Hash of the trace's edge list; indexed.</summary>
public string EdgeListHash { get; set; } = null!;
/// <summary>Number of hops in the trace.</summary>
public int HopCount { get; set; }
/// <summary>Symbol digest of the entry function; indexed.</summary>
public string EntrySymbolDigest { get; set; } = null!;
/// <summary>Symbol digest of the sink function; indexed.</summary>
public string SinkSymbolDigest { get; set; } = null!;
/// <summary>Ordered path of symbols along the trace.</summary>
public string[] Path { get; set; } = [];
/// <summary>True when the recorded path was truncated; database default false.</summary>
public bool Truncated { get; set; }
/// <summary>Creation timestamp (UTC); database default is <c>NOW()</c>.</summary>
public DateTimeOffset CreatedAtUtc { get; set; }
/// <summary>Navigation to the owning proof.</summary>
public FuncProofEntity FuncProof { get; set; } = null!;
}

View File

@@ -0,0 +1,16 @@
namespace StellaOps.Scanner.Storage.EfCore.Models;
/// <summary>
/// EF Core entity for the scanner-schema <c>reachability_drift_results</c> table: the
/// summary of a reachability comparison between a base scan and a head scan for one
/// language. Owns <see cref="DriftedSinks"/>, which cascade-delete with it. Uniqueness
/// is enforced on (tenant, base scan, head scan, language, result digest).
/// </summary>
public class ReachabilityDriftResultEntity
{
    /// <summary>Primary key; database default is <c>gen_random_uuid()</c>.</summary>
    public Guid Id { get; set; }

    /// <summary>Owning tenant; part of the unique key.</summary>
    public Guid TenantId { get; set; }

    /// <summary>Baseline scan identifier.</summary>
    public string BaseScanId { get; set; } = null!;

    /// <summary>Head scan identifier; indexed with tenant and language.</summary>
    public string HeadScanId { get; set; } = null!;

    /// <summary>Language the comparison was run for.</summary>
    public string Language { get; set; } = null!;

    /// <summary>Count of sinks that became reachable; database default 0.</summary>
    public int NewlyReachableCount { get; set; }

    /// <summary>Count of sinks that became unreachable; database default 0.</summary>
    public int NewlyUnreachableCount { get; set; }

    /// <summary>Detection timestamp; database default is <c>NOW()</c>.</summary>
    public DateTimeOffset DetectedAt { get; set; }

    /// <summary>Digest of the full result; part of the unique key.</summary>
    public string ResultDigest { get; set; } = null!;

    /// <summary>Drifted sinks belonging to this result (cascade-deleted with it).</summary>
    public ICollection<DriftedSinkEntity> DriftedSinks { get; set; } = [];
}

View File

@@ -0,0 +1,24 @@
namespace StellaOps.Scanner.Storage.EfCore.Models;
/// <summary>
/// EF Core entity for the scanner-schema <c>vex_candidates</c> table: a machine-suggested
/// VEX statement for a (vulnerability, package, image) combination that awaits human review.
/// </summary>
public class VexCandidateEntity
{
/// <summary>Primary key; database default is <c>gen_random_uuid()</c>.</summary>
public Guid Id { get; set; }
/// <summary>Stable candidate identifier; unique across the table.</summary>
public string CandidateId { get; set; } = null!;
/// <summary>Owning tenant; indexed with <see cref="ImageDigest"/>.</summary>
public Guid TenantId { get; set; }
/// <summary>Vulnerability the candidate statement refers to.</summary>
public string VulnId { get; set; } = null!;
/// <summary>Package URL of the affected component.</summary>
public string Purl { get; set; } = null!;
/// <summary>Digest of the container image the candidate applies to.</summary>
public string ImageDigest { get; set; } = null!;
/// <summary>Suggested VEX status for the vulnerability.</summary>
public string SuggestedStatus { get; set; } = null!;
/// <summary>Justification accompanying the suggested status.</summary>
public string Justification { get; set; } = null!;
/// <summary>Human-readable rationale for the suggestion.</summary>
public string Rationale { get; set; } = null!;
/// <summary>Supporting evidence links, stored as a <c>jsonb</c> column.</summary>
public string EvidenceLinks { get; set; } = null!;
/// <summary>Suggestion confidence; <c>numeric(4,3)</c> column.</summary>
public decimal Confidence { get; set; }
/// <summary>When the candidate was generated; database default is <c>NOW()</c>.</summary>
public DateTimeOffset GeneratedAt { get; set; }
/// <summary>When the candidate expires; indexed.</summary>
public DateTimeOffset ExpiresAt { get; set; }
/// <summary>True while the candidate awaits human review; database default true.</summary>
public bool RequiresReview { get; set; }
/// <summary>Review decision, once taken.</summary>
public string? ReviewAction { get; set; }
/// <summary>Reviewer identity, once reviewed.</summary>
public string? ReviewedBy { get; set; }
/// <summary>Review timestamp, once reviewed.</summary>
public DateTimeOffset? ReviewedAt { get; set; }
/// <summary>Optional reviewer comment.</summary>
public string? ReviewComment { get; set; }
/// <summary>Row creation timestamp; database default is <c>NOW()</c>.</summary>
public DateTimeOffset CreatedAt { get; set; }
}

View File

@@ -11,6 +11,7 @@ ALTER TABLE scanner.runtime_observations
ADD COLUMN IF NOT EXISTS observation_id TEXT,
ADD COLUMN IF NOT EXISTS node_hash TEXT,
ADD COLUMN IF NOT EXISTS function_name TEXT,
ADD COLUMN IF NOT EXISTS container_id TEXT,
ADD COLUMN IF NOT EXISTS pod_name TEXT,
ADD COLUMN IF NOT EXISTS namespace TEXT,
ADD COLUMN IF NOT EXISTS probe_type TEXT,

View File

@@ -212,8 +212,16 @@ public sealed class PostgresArtifactBomRepository : IArtifactBomRepository
canonical_bom_sha256 AS "CanonicalBomSha256",
payload_digest AS "PayloadDigest",
inserted_at AS "InsertedAt",
NULL::text AS "RawBomRef",
NULL::text AS "CanonicalBomRef",
NULL::text AS "DsseEnvelopeRef",
NULL::text AS "MergedVexRef",
NULL::text AS "CanonicalBomJson",
NULL::text AS "MergedVexJson",
NULL::text AS "AttestationsJson",
evidence_score AS "EvidenceScore",
rekor_tile_id AS "RekorTileId"
rekor_tile_id AS "RekorTileId",
NULL::text AS "PendingMergedVexJson"
FROM {TableName}
WHERE payload_digest = @p0
ORDER BY inserted_at DESC, build_id ASC
@@ -246,7 +254,16 @@ public sealed class PostgresArtifactBomRepository : IArtifactBomRepository
canonical_bom_sha256 AS "CanonicalBomSha256",
payload_digest AS "PayloadDigest",
inserted_at AS "InsertedAt",
evidence_score AS "EvidenceScore"
NULL::text AS "RawBomRef",
NULL::text AS "CanonicalBomRef",
NULL::text AS "DsseEnvelopeRef",
NULL::text AS "MergedVexRef",
NULL::text AS "CanonicalBomJson",
NULL::text AS "MergedVexJson",
NULL::text AS "AttestationsJson",
evidence_score AS "EvidenceScore",
NULL::text AS "RekorTileId",
NULL::text AS "PendingMergedVexJson"
FROM {TableName}
WHERE jsonb_path_exists(
canonical_bom,
@@ -289,7 +306,16 @@ public sealed class PostgresArtifactBomRepository : IArtifactBomRepository
canonical_bom_sha256 AS "CanonicalBomSha256",
payload_digest AS "PayloadDigest",
inserted_at AS "InsertedAt",
evidence_score AS "EvidenceScore"
NULL::text AS "RawBomRef",
NULL::text AS "CanonicalBomRef",
NULL::text AS "DsseEnvelopeRef",
NULL::text AS "MergedVexRef",
NULL::text AS "CanonicalBomJson",
NULL::text AS "MergedVexJson",
NULL::text AS "AttestationsJson",
evidence_score AS "EvidenceScore",
NULL::text AS "RekorTileId",
NULL::text AS "PendingMergedVexJson"
FROM {TableName}
WHERE jsonb_path_exists(
canonical_bom,
@@ -333,7 +359,15 @@ public sealed class PostgresArtifactBomRepository : IArtifactBomRepository
canonical_bom_sha256 AS "CanonicalBomSha256",
payload_digest AS "PayloadDigest",
inserted_at AS "InsertedAt",
NULL::text AS "RawBomRef",
NULL::text AS "CanonicalBomRef",
NULL::text AS "DsseEnvelopeRef",
NULL::text AS "MergedVexRef",
NULL::text AS "CanonicalBomJson",
NULL::text AS "MergedVexJson",
NULL::text AS "AttestationsJson",
evidence_score AS "EvidenceScore",
NULL::text AS "RekorTileId",
jsonb_path_query_array(merged_vex, @p0::jsonpath)::text AS "PendingMergedVexJson"
FROM {TableName}
WHERE jsonb_path_exists(merged_vex, @p0::jsonpath)

View File

@@ -84,7 +84,7 @@ public sealed class PostgresCallGraphSnapshotRepository : ICallGraphSnapshotRepo
var tenantScope = ScannerTenantScope.Resolve(tenantId);
var sql = $"""
SELECT snapshot_json
SELECT snapshot_json AS "Value"
FROM {CallGraphSnapshotsTable}
WHERE tenant_id = @p0 AND scan_id = @p1 AND language = @p2
ORDER BY extracted_at DESC

View File

@@ -7,6 +7,7 @@
using Microsoft.EntityFrameworkCore;
using Npgsql;
using StellaOps.Scanner.Storage.EfCore.Models;
using StellaOps.Scanner.Storage.Repositories;
namespace StellaOps.Scanner.Storage.Postgres;
@@ -79,26 +80,16 @@ public sealed class PostgresEpssRawRepository : IEpssRawRepository
public async Task<EpssRaw?> GetByDateAsync(DateOnly asOfDate, CancellationToken cancellationToken = default)
{
var sql = $"""
SELECT
raw_id, source_uri, asof_date, ingestion_ts,
payload, payload_sha256, header_comment, model_version, published_date,
row_count, compressed_size, decompressed_size, import_run_id
FROM {RawTable}
WHERE asof_date = @p0
ORDER BY ingestion_ts DESC
LIMIT 1
""";
await using var connection = await _dataSource.OpenSystemConnectionAsync(cancellationToken);
await using var dbContext = ScannerDbContextFactory.Create(connection, _dataSource.CommandTimeoutSeconds, SchemaName);
var row = await dbContext.Database.SqlQueryRaw<RawRow>(
sql, asOfDate.ToDateTime(TimeOnly.MinValue))
var entity = await dbContext.EpssRaw
.Where(e => e.AsOfDate == asOfDate)
.OrderByDescending(e => e.IngestionTs)
.FirstOrDefaultAsync(cancellationToken)
.ConfigureAwait(false);
return row is not null && row.raw_id != 0 ? MapToRaw(row) : null;
return entity is not null ? MapEntityToRaw(entity) : null;
}
/// <summary>
/// Gets all raw EPSS payloads whose as-of date falls within the inclusive
/// [<paramref name="startDate"/>, <paramref name="endDate"/>] range, newest date first.
/// </summary>
/// <param name="startDate">Inclusive range start.</param>
/// <param name="endDate">Inclusive range end.</param>
/// <param name="cancellationToken">Token to cancel the query.</param>
// NOTE(review): the first parameter was truncated in the diff hunk; its name is
// reconstructed from the query body ("startDate") — confirm against the interface.
public async Task<IReadOnlyList<EpssRaw>> GetByDateRangeAsync(
    DateOnly startDate,
    DateOnly endDate,
    CancellationToken cancellationToken = default)
{
    await using var connection = await _dataSource.OpenSystemConnectionAsync(cancellationToken);
    await using var dbContext = ScannerDbContextFactory.Create(connection, _dataSource.CommandTimeoutSeconds, SchemaName);

    var entities = await dbContext.EpssRaw
        .Where(e => e.AsOfDate >= startDate && e.AsOfDate <= endDate)
        .OrderByDescending(e => e.AsOfDate)
        .ToListAsync(cancellationToken)
        .ConfigureAwait(false);

    return entities.Select(MapEntityToRaw).ToList();
}
/// <summary>
/// Gets the overall newest raw EPSS payload (by as-of date, then ingestion
/// timestamp), or <c>null</c> when the table is empty.
/// </summary>
/// <param name="cancellationToken">Token to cancel the query.</param>
public async Task<EpssRaw?> GetLatestAsync(CancellationToken cancellationToken = default)
{
    await using var connection = await _dataSource.OpenSystemConnectionAsync(cancellationToken);
    await using var dbContext = ScannerDbContextFactory.Create(connection, _dataSource.CommandTimeoutSeconds, SchemaName);

    var entity = await dbContext.EpssRaw
        .OrderByDescending(e => e.AsOfDate)
        .ThenByDescending(e => e.IngestionTs)
        .FirstOrDefaultAsync(cancellationToken)
        .ConfigureAwait(false);

    return entity is not null ? MapEntityToRaw(entity) : null;
}
/// <summary>
/// Returns <c>true</c> when a raw payload with the given as-of date and
/// payload SHA-256 already exists (idempotent-ingest check).
/// </summary>
/// <param name="asOfDate">Model as-of date.</param>
/// <param name="payloadSha256">SHA-256 digest of the candidate payload.</param>
/// <param name="cancellationToken">Token to cancel the query.</param>
public async Task<bool> ExistsAsync(DateOnly asOfDate, byte[] payloadSha256, CancellationToken cancellationToken = default)
{
    await using var connection = await _dataSource.OpenSystemConnectionAsync(cancellationToken);
    await using var dbContext = ScannerDbContextFactory.Create(connection, _dataSource.CommandTimeoutSeconds, SchemaName);

    return await dbContext.EpssRaw
        .AnyAsync(e => e.AsOfDate == asOfDate && e.PayloadSha256 == payloadSha256, cancellationToken)
        .ConfigureAwait(false);
}
/// <summary>
/// Gets raw EPSS payloads produced by a specific model version, newest
/// as-of date first, capped at <paramref name="limit"/> rows.
/// </summary>
/// <param name="modelVersion">EPSS model version identifier to filter on.</param>
/// <param name="limit">Maximum number of rows to return (default 100).</param>
/// <param name="cancellationToken">Token to cancel the query.</param>
// NOTE(review): the first parameter was truncated in the diff hunk; its name is
// reconstructed from the query body ("modelVersion") — confirm against the interface.
public async Task<IReadOnlyList<EpssRaw>> GetByModelVersionAsync(
    string modelVersion,
    int limit = 100,
    CancellationToken cancellationToken = default)
{
    await using var connection = await _dataSource.OpenSystemConnectionAsync(cancellationToken);
    await using var dbContext = ScannerDbContextFactory.Create(connection, _dataSource.CommandTimeoutSeconds, SchemaName);

    var entities = await dbContext.EpssRaw
        .Where(e => e.ModelVersion == modelVersion)
        .OrderByDescending(e => e.AsOfDate)
        .Take(limit)
        .ToListAsync(cancellationToken)
        .ConfigureAwait(false);

    return entities.Select(MapEntityToRaw).ToList();
}
public async Task<int> PruneAsync(int retentionDays = 365, CancellationToken cancellationToken = default)
@@ -204,40 +162,23 @@ public sealed class PostgresEpssRawRepository : IEpssRawRepository
return Convert.ToInt32(result);
}
/// <summary>
/// Maps an EF Core <see cref="EpssRawEntity"/> row to the domain <c>EpssRaw</c>
/// model. Pure one-to-one property copy; no conversion is needed because the
/// entity already exposes <c>DateOnly</c>/<c>DateTimeOffset</c> values.
/// </summary>
private static EpssRaw MapEntityToRaw(EpssRawEntity entity)
{
    return new EpssRaw
    {
        RawId = entity.RawId,
        SourceUri = entity.SourceUri,
        AsOfDate = entity.AsOfDate,
        IngestionTs = entity.IngestionTs,
        Payload = entity.Payload,
        PayloadSha256 = entity.PayloadSha256,
        HeaderComment = entity.HeaderComment,
        ModelVersion = entity.ModelVersion,
        PublishedDate = entity.PublishedDate,
        RowCount = entity.RowCount,
        CompressedSize = entity.CompressedSize,
        DecompressedSize = entity.DecompressedSize,
        ImportRunId = entity.ImportRunId
    };
}
// Legacy projection DTO for the removed SqlQueryRaw<RawRow> queries; snake_case
// property names mirror the database column names so raw-SQL binding worked
// without explicit mapping. After the EF Core migration the repository maps
// EpssRawEntity directly (MapEntityToRaw), so this type appears unreferenced —
// TODO confirm no remaining callers, then delete.
private sealed class RawRow
{
public long raw_id { get; set; }
public string source_uri { get; set; } = "";
// Raw SQL returned plain timestamps; callers converted via DateOnly.FromDateTime.
public DateTime asof_date { get; set; }
public DateTimeOffset ingestion_ts { get; set; }
public string payload { get; set; } = "";
public byte[] payload_sha256 { get; set; } = [];
public string? header_comment { get; set; }
public string? model_version { get; set; }
public DateTime? published_date { get; set; }
public int row_count { get; set; }
public long? compressed_size { get; set; }
public long? decompressed_size { get; set; }
public Guid? import_run_id { get; set; }
}
}

View File

@@ -8,6 +8,7 @@
using Microsoft.EntityFrameworkCore;
using Npgsql;
using StellaOps.Scanner.Storage.EfCore.Models;
using StellaOps.Scanner.Storage.Repositories;
using System.Text.Json;
@@ -150,44 +151,27 @@ public sealed class PostgresEpssSignalRepository : IEpssSignalRepository
var eventTypeList = eventTypes?.ToList();
var hasEventTypeFilter = eventTypeList?.Count > 0;
var paramList = new List<object>
{
tenantId,
startDate.ToDateTime(TimeOnly.MinValue),
endDate.ToDateTime(TimeOnly.MinValue)
};
var paramIndex = 3;
var eventTypeClause = "";
if (hasEventTypeFilter)
{
eventTypeClause = $"AND event_type = ANY(@p{paramIndex})";
paramList.Add(eventTypeList!.ToArray());
}
var sql = $"""
SELECT
signal_id, tenant_id, model_date, cve_id, event_type, risk_band,
epss_score, epss_delta, percentile, percentile_delta,
is_model_change, model_version, dedupe_key, explain_hash, payload, created_at
FROM {SignalTable}
WHERE tenant_id = @p0
AND model_date >= @p1
AND model_date <= @p2
{eventTypeClause}
ORDER BY model_date DESC, created_at DESC
LIMIT 10000
""";
await using var connection = await _dataSource.OpenConnectionAsync(tenantId.ToString("D"), cancellationToken);
await using var dbContext = ScannerDbContextFactory.Create(connection, _dataSource.CommandTimeoutSeconds, SchemaName);
var rows = await dbContext.Database.SqlQueryRaw<SignalRow>(
sql, paramList.ToArray())
IQueryable<EpssSignalEntity> query = dbContext.EpssSignals
.Where(e => e.TenantId == tenantId
&& e.ModelDate >= startDate
&& e.ModelDate <= endDate);
if (hasEventTypeFilter)
{
query = query.Where(e => eventTypeList!.Contains(e.EventType));
}
var entities = await query
.OrderByDescending(e => e.ModelDate)
.ThenByDescending(e => e.CreatedAt)
.Take(10000)
.ToListAsync(cancellationToken)
.ConfigureAwait(false);
return rows.Select(MapToSignal).ToList();
return entities.Select(MapEntityToSignal).ToList();
}
/// <summary>
/// Gets EPSS signals for a single CVE within a tenant, newest model date
/// (then creation time) first, capped at <paramref name="limit"/> rows.
/// </summary>
/// <param name="tenantId">Tenant that owns the signals.</param>
/// <param name="cveId">CVE identifier to filter on.</param>
/// <param name="limit">Maximum number of rows to return (default 100).</param>
/// <param name="cancellationToken">Token to cancel the query.</param>
// NOTE(review): the leading parameters were truncated in the diff hunk; their
// names are reconstructed from the query body — confirm against the interface.
public async Task<IReadOnlyList<EpssSignal>> GetByCveAsync(
    Guid tenantId,
    string cveId,
    int limit = 100,
    CancellationToken cancellationToken = default)
{
    await using var connection = await _dataSource.OpenConnectionAsync(tenantId.ToString("D"), cancellationToken);
    await using var dbContext = ScannerDbContextFactory.Create(connection, _dataSource.CommandTimeoutSeconds, SchemaName);

    var entities = await dbContext.EpssSignals
        .Where(e => e.TenantId == tenantId && e.CveId == cveId)
        .OrderByDescending(e => e.ModelDate)
        .ThenByDescending(e => e.CreatedAt)
        .Take(limit)
        .ToListAsync(cancellationToken)
        .ConfigureAwait(false);

    return entities.Select(MapEntityToSignal).ToList();
}
/// <summary>
/// Gets CRITICAL/HIGH risk-band EPSS signals for a tenant within an inclusive
/// model-date range, newest first, capped at 10000 rows.
/// </summary>
/// <param name="tenantId">Tenant that owns the signals.</param>
/// <param name="startDate">Inclusive model-date range start.</param>
/// <param name="endDate">Inclusive model-date range end.</param>
/// <param name="cancellationToken">Token to cancel the query.</param>
// NOTE(review): the leading parameters were truncated in the diff hunk; their
// names are reconstructed from the query body — confirm against the interface.
public async Task<IReadOnlyList<EpssSignal>> GetHighPriorityAsync(
    Guid tenantId,
    DateOnly startDate,
    DateOnly endDate,
    CancellationToken cancellationToken = default)
{
    // Bands considered "high priority"; matched against the stored risk_band value.
    string[] highPriorityBands = ["CRITICAL", "HIGH"];

    await using var connection = await _dataSource.OpenConnectionAsync(tenantId.ToString("D"), cancellationToken);
    await using var dbContext = ScannerDbContextFactory.Create(connection, _dataSource.CommandTimeoutSeconds, SchemaName);

    var entities = await dbContext.EpssSignals
        .Where(e => e.TenantId == tenantId
            && e.ModelDate >= startDate
            && e.ModelDate <= endDate
            && e.RiskBand != null && highPriorityBands.Contains(e.RiskBand))
        .OrderByDescending(e => e.ModelDate)
        .ThenByDescending(e => e.CreatedAt)
        .Take(10000)
        .ToListAsync(cancellationToken)
        .ConfigureAwait(false);

    return entities.Select(MapEntityToSignal).ToList();
}
/// <summary>
/// Gets the per-tenant EPSS signal configuration, or <c>null</c> when the
/// tenant has no stored configuration row.
/// </summary>
/// <param name="tenantId">Tenant whose configuration is requested.</param>
/// <param name="cancellationToken">Token to cancel the query.</param>
public async Task<EpssSignalConfig?> GetConfigAsync(Guid tenantId, CancellationToken cancellationToken = default)
{
    await using var connection = await _dataSource.OpenConnectionAsync(tenantId.ToString("D"), cancellationToken);
    await using var dbContext = ScannerDbContextFactory.Create(connection, _dataSource.CommandTimeoutSeconds, SchemaName);

    var entity = await dbContext.EpssSignalConfigs
        .FirstOrDefaultAsync(e => e.TenantId == tenantId, cancellationToken)
        .ConfigureAwait(false);

    return entity is not null ? MapEntityToConfig(entity) : null;
}
public async Task<EpssSignalConfig> UpsertConfigAsync(EpssSignalConfig config, CancellationToken cancellationToken = default)
@@ -331,97 +289,53 @@ public sealed class PostgresEpssSignalRepository : IEpssSignalRepository
/// <summary>
/// Looks up a signal by its (tenant, dedupe key) pair — used to detect
/// duplicate deliveries before inserting a new signal.
/// </summary>
/// <param name="tenantId">Tenant that owns the signal.</param>
/// <param name="dedupeKey">Deterministic deduplication key for the signal.</param>
/// <param name="cancellationToken">Token to cancel the query.</param>
private async Task<EpssSignal?> GetByDedupeKeyAsync(Guid tenantId, string dedupeKey, CancellationToken cancellationToken)
{
    await using var connection = await _dataSource.OpenConnectionAsync(tenantId.ToString("D"), cancellationToken);
    await using var dbContext = ScannerDbContextFactory.Create(connection, _dataSource.CommandTimeoutSeconds, SchemaName);

    var entity = await dbContext.EpssSignals
        .FirstOrDefaultAsync(e => e.TenantId == tenantId && e.DedupeKey == dedupeKey, cancellationToken)
        .ConfigureAwait(false);

    return entity is not null ? MapEntityToSignal(entity) : null;
}
/// <summary>
/// Maps an EF Core <see cref="EpssSignalEntity"/> row to the domain
/// <c>EpssSignal</c> model. Pure one-to-one property copy.
/// </summary>
private static EpssSignal MapEntityToSignal(EpssSignalEntity entity)
{
    return new EpssSignal
    {
        SignalId = entity.SignalId,
        TenantId = entity.TenantId,
        ModelDate = entity.ModelDate,
        CveId = entity.CveId,
        EventType = entity.EventType,
        RiskBand = entity.RiskBand,
        EpssScore = entity.EpssScore,
        EpssDelta = entity.EpssDelta,
        Percentile = entity.Percentile,
        PercentileDelta = entity.PercentileDelta,
        IsModelChange = entity.IsModelChange,
        ModelVersion = entity.ModelVersion,
        DedupeKey = entity.DedupeKey,
        ExplainHash = entity.ExplainHash,
        Payload = entity.Payload,
        CreatedAt = entity.CreatedAt
    };
}
/// <summary>
/// Maps an EF Core <see cref="EpssSignalConfigEntity"/> row to the domain
/// <c>EpssSignalConfig</c> model; a null stored event-type list is normalized
/// to an empty collection so callers never see null.
/// </summary>
private static EpssSignalConfig MapEntityToConfig(EpssSignalConfigEntity entity)
{
    return new EpssSignalConfig
    {
        ConfigId = entity.ConfigId,
        TenantId = entity.TenantId,
        CriticalPercentile = entity.CriticalPercentile,
        HighPercentile = entity.HighPercentile,
        MediumPercentile = entity.MediumPercentile,
        BigJumpDelta = entity.BigJumpDelta,
        SuppressOnModelChange = entity.SuppressOnModelChange,
        EnabledEventTypes = entity.EnabledEventTypes ?? [],
        CreatedAt = entity.CreatedAt,
        UpdatedAt = entity.UpdatedAt
    };
}
// Legacy projection DTO for the removed SqlQueryRaw<SignalRow> queries;
// snake_case property names mirror the database column names. After the EF Core
// migration the repository maps EpssSignalEntity directly (MapEntityToSignal),
// so this type appears unreferenced — TODO confirm no remaining callers, then delete.
private sealed class SignalRow
{
public long signal_id { get; set; }
public Guid tenant_id { get; set; }
public DateOnly model_date { get; set; }
public string cve_id { get; set; } = "";
public string event_type { get; set; } = "";
public string? risk_band { get; set; }
public double? epss_score { get; set; }
public double? epss_delta { get; set; }
public double? percentile { get; set; }
public double? percentile_delta { get; set; }
public bool is_model_change { get; set; }
public string? model_version { get; set; }
public string dedupe_key { get; set; } = "";
public byte[] explain_hash { get; set; } = [];
public string payload { get; set; } = "";
public DateTimeOffset created_at { get; set; }
}
// Legacy projection DTO for the removed SqlQueryRaw<ConfigRow> query;
// snake_case property names mirror the database column names. After the EF Core
// migration the repository maps EpssSignalConfigEntity directly
// (MapEntityToConfig), so this type appears unreferenced — TODO confirm no
// remaining callers, then delete.
private sealed class ConfigRow
{
public Guid config_id { get; set; }
public Guid tenant_id { get; set; }
public double critical_percentile { get; set; }
public double high_percentile { get; set; }
public double medium_percentile { get; set; }
public double big_jump_delta { get; set; }
public bool suppress_on_model_change { get; set; }
public string[]? enabled_event_types { get; set; }
public DateTimeOffset created_at { get; set; }
public DateTimeOffset updated_at { get; set; }
}
}

View File

@@ -4,11 +4,13 @@
// Sprint: SPRINT_20260105_002_003_FACET (QTA-013)
using Microsoft.EntityFrameworkCore;
using Microsoft.Extensions.Logging;
using Npgsql;
using NpgsqlTypes;
using StellaOps.Facet;
using StellaOps.Facet.Serialization;
using StellaOps.Scanner.Storage.EfCore.Models;
using System.Collections.Immutable;
using System.Text.Json;
@@ -29,10 +31,7 @@ public sealed class PostgresFacetSealStore : IFacetSealStore
private readonly ILogger<PostgresFacetSealStore> _logger;
private readonly TimeProvider _timeProvider;
private const string SelectColumns = """
combined_merkle_root, image_digest, schema_version, created_at,
build_attestation_ref, signature, signing_key_id, seal_content
""";
private const int DefaultCommandTimeoutSeconds = 30;
private const string InsertSql = """
INSERT INTO scanner.facet_seals (
@@ -44,40 +43,6 @@ public sealed class PostgresFacetSealStore : IFacetSealStore
)
""";
private const string SelectLatestSql = $"""
SELECT {SelectColumns}
FROM scanner.facet_seals
WHERE image_digest = @image_digest
ORDER BY created_at DESC
LIMIT 1
""";
private const string SelectByCombinedRootSql = $"""
SELECT {SelectColumns}
FROM scanner.facet_seals
WHERE combined_merkle_root = @combined_merkle_root
""";
private const string SelectHistorySql = $"""
SELECT {SelectColumns}
FROM scanner.facet_seals
WHERE image_digest = @image_digest
ORDER BY created_at DESC
LIMIT @limit
""";
private const string ExistsSql = """
SELECT EXISTS(
SELECT 1 FROM scanner.facet_seals
WHERE image_digest = @image_digest
)
""";
private const string DeleteByImageSql = """
DELETE FROM scanner.facet_seals
WHERE image_digest = @image_digest
""";
private const string PurgeSql = """
WITH ranked AS (
SELECT combined_merkle_root, image_digest, created_at,
@@ -116,16 +81,15 @@ public sealed class PostgresFacetSealStore : IFacetSealStore
ArgumentException.ThrowIfNullOrWhiteSpace(imageDigest);
await using var conn = await _dataSource.OpenConnectionAsync(ct).ConfigureAwait(false);
await using var cmd = new NpgsqlCommand(SelectLatestSql, conn);
cmd.Parameters.AddWithValue("image_digest", imageDigest);
await using var dbContext = ScannerDbContextFactory.Create(conn, DefaultCommandTimeoutSeconds, ScannerStorageDefaults.DefaultSchemaName);
await using var reader = await cmd.ExecuteReaderAsync(ct).ConfigureAwait(false);
if (!await reader.ReadAsync(ct).ConfigureAwait(false))
{
return null;
}
var entity = await dbContext.FacetSeals
.Where(e => e.ImageDigest == imageDigest)
.OrderByDescending(e => e.CreatedAt)
.FirstOrDefaultAsync(ct)
.ConfigureAwait(false);
return MapSeal(reader);
return entity is not null ? MapEntityToSeal(entity) : null;
}
/// <inheritdoc/>
@@ -135,16 +99,13 @@ public sealed class PostgresFacetSealStore : IFacetSealStore
ArgumentException.ThrowIfNullOrWhiteSpace(combinedMerkleRoot);
await using var conn = await _dataSource.OpenConnectionAsync(ct).ConfigureAwait(false);
await using var cmd = new NpgsqlCommand(SelectByCombinedRootSql, conn);
cmd.Parameters.AddWithValue("combined_merkle_root", combinedMerkleRoot);
await using var dbContext = ScannerDbContextFactory.Create(conn, DefaultCommandTimeoutSeconds, ScannerStorageDefaults.DefaultSchemaName);
await using var reader = await cmd.ExecuteReaderAsync(ct).ConfigureAwait(false);
if (!await reader.ReadAsync(ct).ConfigureAwait(false))
{
return null;
}
var entity = await dbContext.FacetSeals
.FirstOrDefaultAsync(e => e.CombinedMerkleRoot == combinedMerkleRoot, ct)
.ConfigureAwait(false);
return MapSeal(reader);
return entity is not null ? MapEntityToSeal(entity) : null;
}
/// <inheritdoc/>
@@ -158,18 +119,16 @@ public sealed class PostgresFacetSealStore : IFacetSealStore
ArgumentOutOfRangeException.ThrowIfNegativeOrZero(limit);
await using var conn = await _dataSource.OpenConnectionAsync(ct).ConfigureAwait(false);
await using var cmd = new NpgsqlCommand(SelectHistorySql, conn);
cmd.Parameters.AddWithValue("image_digest", imageDigest);
cmd.Parameters.AddWithValue("limit", limit);
await using var dbContext = ScannerDbContextFactory.Create(conn, DefaultCommandTimeoutSeconds, ScannerStorageDefaults.DefaultSchemaName);
var seals = new List<FacetSeal>();
await using var reader = await cmd.ExecuteReaderAsync(ct).ConfigureAwait(false);
while (await reader.ReadAsync(ct).ConfigureAwait(false))
{
seals.Add(MapSeal(reader));
}
var entities = await dbContext.FacetSeals
.Where(e => e.ImageDigest == imageDigest)
.OrderByDescending(e => e.CreatedAt)
.Take(limit)
.ToListAsync(ct)
.ConfigureAwait(false);
return [.. seals];
return [.. entities.Select(MapEntityToSeal)];
}
/// <inheritdoc/>
@@ -214,11 +173,11 @@ public sealed class PostgresFacetSealStore : IFacetSealStore
ArgumentException.ThrowIfNullOrWhiteSpace(imageDigest);
await using var conn = await _dataSource.OpenConnectionAsync(ct).ConfigureAwait(false);
await using var cmd = new NpgsqlCommand(ExistsSql, conn);
cmd.Parameters.AddWithValue("image_digest", imageDigest);
await using var dbContext = ScannerDbContextFactory.Create(conn, DefaultCommandTimeoutSeconds, ScannerStorageDefaults.DefaultSchemaName);
var result = await cmd.ExecuteScalarAsync(ct).ConfigureAwait(false);
return result is true;
return await dbContext.FacetSeals
.AnyAsync(e => e.ImageDigest == imageDigest, ct)
.ConfigureAwait(false);
}
/// <inheritdoc/>
@@ -228,10 +187,13 @@ public sealed class PostgresFacetSealStore : IFacetSealStore
ArgumentException.ThrowIfNullOrWhiteSpace(imageDigest);
await using var conn = await _dataSource.OpenConnectionAsync(ct).ConfigureAwait(false);
await using var cmd = new NpgsqlCommand(DeleteByImageSql, conn);
cmd.Parameters.AddWithValue("image_digest", imageDigest);
await using var dbContext = ScannerDbContextFactory.Create(conn, DefaultCommandTimeoutSeconds, ScannerStorageDefaults.DefaultSchemaName);
var deleted = await dbContext.FacetSeals
.Where(e => e.ImageDigest == imageDigest)
.ExecuteDeleteAsync(ct)
.ConfigureAwait(false);
var deleted = await cmd.ExecuteNonQueryAsync(ct).ConfigureAwait(false);
_logger.LogInformation("Deleted {Count} facet seal(s) for image {ImageDigest}",
deleted, imageDigest);
return deleted;
@@ -259,16 +221,14 @@ public sealed class PostgresFacetSealStore : IFacetSealStore
return purged;
}
/// <summary>
/// Deserializes the JSONB seal content of a <see cref="FacetSealEntity"/> into
/// a <c>FacetSeal</c> domain object.
/// </summary>
/// <exception cref="InvalidOperationException">
/// Thrown when the stored seal content does not deserialize to a non-null seal.
/// </exception>
private static FacetSeal MapEntityToSeal(FacetSealEntity entity)
{
    var seal = JsonSerializer.Deserialize<FacetSeal>(entity.SealContent, FacetJsonOptions.Default);
    if (seal is null)
    {
        throw new InvalidOperationException(
            $"Failed to deserialize facet seal from database: {entity.CombinedMerkleRoot}");
    }
    return seal;
}

View File

@@ -6,9 +6,11 @@
// -----------------------------------------------------------------------------
using Microsoft.EntityFrameworkCore;
using Npgsql;
using NpgsqlTypes;
using StellaOps.Determinism;
using StellaOps.Scanner.Storage.EfCore.Models;
using StellaOps.Scanner.Storage.Entities;
using System.Text.Json;
@@ -69,6 +71,8 @@ public sealed class PostgresFuncProofRepository : IFuncProofRepository
private readonly TimeProvider _timeProvider;
private readonly IGuidProvider _guidProvider;
private const int DefaultCommandTimeoutSeconds = 30;
public PostgresFuncProofRepository(
NpgsqlDataSource dataSource,
TimeProvider? timeProvider = null,
@@ -135,106 +139,64 @@ public sealed class PostgresFuncProofRepository : IFuncProofRepository
/// <summary>
/// Gets a function-proof document by its primary key, or <c>null</c> when no
/// row with that id exists.
/// </summary>
/// <param name="id">Primary key of the proof document.</param>
/// <param name="ct">Token to cancel the query.</param>
public async Task<FuncProofDocumentRow?> GetByIdAsync(Guid id, CancellationToken ct = default)
{
    await using var conn = await _dataSource.OpenConnectionAsync(ct);
    await using var dbContext = ScannerDbContextFactory.Create(conn, DefaultCommandTimeoutSeconds, ScannerStorageDefaults.DefaultSchemaName);

    var entity = await dbContext.FuncProofs
        .FirstOrDefaultAsync(e => e.Id == id, ct)
        .ConfigureAwait(false);

    return entity is not null ? MapEntityToRow(entity) : null;
}
/// <summary>
/// Gets a function-proof document by its logical proof identifier, or
/// <c>null</c> when no row with that proof id exists.
/// </summary>
/// <param name="proofId">Logical proof identifier.</param>
/// <param name="ct">Token to cancel the query.</param>
public async Task<FuncProofDocumentRow?> GetByProofIdAsync(string proofId, CancellationToken ct = default)
{
    await using var conn = await _dataSource.OpenConnectionAsync(ct);
    await using var dbContext = ScannerDbContextFactory.Create(conn, DefaultCommandTimeoutSeconds, ScannerStorageDefaults.DefaultSchemaName);

    var entity = await dbContext.FuncProofs
        .FirstOrDefaultAsync(e => e.ProofId == proofId, ct)
        .ConfigureAwait(false);

    return entity is not null ? MapEntityToRow(entity) : null;
}
/// <summary>
/// Gets all function-proof documents for a binary build id, newest
/// generation timestamp first.
/// </summary>
/// <param name="buildId">Build identifier of the binary.</param>
/// <param name="ct">Token to cancel the query.</param>
public async Task<IReadOnlyList<FuncProofDocumentRow>> GetByBuildIdAsync(string buildId, CancellationToken ct = default)
{
    await using var conn = await _dataSource.OpenConnectionAsync(ct);
    await using var dbContext = ScannerDbContextFactory.Create(conn, DefaultCommandTimeoutSeconds, ScannerStorageDefaults.DefaultSchemaName);

    var entities = await dbContext.FuncProofs
        .Where(e => e.BuildId == buildId)
        .OrderByDescending(e => e.GeneratedAtUtc)
        .ToListAsync(ct)
        .ConfigureAwait(false);

    return entities.Select(MapEntityToRow).ToList();
}
/// <summary>
/// Gets all function-proof documents produced by a scan, ordered by build id
/// for deterministic output.
/// </summary>
/// <param name="scanId">Scan whose proofs are requested.</param>
/// <param name="ct">Token to cancel the query.</param>
public async Task<IReadOnlyList<FuncProofDocumentRow>> GetByScanIdAsync(Guid scanId, CancellationToken ct = default)
{
    await using var conn = await _dataSource.OpenConnectionAsync(ct);
    await using var dbContext = ScannerDbContextFactory.Create(conn, DefaultCommandTimeoutSeconds, ScannerStorageDefaults.DefaultSchemaName);

    var entities = await dbContext.FuncProofs
        .Where(e => e.ScanId == scanId)
        .OrderBy(e => e.BuildId)
        .ToListAsync(ct)
        .ConfigureAwait(false);

    return entities.Select(MapEntityToRow).ToList();
}
/// <summary>
/// Returns <c>true</c> when a function-proof document with the given proof id
/// already exists.
/// </summary>
/// <param name="proofId">Logical proof identifier to check.</param>
/// <param name="ct">Token to cancel the query.</param>
public async Task<bool> ExistsAsync(string proofId, CancellationToken ct = default)
{
    await using var conn = await _dataSource.OpenConnectionAsync(ct);
    await using var dbContext = ScannerDbContextFactory.Create(conn, DefaultCommandTimeoutSeconds, ScannerStorageDefaults.DefaultSchemaName);

    return await dbContext.FuncProofs
        .AnyAsync(e => e.ProofId == proofId, ct)
        .ConfigureAwait(false);
}
public async Task UpdateSignatureInfoAsync(
@@ -266,30 +228,30 @@ public sealed class PostgresFuncProofRepository : IFuncProofRepository
await cmd.ExecuteNonQueryAsync(ct);
}
/// <summary>
/// Maps an EF Core <see cref="FuncProofEntity"/> row to the
/// <c>FuncProofDocumentRow</c> storage model. Pure one-to-one property copy;
/// nullable columns flow through unchanged.
/// </summary>
private static FuncProofDocumentRow MapEntityToRow(FuncProofEntity entity)
{
    return new FuncProofDocumentRow
    {
        Id = entity.Id,
        ScanId = entity.ScanId,
        ProofId = entity.ProofId,
        BuildId = entity.BuildId,
        BuildIdType = entity.BuildIdType,
        FileSha256 = entity.FileSha256,
        BinaryFormat = entity.BinaryFormat,
        Architecture = entity.Architecture,
        IsStripped = entity.IsStripped,
        FunctionCount = entity.FunctionCount,
        TraceCount = entity.TraceCount,
        ProofContent = entity.ProofContent,
        CompressedContent = entity.CompressedContent,
        DsseEnvelopeId = entity.DsseEnvelopeId,
        OciArtifactDigest = entity.OciArtifactDigest,
        RekorEntryId = entity.RekorEntryId,
        GeneratorVersion = entity.GeneratorVersion,
        GeneratedAtUtc = entity.GeneratedAtUtc,
        CreatedAtUtc = entity.CreatedAtUtc,
        UpdatedAtUtc = entity.UpdatedAtUtc
    };
}
}

View File

@@ -4,6 +4,7 @@ using Microsoft.Extensions.Logging;
using Npgsql;
using StellaOps.Scanner.Contracts;
using StellaOps.Scanner.ReachabilityDrift;
using StellaOps.Scanner.Storage.EfCore.Models;
using StellaOps.Scanner.Storage.Repositories;
using System.Collections.Immutable;
using System.Text.Json;
@@ -154,76 +155,47 @@ public sealed class PostgresReachabilityDriftResultRepository : IReachabilityDri
ArgumentException.ThrowIfNullOrWhiteSpace(headScanId);
ArgumentException.ThrowIfNullOrWhiteSpace(language);
var tenantScope = ScannerTenantScope.Resolve(tenantId);
var sql = $"""
SELECT id, base_scan_id, head_scan_id, language, detected_at, result_digest
FROM {DriftResultsTable}
WHERE tenant_id = @p0 AND head_scan_id = @p1 AND language = @p2
ORDER BY detected_at DESC
LIMIT 1
""";
var trimmedHead = headScanId.Trim();
var trimmedLang = language.Trim();
await using var connection = await _dataSource.OpenConnectionAsync(tenantScope.TenantContext, ct).ConfigureAwait(false);
await using var dbContext = ScannerDbContextFactory.Create(connection, _dataSource.CommandTimeoutSeconds, SchemaName);
var header = await dbContext.Database.SqlQueryRaw<DriftHeaderRow>(
sql, tenantScope.TenantId, headScanId.Trim(), language.Trim())
var entity = await dbContext.ReachabilityDriftResults
.Include(e => e.DriftedSinks)
.Where(e => e.TenantId == tenantScope.TenantId && e.HeadScanId == trimmedHead && e.Language == trimmedLang)
.OrderByDescending(e => e.DetectedAt)
.FirstOrDefaultAsync(ct)
.ConfigureAwait(false);
if (header is null)
{
return null;
}
return await LoadResultAsync(connection, header, tenantScope.TenantId, ct).ConfigureAwait(false);
return entity is not null ? MapEntityToResult(entity) : null;
}
/// <summary>
/// Gets a reachability-drift result (including its drifted sinks) by id within
/// the resolved tenant scope, or <c>null</c> when no matching row exists.
/// </summary>
/// <param name="driftId">Identifier of the drift result.</param>
/// <param name="ct">Token to cancel the query.</param>
/// <param name="tenantId">Optional tenant override; resolved via ScannerTenantScope.</param>
public async Task<ReachabilityDriftResult?> TryGetByIdAsync(Guid driftId, CancellationToken ct = default, string? tenantId = null)
{
    var tenantScope = ScannerTenantScope.Resolve(tenantId);

    await using var connection = await _dataSource.OpenConnectionAsync(tenantScope.TenantContext, ct).ConfigureAwait(false);
    await using var dbContext = ScannerDbContextFactory.Create(connection, _dataSource.CommandTimeoutSeconds, SchemaName);

    // Eagerly load the child sink rows so mapping has the full aggregate.
    var entity = await dbContext.ReachabilityDriftResults
        .Include(e => e.DriftedSinks)
        .FirstOrDefaultAsync(e => e.TenantId == tenantScope.TenantId && e.Id == driftId, ct)
        .ConfigureAwait(false);

    return entity is not null ? MapEntityToResult(entity) : null;
}
public async Task<bool> ExistsAsync(Guid driftId, CancellationToken ct = default, string? tenantId = null)
{
var tenantScope = ScannerTenantScope.Resolve(tenantId);
var sql = $"""
SELECT CAST(1 AS integer) AS "Value"
FROM {DriftResultsTable}
WHERE tenant_id = @p0 AND id = @p1
LIMIT 1
""";
await using var connection = await _dataSource.OpenConnectionAsync(tenantScope.TenantContext, ct).ConfigureAwait(false);
await using var dbContext = ScannerDbContextFactory.Create(connection, _dataSource.CommandTimeoutSeconds, SchemaName);
var result = await dbContext.Database.SqlQueryRaw<int>(
sql, tenantScope.TenantId, driftId)
.FirstOrDefaultAsync(ct)
return await dbContext.ReachabilityDriftResults
.AnyAsync(e => e.TenantId == tenantScope.TenantId && e.Id == driftId, ct)
.ConfigureAwait(false);
return result != 0;
}
public async Task<IReadOnlyList<DriftedSink>> ListSinksAsync(
@@ -244,37 +216,20 @@ public sealed class PostgresReachabilityDriftResultRepository : IReachabilityDri
throw new ArgumentOutOfRangeException(nameof(limit));
}
var tenantScope = ScannerTenantScope.Resolve(tenantId);
var sql = $"""
SELECT
id,
sink_node_id,
symbol,
sink_category,
direction,
cause_kind,
cause_description,
cause_symbol,
cause_file,
cause_line,
code_change_id,
compressed_path,
associated_vulns
FROM {DriftedSinksTable}
WHERE tenant_id = @p0 AND drift_result_id = @p1 AND direction = @p2
ORDER BY sink_node_id ASC
OFFSET @p3 LIMIT @p4
""";
var directionValue = ToDbValue(direction);
await using var connection = await _dataSource.OpenConnectionAsync(tenantScope.TenantContext, ct).ConfigureAwait(false);
await using var dbContext = ScannerDbContextFactory.Create(connection, _dataSource.CommandTimeoutSeconds, SchemaName);
var rows = await dbContext.Database.SqlQueryRaw<DriftSinkRow>(
sql, tenantScope.TenantId, driftId, ToDbValue(direction), offset, limit)
var entities = await dbContext.DriftedSinks
.Where(e => e.TenantId == tenantScope.TenantId && e.DriftResultId == driftId && e.Direction == directionValue)
.OrderBy(e => e.SinkNodeId)
.Skip(offset)
.Take(limit)
.ToListAsync(ct)
.ConfigureAwait(false);
return rows.Select(r => r.ToModel(direction)).ToList();
return entities.Select(e => MapSinkEntityToModel(e, direction)).ToList();
}
private static IEnumerable<SinkInsertParams> EnumerateSinkParams(
@@ -315,61 +270,71 @@ public sealed class PostgresReachabilityDriftResultRepository : IReachabilityDri
string CauseKind, string CauseDescription, string? CauseSymbol, string? CauseFile,
int? CauseLine, Guid? CodeChangeId, string CompressedPath, string? AssociatedVulns);
private async Task<ReachabilityDriftResult> LoadResultAsync(
NpgsqlConnection connection,
DriftHeaderRow header,
Guid tenantId,
CancellationToken ct)
private static ReachabilityDriftResult MapEntityToResult(ReachabilityDriftResultEntity entity)
{
var sinksSql = $"""
SELECT
id,
sink_node_id,
symbol,
sink_category,
direction,
cause_kind,
cause_description,
cause_symbol,
cause_file,
cause_line,
code_change_id,
compressed_path,
associated_vulns
FROM {DriftedSinksTable}
WHERE tenant_id = @p0 AND drift_result_id = @p1
ORDER BY direction ASC, sink_node_id ASC
""";
var sinks = entity.DriftedSinks ?? [];
var reachableDirection = ToDbValue(DriftDirection.BecameReachable);
var unreachableDirection = ToDbValue(DriftDirection.BecameUnreachable);
await using var dbContext = ScannerDbContextFactory.Create(connection, _dataSource.CommandTimeoutSeconds, SchemaName);
var rows = await dbContext.Database.SqlQueryRaw<DriftSinkRow>(
sinksSql, tenantId, header.id)
.ToListAsync(ct)
.ConfigureAwait(false);
var reachable = rows
.Where(r => string.Equals(r.direction, ToDbValue(DriftDirection.BecameReachable), StringComparison.Ordinal))
.Select(r => r.ToModel(DriftDirection.BecameReachable))
var reachable = sinks
.Where(s => string.Equals(s.Direction, reachableDirection, StringComparison.Ordinal))
.Select(s => MapSinkEntityToModel(s, DriftDirection.BecameReachable))
.OrderBy(s => s.SinkNodeId, StringComparer.Ordinal)
.ToImmutableArray();
var unreachable = rows
.Where(r => string.Equals(r.direction, ToDbValue(DriftDirection.BecameUnreachable), StringComparison.Ordinal))
.Select(r => r.ToModel(DriftDirection.BecameUnreachable))
var unreachable = sinks
.Where(s => string.Equals(s.Direction, unreachableDirection, StringComparison.Ordinal))
.Select(s => MapSinkEntityToModel(s, DriftDirection.BecameUnreachable))
.OrderBy(s => s.SinkNodeId, StringComparer.Ordinal)
.ToImmutableArray();
return new ReachabilityDriftResult
{
Id = header.id,
BaseScanId = header.base_scan_id,
HeadScanId = header.head_scan_id,
Language = header.language,
DetectedAt = header.detected_at,
Id = entity.Id,
BaseScanId = entity.BaseScanId,
HeadScanId = entity.HeadScanId,
Language = entity.Language,
DetectedAt = entity.DetectedAt,
NewlyReachable = reachable,
NewlyUnreachable = unreachable,
ResultDigest = header.result_digest
ResultDigest = entity.ResultDigest
};
}
private static DriftedSink MapSinkEntityToModel(DriftedSinkEntity entity, DriftDirection direction)
{
var path = JsonSerializer.Deserialize<CompressedPath>(entity.CompressedPath, JsonOptions)
?? new CompressedPath
{
Entrypoint = new PathNode { NodeId = string.Empty, Symbol = string.Empty },
Sink = new PathNode { NodeId = string.Empty, Symbol = string.Empty },
IntermediateCount = 0,
KeyNodes = ImmutableArray<PathNode>.Empty
};
var vulns = string.IsNullOrWhiteSpace(entity.AssociatedVulns)
? ImmutableArray<AssociatedVuln>.Empty
: (JsonSerializer.Deserialize<AssociatedVuln[]>(entity.AssociatedVulns!, JsonOptions) ?? [])
.ToImmutableArray();
return new DriftedSink
{
Id = entity.Id,
SinkNodeId = entity.SinkNodeId,
Symbol = entity.Symbol,
SinkCategory = ParseSinkCategory(entity.SinkCategory),
Direction = direction,
Cause = new DriftCause
{
Kind = ParseCauseKind(entity.CauseKind),
Description = entity.CauseDescription,
ChangedSymbol = entity.CauseSymbol,
ChangedFile = entity.CauseFile,
ChangedLine = entity.CauseLine,
CodeChangeId = entity.CodeChangeId
},
Path = path,
AssociatedVulns = vulns
};
}
@@ -438,67 +403,4 @@ public sealed class PostgresReachabilityDriftResultRepository : IReachabilityDri
};
}
private sealed class DriftHeaderRow
{
public Guid id { get; init; }
public string base_scan_id { get; init; } = string.Empty;
public string head_scan_id { get; init; } = string.Empty;
public string language { get; init; } = string.Empty;
public DateTimeOffset detected_at { get; init; }
public string result_digest { get; init; } = string.Empty;
}
private sealed class DriftSinkRow
{
public Guid id { get; init; }
public string sink_node_id { get; init; } = string.Empty;
public string symbol { get; init; } = string.Empty;
public string sink_category { get; init; } = string.Empty;
public string direction { get; init; } = string.Empty;
public string cause_kind { get; init; } = string.Empty;
public string cause_description { get; init; } = string.Empty;
public string? cause_symbol { get; init; }
public string? cause_file { get; init; }
public int? cause_line { get; init; }
public Guid? code_change_id { get; init; }
public string compressed_path { get; init; } = "{}";
public string? associated_vulns { get; init; }
public DriftedSink ToModel(DriftDirection direction)
{
var path = JsonSerializer.Deserialize<CompressedPath>(compressed_path, JsonOptions)
?? new CompressedPath
{
Entrypoint = new PathNode { NodeId = string.Empty, Symbol = string.Empty },
Sink = new PathNode { NodeId = string.Empty, Symbol = string.Empty },
IntermediateCount = 0,
KeyNodes = ImmutableArray<PathNode>.Empty
};
var vulns = string.IsNullOrWhiteSpace(associated_vulns)
? ImmutableArray<AssociatedVuln>.Empty
: (JsonSerializer.Deserialize<AssociatedVuln[]>(associated_vulns!, JsonOptions) ?? [])
.ToImmutableArray();
return new DriftedSink
{
Id = id,
SinkNodeId = sink_node_id,
Symbol = symbol,
SinkCategory = ParseSinkCategory(sink_category),
Direction = direction,
Cause = new DriftCause
{
Kind = ParseCauseKind(cause_kind),
Description = cause_description,
ChangedSymbol = cause_symbol,
ChangedFile = cause_file,
ChangedLine = cause_line,
CodeChangeId = code_change_id
},
Path = path,
AssociatedVulns = vulns
};
}
}
}

View File

@@ -67,15 +67,20 @@ public sealed class PostgresScanManifestRepository : IScanManifestRepository
await using var connection = await _dataSource.OpenSystemConnectionAsync(cancellationToken).ConfigureAwait(false);
await using var dbContext = ScannerDbContextFactory.Create(connection, _dataSource.CommandTimeoutSeconds, SchemaName);
var result = await dbContext.Database.SqlQueryRaw<ManifestInsertResult>(
// INSERT...RETURNING is non-composable SQL; EF Core's SqlQueryRaw cannot wrap
// it in a subquery (which .FirstAsync would require). Materialize the full
// result set first, then pick the single returned row in-memory.
var results = await dbContext.Database.SqlQueryRaw<ManifestInsertResult>(
sql,
manifest.ScanId, manifest.ManifestHash, manifest.SbomHash, manifest.RulesHash,
manifest.FeedHash, manifest.PolicyHash, manifest.ScanStartedAt,
(object?)manifest.ScanCompletedAt ?? DBNull.Value,
manifest.ManifestContent, manifest.ScannerVersion)
.FirstAsync(cancellationToken)
.ToListAsync(cancellationToken)
.ConfigureAwait(false);
var result = results.First();
manifest.ManifestId = result.manifest_id;
manifest.CreatedAt = result.created_at;
return manifest;

View File

@@ -3,6 +3,7 @@ using Microsoft.EntityFrameworkCore;
using Microsoft.Extensions.Logging;
using Npgsql;
using StellaOps.Scanner.SmartDiff.Detection;
using StellaOps.Scanner.Storage.EfCore.Models;
using System.Collections.Immutable;
using System.Text.Json;
@@ -66,55 +67,34 @@ public sealed class PostgresVexCandidateStore : IVexCandidateStore
{
ArgumentException.ThrowIfNullOrWhiteSpace(imageDigest);
var tenantScope = ScannerTenantScope.Resolve(tenantId);
var sql = $"""
SELECT
candidate_id, vuln_id, purl, image_digest,
suggested_status::TEXT AS suggested_status, justification::TEXT AS justification, rationale,
evidence_links, confidence, generated_at, expires_at,
requires_review, review_action::TEXT AS review_action, reviewed_by, reviewed_at, review_comment
FROM {VexCandidatesTable}
WHERE tenant_id = @p0
AND image_digest = @p1
ORDER BY confidence DESC
""";
var trimmedDigest = imageDigest.Trim();
await using var connection = await _dataSource.OpenConnectionAsync(tenantScope.TenantContext, ct).ConfigureAwait(false);
await using var dbContext = ScannerDbContextFactory.Create(connection, _dataSource.CommandTimeoutSeconds, SchemaName);
var rows = await dbContext.Database.SqlQueryRaw<VexCandidateRow>(
sql, tenantScope.TenantId, imageDigest.Trim())
var entities = await dbContext.VexCandidates
.Where(e => e.TenantId == tenantScope.TenantId && e.ImageDigest == trimmedDigest)
.OrderByDescending(e => e.Confidence)
.ToListAsync(ct)
.ConfigureAwait(false);
return rows.Select(r => r.ToCandidate()).ToList();
return entities.Select(MapEntityToCandidate).ToList();
}
public async Task<VexCandidate?> GetCandidateAsync(string candidateId, CancellationToken ct = default, string? tenantId = null)
{
ArgumentException.ThrowIfNullOrWhiteSpace(candidateId);
var tenantScope = ScannerTenantScope.Resolve(tenantId);
var sql = $"""
SELECT
candidate_id, vuln_id, purl, image_digest,
suggested_status::TEXT AS suggested_status, justification::TEXT AS justification, rationale,
evidence_links, confidence, generated_at, expires_at,
requires_review, review_action::TEXT AS review_action, reviewed_by, reviewed_at, review_comment
FROM {VexCandidatesTable}
WHERE tenant_id = @p0
AND candidate_id = @p1
""";
var trimmedId = candidateId.Trim();
await using var connection = await _dataSource.OpenConnectionAsync(tenantScope.TenantContext, ct).ConfigureAwait(false);
await using var dbContext = ScannerDbContextFactory.Create(connection, _dataSource.CommandTimeoutSeconds, SchemaName);
var row = await dbContext.Database.SqlQueryRaw<VexCandidateRow>(
sql, tenantScope.TenantId, candidateId.Trim())
.FirstOrDefaultAsync(ct)
var entity = await dbContext.VexCandidates
.FirstOrDefaultAsync(e => e.TenantId == tenantScope.TenantId && e.CandidateId == trimmedId, ct)
.ConfigureAwait(false);
return row?.ToCandidate();
return entity is not null ? MapEntityToCandidate(entity) : null;
}
public async Task<bool> ReviewCandidateAsync(string candidateId, VexCandidateReview review, CancellationToken ct = default, string? tenantId = null)
@@ -232,70 +212,47 @@ public sealed class PostgresVexCandidateStore : IVexCandidateStore
};
}
/// <summary>
/// Row mapping class for EF Core SqlQueryRaw.
/// </summary>
private sealed class VexCandidateRow
private static VexCandidate MapEntityToCandidate(VexCandidateEntity entity)
{
public string candidate_id { get; set; } = "";
public string vuln_id { get; set; } = "";
public string purl { get; set; } = "";
public string image_digest { get; set; } = "";
public string suggested_status { get; set; } = "not_affected";
public string justification { get; set; } = "vulnerable_code_not_present";
public string rationale { get; set; } = "";
public string evidence_links { get; set; } = "[]";
public decimal confidence { get; set; }
public DateTimeOffset generated_at { get; set; }
public DateTimeOffset expires_at { get; set; }
public bool requires_review { get; set; }
public string? review_action { get; set; }
public string? reviewed_by { get; set; }
public DateTimeOffset? reviewed_at { get; set; }
public string? review_comment { get; set; }
var links = JsonSerializer.Deserialize<List<EvidenceLink>>(entity.EvidenceLinks, JsonOptions)
?? [];
public VexCandidate ToCandidate()
return new VexCandidate(
CandidateId: entity.CandidateId,
FindingKey: new FindingKey(entity.VulnId, entity.Purl),
SuggestedStatus: ParseVexStatus(entity.SuggestedStatus),
Justification: ParseJustification(entity.Justification),
Rationale: entity.Rationale,
EvidenceLinks: [.. links],
Confidence: (double)entity.Confidence,
ImageDigest: entity.ImageDigest,
GeneratedAt: entity.GeneratedAt,
ExpiresAt: entity.ExpiresAt,
RequiresReview: entity.RequiresReview);
}
private static VexStatusType ParseVexStatus(string value)
{
return value.ToLowerInvariant() switch
{
var links = JsonSerializer.Deserialize<List<EvidenceLink>>(evidence_links, JsonOptions)
?? [];
"affected" => VexStatusType.Affected,
"not_affected" => VexStatusType.NotAffected,
"fixed" => VexStatusType.Fixed,
"under_investigation" => VexStatusType.UnderInvestigation,
_ => VexStatusType.Unknown
};
}
return new VexCandidate(
CandidateId: candidate_id,
FindingKey: new FindingKey(vuln_id, purl),
SuggestedStatus: ParseVexStatus(suggested_status),
Justification: ParseJustification(justification),
Rationale: rationale,
EvidenceLinks: [.. links],
Confidence: (double)confidence,
ImageDigest: image_digest,
GeneratedAt: generated_at,
ExpiresAt: expires_at,
RequiresReview: requires_review);
}
private static VexStatusType ParseVexStatus(string value)
private static VexJustification ParseJustification(string value)
{
return value.ToLowerInvariant() switch
{
return value.ToLowerInvariant() switch
{
"affected" => VexStatusType.Affected,
"not_affected" => VexStatusType.NotAffected,
"fixed" => VexStatusType.Fixed,
"under_investigation" => VexStatusType.UnderInvestigation,
_ => VexStatusType.Unknown
};
}
private static VexJustification ParseJustification(string value)
{
return value.ToLowerInvariant() switch
{
"component_not_present" => VexJustification.ComponentNotPresent,
"vulnerable_code_not_present" => VexJustification.VulnerableCodeNotPresent,
"vulnerable_code_not_in_execute_path" => VexJustification.VulnerableCodeNotInExecutePath,
"vulnerable_code_cannot_be_controlled_by_adversary" => VexJustification.VulnerableCodeCannotBeControlledByAdversary,
"inline_mitigations_already_exist" => VexJustification.InlineMitigationsAlreadyExist,
_ => VexJustification.VulnerableCodeNotPresent
};
}
"component_not_present" => VexJustification.ComponentNotPresent,
"vulnerable_code_not_present" => VexJustification.VulnerableCodeNotPresent,
"vulnerable_code_not_in_execute_path" => VexJustification.VulnerableCodeNotInExecutePath,
"vulnerable_code_cannot_be_controlled_by_adversary" => VexJustification.VulnerableCodeCannotBeControlledByAdversary,
"inline_mitigations_already_exist" => VexJustification.InlineMitigationsAlreadyExist,
_ => VexJustification.VulnerableCodeNotPresent
};
}
}

View File

@@ -1,4 +1,5 @@
using System.Collections.Generic;
using System.Net.Http;
using System.Security.Claims;
using System.Text.Encodings.Web;
using System.Threading.Tasks;
@@ -13,6 +14,7 @@ using Microsoft.Extensions.Hosting;
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Options;
using Npgsql;
using StellaOps.Auth.Abstractions;
using StellaOps.Infrastructure.Postgres.Testing;
using StellaOps.Scanner.Reachability.Slices;
using StellaOps.Scanner.Storage;
@@ -27,6 +29,14 @@ namespace StellaOps.Scanner.WebService.Tests;
public sealed class ScannerApplicationFactory : WebApplicationFactory<ServiceStatus>, IAsyncLifetime, IAsyncDisposable
{
/// <summary>
/// Default tenant identifier injected into every test client via the
/// <c>X-StellaOps-Tenant</c> header so that <c>RequireTenant()</c> endpoint filters
/// resolve a valid tenant context without requiring per-request configuration.
/// Tests that need a specific tenant can override this header per request.
/// </summary>
public const string DefaultTestTenant = "default";
private readonly bool skipPostgres;
private readonly Dictionary<string, string?> configuration = new(StringComparer.OrdinalIgnoreCase)
{
@@ -151,6 +161,32 @@ public sealed class ScannerApplicationFactory : WebApplicationFactory<ServiceSta
}
}
/// <summary>
/// Creates an HTTP client with the default test tenant header pre-configured.
/// All endpoints under <c>MapGroup(...).RequireTenant()</c> require a resolved tenant
/// context; this header satisfies that requirement for generic test scenarios.
/// Tests that need a different tenant can override the header per request using
/// <c>request.Headers.TryAddWithoutValidation(StellaOpsHttpHeaderNames.Tenant, "other-tenant")</c>.
/// </summary>
public new HttpClient CreateClient()
{
var client = base.CreateClient();
client.DefaultRequestHeaders.TryAddWithoutValidation(StellaOpsHttpHeaderNames.Tenant, DefaultTestTenant);
return client;
}
/// <summary>
/// Creates an HTTP client with the given options and the default test tenant header pre-configured.
/// This override ensures that callers using <c>CreateClient(options)</c> also receive the tenant
/// header, which is required by <c>RequireTenant()</c> endpoint filters.
/// </summary>
public new HttpClient CreateClient(WebApplicationFactoryClientOptions options)
{
var client = base.CreateClient(options);
client.DefaultRequestHeaders.TryAddWithoutValidation(StellaOpsHttpHeaderNames.Tenant, DefaultTestTenant);
return client;
}
protected override void ConfigureWebHost(IWebHostBuilder builder)
{
if (!initialized)

View File

@@ -107,7 +107,7 @@ export class UnifiedSearchClient {
limit = 10,
): Observable<UnifiedSearchResponse> {
const normalizedQuery = query.trim();
if (normalizedQuery.length < 2) {
if (normalizedQuery.length < 1) {
return of({
query,
topK: limit,

View File

@@ -1,14 +1,17 @@
import { Injectable, signal } from '@angular/core';
import { Subject } from 'rxjs';
import type { EntityCard, SynthesisResult, UnifiedSearchDomain } from '../api/unified-search.models';
export interface SearchToChatContext {
query: string;
entityCards: any[]; // EntityCard[]
synthesis: any | null; // SynthesisResult
entityCards: EntityCard[];
synthesis: SynthesisResult | null;
suggestedPrompt?: string;
}
export interface ChatToSearchContext {
query: string;
domain?: string;
domain?: UnifiedSearchDomain;
entityKey?: string;
}
@@ -16,6 +19,8 @@ export interface ChatToSearchContext {
export class SearchChatContextService {
private readonly _searchToChat = signal<SearchToChatContext | null>(null);
private readonly _chatToSearch = signal<ChatToSearchContext | null>(null);
private readonly _chatToSearchRequests = new Subject<void>();
readonly chatToSearchRequested$ = this._chatToSearchRequests.asObservable();
setSearchToChat(context: SearchToChatContext): void {
this._searchToChat.set(context);
@@ -29,6 +34,7 @@ export class SearchChatContextService {
setChatToSearch(context: ChatToSearchContext): void {
this._chatToSearch.set(context);
this._chatToSearchRequests.next();
}
consumeChatToSearch(): ChatToSearchContext | null {

View File

@@ -107,10 +107,20 @@ interface MessageSegment {
<ul class="citations-list">
@for (citation of turn.citations; track citation.path) {
<li class="citation-item">
<stellaops-object-link-chip
[link]="citationToLink(citation)"
[verified]="citation.verified"
(navigate)="onLinkNavigate($event)"/>
<div class="citation-item__actions">
<stellaops-object-link-chip
[link]="citationToLink(citation)"
[verified]="citation.verified"
(navigate)="onLinkNavigate($event)"/>
<button
type="button"
class="citation-search-related"
(click)="onSearchRelated(citation)"
title="Search related results"
>
Search related
</button>
</div>
</li>
}
</ul>
@@ -118,7 +128,7 @@ interface MessageSegment {
}
<!-- Search for more -->
@if (turn.role === 'assistant') {
@if (turn.role === 'assistant' && (turn.citations?.length ?? 0) > 0) {
<button
type="button"
class="search-more-link"
@@ -337,6 +347,29 @@ interface MessageSegment {
display: inline-block;
}
.citation-item__actions {
display: inline-flex;
align-items: center;
gap: 0.375rem;
}
.citation-search-related {
border: 1px solid var(--color-border-secondary, #d1d5db);
background: transparent;
color: var(--color-text-secondary, #4b5563);
border-radius: 999px;
font-size: var(--font-size-sm, 0.6875rem);
padding: 0.125rem 0.5rem;
cursor: pointer;
transition: background-color 0.12s, border-color 0.12s;
}
.citation-search-related:hover {
background: var(--color-nav-hover, #f3f4f6);
border-color: var(--color-brand-primary, #2563eb);
color: var(--color-brand-primary, #2563eb);
}
.search-more-link {
display: inline-flex;
align-items: center;
@@ -522,12 +555,24 @@ export class ChatMessageComponent {
*/
onSearchForMore(): void {
const query = this.extractSearchQuery(this.turn.content);
const firstCitation = this.turn.citations?.[0];
const domain = firstCitation
? this.mapCitationTypeToDomain(firstCitation.type)
: undefined;
this.searchChatContext.setChatToSearch({
query,
domain,
});
this.searchForMore.emit(query);
}
onSearchRelated(citation: { type: string; path: string }): void {
const query = this.extractSearchQueryFromCitation(citation.type, citation.path);
const domain = this.mapCitationTypeToDomain(citation.type);
this.searchChatContext.setChatToSearch({ query, domain });
this.searchForMore.emit(query);
}
private extractSearchQuery(content: string): string {
// Extract CVE IDs if present
const cveRegex = /CVE-\d{4}-\d{4,}/gi;
@@ -548,6 +593,48 @@ export class ChatMessageComponent {
return plainText.length > 100 ? plainText.substring(0, 100) : plainText;
}
private extractSearchQueryFromCitation(type: string, path: string): string {
const normalizedPath = (path ?? '').trim();
const cveMatch = normalizedPath.match(/CVE-\d{4}-\d{4,}/i);
if (cveMatch && cveMatch[0]) {
return cveMatch[0].toUpperCase();
}
if (type === 'policy') {
return normalizedPath.split(':')[0] || normalizedPath;
}
if (type === 'docs') {
return normalizedPath.replace(/[/_-]+/g, ' ').trim();
}
return normalizedPath.length > 120
? normalizedPath.substring(0, 120)
: normalizedPath;
}
private mapCitationTypeToDomain(type: string): 'knowledge' | 'findings' | 'vex' | 'policy' | 'platform' | undefined {
switch (type) {
case 'docs':
return 'knowledge';
case 'vex':
return 'vex';
case 'policy':
return 'policy';
case 'finding':
case 'scan':
case 'sbom':
case 'reach':
return 'findings';
case 'runtime':
case 'attest':
case 'auth':
return 'platform';
default:
return undefined;
}
}
async copyMessage(): Promise<void> {
try {
await navigator.clipboard.writeText(this.turn.content);

View File

@@ -532,6 +532,16 @@ export class ChatComponent implements OnInit, OnDestroy {
@Input() tenantId = 'default';
@Input() context?: ConversationContext;
@Input() conversationId?: string;
@Input()
set initialUserMessage(value: string | null | undefined) {
const normalized = value?.trim() ?? '';
if (!normalized) {
return;
}
this.pendingInitialMessage = normalized;
this.trySendPendingInitialMessage();
}
@Output() close = new EventEmitter<void>();
@Output() linkNavigate = new EventEmitter<ParsedObjectLink>();
@@ -544,6 +554,7 @@ export class ChatComponent implements OnInit, OnDestroy {
private readonly chatService = inject(ChatService);
private readonly router = inject(Router);
private readonly destroy$ = new Subject<void>();
private pendingInitialMessage: string | null = null;
inputValue = '';
readonly progressStage = signal<string | null>(null);
@@ -632,9 +643,13 @@ export class ChatComponent implements OnInit, OnDestroy {
// Start or load conversation
if (this.conversationId) {
this.chatService.getConversation(this.conversationId).subscribe();
this.chatService.getConversation(this.conversationId).subscribe(() => {
this.trySendPendingInitialMessage();
});
} else {
this.chatService.createConversation(this.tenantId, this.context).subscribe();
this.chatService.createConversation(this.tenantId, this.context).subscribe(() => {
this.trySendPendingInitialMessage();
});
}
}
@@ -669,7 +684,9 @@ export class ChatComponent implements OnInit, OnDestroy {
startNewConversation(): void {
this.chatService.clearConversation();
this.chatService.createConversation(this.tenantId, this.context).subscribe();
this.chatService.createConversation(this.tenantId, this.context).subscribe(() => {
this.trySendPendingInitialMessage();
});
}
retryLastAction(): void {
@@ -702,4 +719,20 @@ export class ChatComponent implements OnInit, OnDestroy {
el.scrollTop = el.scrollHeight;
}
}
private trySendPendingInitialMessage(): void {
const message = this.pendingInitialMessage?.trim();
if (!message || this.isStreaming()) {
return;
}
const conversation = this.conversation();
if (!conversation) {
return;
}
this.pendingInitialMessage = null;
this.chatService.sendMessage(conversation.conversationId, message);
this.inputValue = '';
}
}

View File

@@ -106,7 +106,9 @@ export interface CreateConversationRequest {
* Request to add a turn (message) to a conversation.
*/
export interface AddTurnRequest {
message: string;
content: string;
/** @deprecated Use `content` */
message?: string;
}
/**
@@ -266,25 +268,25 @@ export function parseObjectLinks(content: string): ParsedObjectLink[] {
export function getObjectLinkUrl(link: ParsedObjectLink): string {
switch (link.type) {
case 'sbom':
return `/sbom/${encodeURIComponent(link.path)}`;
return `/security/supply-chain-data/viewer?focus=${encodeURIComponent(link.path)}`;
case 'reach':
return `/reachability/${encodeURIComponent(link.path)}`;
return `/security/reachability?q=${encodeURIComponent(link.path)}`;
case 'runtime':
return `/timeline/${encodeURIComponent(link.path)}`;
return `/ops/operations/signals?q=${encodeURIComponent(link.path)}`;
case 'vex':
return `/vex-hub/${encodeURIComponent(link.path)}`;
return `/security/advisories-vex?q=${encodeURIComponent(link.path)}`;
case 'attest':
return `/proof-chain/${encodeURIComponent(link.path)}`;
return `/evidence/proofs?q=${encodeURIComponent(link.path)}`;
case 'auth':
return `/admin/auth/${encodeURIComponent(link.path)}`;
return `/settings/identity-providers?q=${encodeURIComponent(link.path)}`;
case 'docs':
return `/docs/${encodeURIComponent(link.path)}`;
case 'finding':
return `/triage/findings/${encodeURIComponent(link.path)}`;
return `/security/findings/${encodeURIComponent(link.path)}`;
case 'scan':
return `/scans/${encodeURIComponent(link.path)}`;
return `/security/scans/${encodeURIComponent(link.path)}`;
case 'policy':
return `/policy/${encodeURIComponent(link.path)}`;
return `/ops/policy?q=${encodeURIComponent(link.path)}`;
default:
return '#';
}

View File

@@ -187,7 +187,7 @@ export class ChatService {
'Content-Type': 'application/json',
Accept: 'text/event-stream',
},
body: JSON.stringify({ message }),
body: JSON.stringify({ content: message }),
signal: abortController.signal,
})
.then((response) => {

View File

@@ -0,0 +1,197 @@
import { ChangeDetectionStrategy, Component, ElementRef, HostListener, ViewChild, inject, signal } from '@angular/core';
import { ActivatedRoute, Router } from '@angular/router';
import { takeUntilDestroyed } from '@angular/core/rxjs-interop';
import { PlatformContextStore } from '../../core/context/platform-context.store';
import { SearchChatContextService, type SearchToChatContext } from '../../core/services/search-chat-context.service';
import { ChatComponent } from '../advisory-ai/chat';
import { SecurityFindingsPageComponent } from './security-findings-page.component';
@Component({
selector: 'app-security-triage-chat-host',
standalone: true,
imports: [SecurityFindingsPageComponent, ChatComponent],
changeDetection: ChangeDetectionStrategy.OnPush,
template: `
<section class="triage-host">
<app-security-findings-page />
<button
#assistantFab
type="button"
class="assistant-fab"
(click)="openAssistantPanel()"
aria-label="Open AdvisoryAI assistant"
>
Ask AdvisoryAI
</button>
@if (assistantOpen()) {
<section
#assistantDrawer
class="assistant-drawer"
role="dialog"
aria-label="AdvisoryAI assistant"
tabindex="-1"
>
<stellaops-chat
[tenantId]="context.tenantId() ?? 'default'"
[initialUserMessage]="assistantInitialMessage()"
(close)="closeAssistantPanel()"
(searchForMore)="onChatSearchForMore($event)"
/>
</section>
}
</section>
`,
styles: [`
.triage-host {
position: relative;
min-height: 100%;
}
.assistant-fab {
position: fixed;
right: 1.25rem;
bottom: 1.25rem;
z-index: 135;
border: 1px solid #7dd3fc;
background: #f0f9ff;
color: #0369a1;
border-radius: 999px;
padding: 0.5rem 0.9rem;
font-size: 0.75rem;
box-shadow: 0 4px 14px rgba(2, 132, 199, 0.2);
cursor: pointer;
}
.assistant-fab:hover {
background: #e0f2fe;
border-color: #0284c7;
color: #0c4a6e;
}
.assistant-drawer {
position: fixed;
top: 5rem;
right: 1rem;
width: min(520px, calc(100vw - 2rem));
height: min(78vh, 760px);
border: 1px solid var(--color-border-primary);
border-radius: var(--radius-lg);
background: var(--color-surface-primary);
box-shadow: var(--shadow-dropdown);
z-index: 140;
overflow: hidden;
}
@media (max-width: 900px) {
.assistant-fab {
right: 0.75rem;
bottom: 0.75rem;
}
.assistant-drawer {
top: 0;
right: 0;
width: 100vw;
height: 100vh;
border-radius: 0;
}
}
`],
})
export class SecurityTriageChatHostComponent {
  private readonly route = inject(ActivatedRoute);
  private readonly router = inject(Router);
  private readonly searchChatContext = inject(SearchChatContextService);
  readonly context = inject(PlatformContextStore);

  // Focus targets: the drawer receives focus when opened, the FAB gets focus
  // back when the drawer closes (keyboard/a11y round-trip).
  @ViewChild('assistantDrawer') private assistantDrawerRef?: ElementRef<HTMLElement>;
  @ViewChild('assistantFab') private assistantFabRef?: ElementRef<HTMLButtonElement>;

  // Whether the assistant drawer is currently rendered (template `@if`).
  readonly assistantOpen = signal(false);
  // Seed prompt handed to <stellaops-chat> as its initial user message.
  readonly assistantInitialMessage = signal<string | null>(null);

  constructor() {
    // Deep-link support: `?openChat=true[&q=...]` opens the assistant, seeded
    // either from the search-to-chat handoff context or from the `q` param.
    this.route.queryParamMap
      .pipe(takeUntilDestroyed())
      .subscribe((params) => {
        if ((params.get('openChat') ?? '').toLowerCase() === 'true') {
          this.openAssistantFromSearchIntent(params.get('q') ?? '');
        }
      });
  }

  /** Opens the assistant drawer with the generic triage-prioritization prompt. */
  openAssistantPanel(): void {
    this.assistantInitialMessage.set(
      'Help me prioritize the current security triage findings and explain the top risk first.',
    );
    this.assistantOpen.set(true);
    this.queueDrawerFocus();
  }

  /** Closes the drawer and restores keyboard focus to the launcher FAB. */
  closeAssistantPanel(): void {
    this.assistantOpen.set(false);
    // setTimeout(..., 0): wait for the drawer to be removed from the DOM
    // before moving focus back to the FAB.
    setTimeout(() => this.assistantFabRef?.nativeElement?.focus(), 0);
  }

  /**
   * Handles the chat component's "search for more" request by dismissing the
   * drawer when a non-empty query is supplied.
   *
   * NOTE(review): the query itself is not forwarded anywhere in this handler —
   * presumably another listener (e.g. the global search) consumes it; confirm
   * that dropping it here is intentional.
   */
  onChatSearchForMore(query: string): void {
    const normalizedQuery = query.trim();
    if (!normalizedQuery) {
      return;
    }
    this.assistantOpen.set(false);
  }

  /**
   * Opens the assistant from a search deep link: builds the seed prompt from
   * the (single-use) search-to-chat context or the `q` seed, then strips
   * `openChat` from the URL so refresh/back does not reopen the drawer.
   */
  private openAssistantFromSearchIntent(querySeed: string): void {
    const searchContext = this.searchChatContext.consumeSearchToChat();
    const prompt = this.buildAssistantPrompt(searchContext, querySeed);
    this.assistantInitialMessage.set(prompt);
    this.assistantOpen.set(true);
    this.queueDrawerFocus();
    // `openChat: null` with queryParamsHandling 'merge' removes only that
    // param; replaceUrl avoids polluting browser history.
    void this.router.navigate([], {
      relativeTo: this.route,
      replaceUrl: true,
      queryParamsHandling: 'merge',
      queryParams: { openChat: null },
    });
  }

  /**
   * Builds the initial assistant prompt, preferring (in order): an explicit
   * suggested prompt from the search context, a summary of up to 5 entity
   * cards, a plain query prompt, and finally a generic triage prompt.
   */
  private buildAssistantPrompt(searchContext: SearchToChatContext | null, querySeed: string): string {
    if (searchContext?.suggestedPrompt?.trim()) {
      return searchContext.suggestedPrompt.trim();
    }
    const query = searchContext?.query?.trim() || querySeed.trim();
    const cards = (searchContext?.entityCards ?? []).slice(0, 5);
    if (cards.length > 0) {
      const cardSummary = cards
        .map((card, index) =>
          `${index + 1}. ${card.title} (${card.domain}${card.severity ? `, ${card.severity}` : ''})`)
        .join('\n');
      return `I searched for "${query || 'security issue'}" and got:\n${cardSummary}\nHelp me understand the risk and choose the best next action.`;
    }
    if (query) {
      return `Help me understand "${query}" and guide me to the most relevant next step.`;
    }
    return 'Help me prioritize the current security triage findings and explain the top risk first.';
  }

  /** Escape closes the drawer only when it is open (avoids stealing focus otherwise). */
  @HostListener('window:keydown.escape')
  onEscapePressed(): void {
    if (!this.assistantOpen()) {
      return;
    }
    this.closeAssistantPanel();
  }

  /** Defers focus until the drawer element exists in the DOM (next macrotask). */
  private queueDrawerFocus(): void {
    setTimeout(() => this.assistantDrawerRef?.nativeElement?.focus(), 0);
  }
}

View File

@@ -10,12 +10,13 @@ import {
signal,
} from '@angular/core';
import { FormsModule } from '@angular/forms';
import { Router } from '@angular/router';
import { NavigationEnd, Router } from '@angular/router';
import { Subject, of } from 'rxjs';
import {
catchError,
debounceTime,
distinctUntilChanged,
filter,
switchMap,
takeUntil,
} from 'rxjs/operators';
@@ -33,6 +34,7 @@ import { EntityCardComponent } from '../../shared/components/entity-card/entity-
import { SynthesisPanelComponent } from '../../shared/components/synthesis-panel/synthesis-panel.component';
import { AmbientContextService } from '../../core/services/ambient-context.service';
import { SearchChatContextService } from '../../core/services/search-chat-context.service';
import { normalizeSearchActionRoute } from './search-route-matrix';
type SearchDomainFilter = 'all' | UnifiedSearchDomain;
@@ -52,7 +54,7 @@ type SearchDomainFilter = 'all' | UnifiedSearchDomain;
#searchInput
type="text"
class="search__input"
placeholder="Search everything..."
[placeholder]="inputPlaceholder()"
[ngModel]="query()"
(ngModelChange)="onQueryChange($event)"
(focus)="onFocus()"
@@ -67,6 +69,13 @@ type SearchDomainFilter = 'all' | UnifiedSearchDomain;
@if (showResults()) {
<div class="search__results" id="search-results">
@if (showDegradedModeBanner()) {
<div class="search__degraded-banner" role="status" aria-live="polite">
<span class="search__degraded-title">Fallback mode:</span>
{{ degradedModeMessage() }}
</div>
}
@if (isLoading()) {
<div class="search__loading">Searching...</div>
} @else if (query().trim().length >= 1 && cards().length === 0) {
@@ -203,7 +212,10 @@ type SearchDomainFilter = 'all' | UnifiedSearchDomain;
<div class="search__domain-grid">
@for (domain of domainGuide; track domain.title) {
<div class="search__domain-card">
<div class="search__domain-title">{{ domain.title }}</div>
<div class="search__domain-title">
<span class="search__domain-icon" aria-hidden="true">{{ domain.icon }}</span>
{{ domain.title }}
</div>
<div class="search__domain-desc">{{ domain.description }}</div>
<button
type="button"
@@ -346,6 +358,20 @@ type SearchDomainFilter = 'all' | UnifiedSearchDomain;
font-size: 0.875rem;
}
.search__degraded-banner {
padding: 0.5rem 0.75rem;
background: #fff7ed;
border-bottom: 1px solid #fed7aa;
color: #7c2d12;
font-size: 0.75rem;
line-height: 1.3;
}
.search__degraded-title {
font-weight: var(--font-weight-semibold);
margin-right: 0.25rem;
}
.search__cards {
padding: 0.25rem 0;
}
@@ -475,9 +501,24 @@ type SearchDomainFilter = 'all' | UnifiedSearchDomain;
font-size: 0.75rem;
font-weight: var(--font-weight-semibold);
color: var(--color-text-primary);
display: flex;
align-items: center;
gap: 0.35rem;
margin-bottom: 0.125rem;
}
.search__domain-icon {
display: inline-flex;
align-items: center;
justify-content: center;
width: 1.05rem;
height: 1.05rem;
border-radius: 999px;
background: var(--color-surface-tertiary);
font-size: 0.675rem;
line-height: 1;
}
.search__domain-desc {
font-size: 0.6875rem;
color: var(--color-text-muted);
@@ -608,7 +649,9 @@ export class GlobalSearchComponent implements OnInit, OnDestroy {
private readonly destroy$ = new Subject<void>();
private readonly searchTerms$ = new Subject<string>();
private readonly recentSearchStorageKey = 'stella-recent-searches';
private wasDegradedMode = false;
private escapeCount = 0;
private placeholderRotationHandle: ReturnType<typeof setInterval> | null = null;
@ViewChild('searchInput') searchInputRef!: ElementRef<HTMLInputElement>;
@@ -621,24 +664,46 @@ export class GlobalSearchComponent implements OnInit, OnDestroy {
readonly isFocused = signal(false);
readonly isLoading = signal(false);
readonly selectedIndex = signal(0);
readonly placeholderIndex = signal(0);
readonly searchResponse = signal<UnifiedSearchResponse | null>(null);
readonly recentSearches = signal<string[]>([]);
readonly activeDomainFilter = signal<SearchDomainFilter>('all');
readonly expandedCardKey = signal<string | null>(null);
readonly pendingDomainFilter = signal<SearchDomainFilter | null>(null);
readonly showResults = computed(() => this.isFocused());
readonly diagnosticsMode = computed(() => this.searchResponse()?.diagnostics?.mode ?? 'unknown');
readonly isDegradedMode = computed(() => {
const mode = this.diagnosticsMode();
return mode === 'legacy-fallback' || mode === 'fallback-empty';
});
readonly showDegradedModeBanner = computed(() =>
!this.isLoading() &&
this.query().trim().length >= 1 &&
this.isDegradedMode(),
);
readonly degradedModeMessage = computed(() => {
if (this.diagnosticsMode() === 'fallback-empty') {
return 'Unified search is unavailable and legacy fallback returned no results. Try a broader query or retry.';
}
return 'Showing legacy fallback results. Coverage and ranking may differ until unified search recovers.';
});
readonly domainGuide: ReadonlyArray<{
icon: string;
title: string;
description: string;
example: string;
}> = [
{ title: 'Security Findings', description: 'CVEs, vulnerabilities, and exposure data', example: 'CVE-2024-21626' },
{ title: 'VEX Statements', description: 'Vulnerability exploitability assessments', example: 'not_affected' },
{ title: 'Policy Rules', description: 'Release gate rules and enforcement', example: 'DENY-CRITICAL-PROD' },
{ title: 'Documentation', description: 'Guides, architecture, and runbooks', example: 'how to deploy' },
{ title: 'API Reference', description: 'OpenAPI endpoints and contracts', example: 'POST /api/v1/scanner/scans' },
{ title: 'Health Checks', description: 'System diagnostics and remediation', example: 'database connectivity' },
{ icon: 'S', title: 'Security Findings', description: 'CVEs, vulnerabilities, and exposure data', example: 'CVE-2024-21626' },
{ icon: 'V', title: 'VEX Statements', description: 'Vulnerability exploitability assessments', example: 'not_affected' },
{ icon: 'P', title: 'Policy Rules', description: 'Release gate rules and enforcement', example: 'DENY-CRITICAL-PROD' },
{ icon: 'D', title: 'Documentation', description: 'Guides, architecture, and runbooks', example: 'how to deploy' },
{ icon: 'A', title: 'API Reference', description: 'OpenAPI endpoints and contracts', example: 'POST /api/v1/scanner/scans' },
{ icon: 'H', title: 'Health Checks', description: 'System diagnostics and remediation', example: 'database connectivity' },
{ icon: 'R', title: 'Release Workflows', description: 'Promotion status, rollout history, and gate decisions', example: 'failed promotion' },
{ icon: 'C', title: 'Platform Catalog', description: 'Components, integrations, and environment inventory', example: 'registry integration' },
];
readonly contextualSuggestions = computed<string[]>(() => {
@@ -659,6 +724,16 @@ export class GlobalSearchComponent implements OnInit, OnDestroy {
return ['How do I deploy?', 'What is a VEX statement?', 'Show critical findings'];
});
readonly inputPlaceholder = computed(() => {
const suggestions = this.contextualSuggestions();
if (suggestions.length === 0) {
return 'Search everything...';
}
const index = this.placeholderIndex() % suggestions.length;
return `Try: ${suggestions[index]}`;
});
readonly cards = computed(() => this.searchResponse()?.cards ?? []);
readonly synthesis = computed(() => this.searchResponse()?.synthesis ?? null);
readonly refinements = computed(() => this.searchResponse()?.refinements ?? []);
@@ -687,6 +762,23 @@ export class GlobalSearchComponent implements OnInit, OnDestroy {
});
ngOnInit(): void {
this.placeholderRotationHandle = setInterval(() => {
this.placeholderIndex.update((current) => current + 1);
}, 4500);
this.searchChatContext.chatToSearchRequested$
.pipe(takeUntil(this.destroy$))
.subscribe(() => this.consumeChatToSearchContext());
this.router.events
.pipe(
filter((event): event is NavigationEnd => event instanceof NavigationEnd),
takeUntil(this.destroy$),
)
.subscribe(() => this.consumeChatToSearchContext());
this.consumeChatToSearchContext();
this.searchTerms$
.pipe(
debounceTime(200),
@@ -730,23 +822,38 @@ export class GlobalSearchComponent implements OnInit, OnDestroy {
this.searchResponse.set(response);
this.selectedIndex.set(0);
this.activeDomainFilter.set('all');
const pendingDomainFilter = this.pendingDomainFilter();
if (
pendingDomainFilter &&
response.cards.some((card) => card.domain === pendingDomainFilter)
) {
this.activeDomainFilter.set(pendingDomainFilter);
} else {
this.activeDomainFilter.set('all');
}
this.pendingDomainFilter.set(null);
this.expandedCardKey.set(null);
this.isLoading.set(false);
// Sprint 106 / G6: Emit search analytics events
this.emitSearchAnalytics(response);
this.trackDegradedMode(response);
});
}
ngOnDestroy(): void {
this.destroy$.next();
this.destroy$.complete();
if (this.placeholderRotationHandle) {
clearInterval(this.placeholderRotationHandle);
this.placeholderRotationHandle = null;
}
}
onFocus(): void {
this.isFocused.set(true);
this.escapeCount = 0;
this.consumeChatToSearchContext();
this.loadRecentSearches();
this.loadServerHistory();
}
@@ -840,20 +947,26 @@ export class GlobalSearchComponent implements OnInit, OnDestroy {
}
onAskAiFromCard(card: EntityCard): void {
const askPrompt = this.buildAskAiPromptForCard(card);
this.searchChatContext.setSearchToChat({
query: card.title,
query: this.query().trim() || card.title,
entityCards: [card],
synthesis: this.synthesis(),
suggestedPrompt: askPrompt,
});
this.closeResults();
void this.router.navigate(['/security/triage'], { queryParams: { openChat: 'true', q: card.title } });
void this.router.navigate(['/security/triage'], {
queryParams: { openChat: 'true', q: this.query().trim() || card.title },
});
}
onAskAiFromSynthesis(): void {
const askPrompt = this.buildAskAiPromptForSynthesis();
this.searchChatContext.setSearchToChat({
query: this.query(),
entityCards: this.filteredCards(),
synthesis: this.synthesis(),
suggestedPrompt: askPrompt,
});
this.closeResults();
void this.router.navigate(['/security/triage'], { queryParams: { openChat: 'true', q: this.query() } });
@@ -873,22 +986,26 @@ export class GlobalSearchComponent implements OnInit, OnDestroy {
): void {
const cards = this.filteredCards();
const position = cards.findIndex((c) => c.entityKey === event.entityKey);
const comment = this.promptFeedbackComment(event.signal);
this.searchClient.submitFeedback({
query: this.query(),
entityKey: event.entityKey,
domain: card.domain,
position: position >= 0 ? position : 0,
signal: event.signal,
comment,
});
}
onSynthesisFeedback(event: { signal: 'helpful' | 'not_helpful' }): void {
const comment = this.promptFeedbackComment(event.signal);
this.searchClient.submitFeedback({
query: this.query(),
entityKey: '__synthesis__',
domain: 'synthesis',
position: -1,
signal: event.signal,
comment,
});
}
@@ -935,10 +1052,12 @@ export class GlobalSearchComponent implements OnInit, OnDestroy {
}
private executeAction(action: EntityCardAction): void {
const normalizedRoute = action.route ? this.normalizeActionRoute(action.route) : undefined;
switch (action.actionType) {
case 'navigate':
if (action.route) {
void this.router.navigateByUrl(action.route);
if (normalizedRoute) {
void this.router.navigateByUrl(normalizedRoute);
}
break;
case 'copy':
@@ -951,13 +1070,13 @@ export class GlobalSearchComponent implements OnInit, OnDestroy {
if (action.command) {
void this.copyToClipboard(action.command);
}
if (action.route) {
void this.router.navigateByUrl(action.route);
if (normalizedRoute) {
void this.router.navigateByUrl(normalizedRoute);
}
break;
case 'details':
if (action.route) {
void this.router.navigateByUrl(action.route);
if (normalizedRoute) {
void this.router.navigateByUrl(normalizedRoute);
}
break;
}
@@ -1001,6 +1120,7 @@ export class GlobalSearchComponent implements OnInit, OnDestroy {
this.selectedIndex.set(0);
this.searchResponse.set(null);
this.activeDomainFilter.set('all');
this.pendingDomainFilter.set(null);
this.expandedCardKey.set(null);
this.isFocused.set(false);
this.escapeCount = 0;
@@ -1035,15 +1155,6 @@ export class GlobalSearchComponent implements OnInit, OnDestroy {
} catch {
// Ignore localStorage failures.
}
// Sprint 106 / G6: Also record to server-side history (fire-and-forget)
const resultCount = this.cards().length;
this.searchClient.recordAnalytics([{
eventType: 'query',
query: normalized,
resultCount,
durationMs: this.searchResponse()?.diagnostics?.durationMs,
}]);
}
/** Sprint 106 / G6: Load search history from server, merge with localStorage */
@@ -1062,14 +1173,33 @@ export class GlobalSearchComponent implements OnInit, OnDestroy {
/** Sprint 106 / G6: Emit analytics for search responses */
private emitSearchAnalytics(response: UnifiedSearchResponse): void {
if (response.cards.length === 0 && response.query.trim().length > 0) {
this.searchClient.recordAnalytics([{
eventType: 'zero_result',
query: response.query,
const normalized = response.query.trim();
if (!normalized) {
return;
}
const events: Array<{
eventType: 'query' | 'zero_result';
query: string;
resultCount: number;
durationMs?: number;
}> = [{
eventType: 'query' as const,
query: normalized,
resultCount: response.cards.length,
durationMs: response.diagnostics?.durationMs,
}];
if (response.cards.length === 0) {
events.push({
eventType: 'zero_result' as const,
query: normalized,
resultCount: 0,
durationMs: response.diagnostics?.durationMs,
}]);
});
}
this.searchClient.recordAnalytics(events);
}
/** Sprint 106 / G6: Emit analytics for card clicks */
@@ -1084,6 +1214,78 @@ export class GlobalSearchComponent implements OnInit, OnDestroy {
}]);
}
private consumeChatToSearchContext(): void {
const context = this.searchChatContext.consumeChatToSearch();
if (!context || !context.query.trim()) {
return;
}
const query = context.query.trim();
this.query.set(query);
this.selectedIndex.set(0);
this.searchResponse.set(null);
this.expandedCardKey.set(null);
this.isFocused.set(true);
this.pendingDomainFilter.set(context.domain ?? null);
this.searchTerms$.next(query);
this.saveRecentSearch(query);
setTimeout(() => this.searchInputRef?.nativeElement?.focus(), 0);
}
private buildAskAiPromptForCard(card: EntityCard): string {
switch (card.domain) {
case 'findings':
return `Tell me about ${card.title}, why it matters, and what action I should take first.`;
case 'vex':
return `Explain this VEX assessment for ${card.title} and what it means for release decisions.`;
case 'policy':
return `Explain this policy rule (${card.title}) and how it affects promotions.`;
case 'platform':
return `Explain this platform item (${card.title}) and what an operator should do next.`;
default:
return `Summarize ${card.title} and guide me through the next steps.`;
}
}
private buildAskAiPromptForSynthesis(): string {
const query = this.query().trim();
if (!query) {
return 'I need help understanding these search results and what to do next.';
}
return `I searched for "${query}". Help me understand the results and recommend a clear next action.`;
}
private normalizeActionRoute(route: string): string {
return normalizeSearchActionRoute(route);
}
private trackDegradedMode(response: UnifiedSearchResponse): void {
const mode = response.diagnostics?.mode ?? 'unknown';
const isDegraded = mode === 'legacy-fallback' || mode === 'fallback-empty';
if (isDegraded && !this.wasDegradedMode) {
this.searchClient.recordAnalytics([{
eventType: 'query',
query: '__degraded_mode_enter__',
domain: 'platform',
resultCount: response.cards.length,
durationMs: response.diagnostics?.durationMs,
}]);
} else if (!isDegraded && this.wasDegradedMode) {
this.searchClient.recordAnalytics([{
eventType: 'query',
query: '__degraded_mode_exit__',
domain: 'platform',
resultCount: response.cards.length,
durationMs: response.diagnostics?.durationMs,
}]);
}
this.wasDegradedMode = isDegraded;
}
clearSearchHistory(): void {
this.recentSearches.set([]);
try {
@@ -1123,4 +1325,26 @@ export class GlobalSearchComponent implements OnInit, OnDestroy {
textarea.remove();
}
}
private promptFeedbackComment(signal: 'helpful' | 'not_helpful'): string | undefined {
if (typeof window === 'undefined') {
return undefined;
}
const promptText = signal === 'helpful'
? 'Optional: what was most helpful?'
: 'Optional: what was missing or incorrect?';
const response = window.prompt(promptText);
if (!response) {
return undefined;
}
const normalized = response.trim();
if (!normalized) {
return undefined;
}
return normalized.slice(0, 500);
}
}

View File

@@ -0,0 +1,34 @@
/**
 * Rewrites legacy search-action routes emitted by the backend into the
 * application's current route layout.
 *
 * Non-app-relative routes (no leading '/') and unparseable routes pass
 * through unchanged. Lookup-style legacy routes (`/vex-hub/…`, `/proof-chain/…`,
 * `/policy/…`) become a target page plus a `?q=` query; path-style routes
 * (`/triage/findings/…`, `/scans/…`) keep their tail. Query and hash of
 * unmatched routes are preserved.
 */
export function normalizeSearchActionRoute(route: string): string {
  if (!route.startsWith('/')) {
    return route;
  }

  let url: URL;
  try {
    // Dummy origin: we only need relative-path parsing/serialization.
    url = new URL(route, 'https://stellaops.local');
  } catch {
    return route;
  }

  const path = url.pathname;
  const tailAfter = (prefix: string): string => path.substring(prefix.length);

  // Shared shape for the "target page + ?q=<decoded tail>" rewrites.
  const redirectToQueryPage = (prefix: string, targetPath: string): void => {
    const lookup = decodeURIComponent(tailAfter(prefix));
    url.pathname = targetPath;
    url.search = lookup ? `?q=${encodeURIComponent(lookup)}` : '';
  };

  if (path.startsWith('/triage/findings/')) {
    url.pathname = `/security/findings/${tailAfter('/triage/findings/')}`;
  } else if (path.startsWith('/vex-hub/')) {
    redirectToQueryPage('/vex-hub/', '/security/advisories-vex');
  } else if (path.startsWith('/proof-chain/')) {
    redirectToQueryPage('/proof-chain/', '/evidence/proofs');
  } else if (path.startsWith('/policy/')) {
    redirectToQueryPage('/policy/', '/ops/policy');
  } else if (path.startsWith('/scans/')) {
    url.pathname = `/security/scans/${tailAfter('/scans/')}`;
  }

  return `${url.pathname}${url.search}${url.hash}`;
}

View File

@@ -24,8 +24,8 @@ export const SECURITY_ROUTES: Routes = [
title: 'Security Triage',
data: { breadcrumb: 'Triage' },
loadComponent: () =>
import('../features/security/security-findings-page.component').then(
(m) => m.SecurityFindingsPageComponent,
import('../features/security/security-triage-chat-host.component').then(
(m) => m.SecurityTriageChatHostComponent,
),
},
{

View File

@@ -1,140 +1,106 @@
import { ComponentFixture, TestBed } from '@angular/core/testing';
import { Router } from '@angular/router';
import { of } from 'rxjs';
import { NavigationEnd, Router } from '@angular/router';
import { Subject, of } from 'rxjs';
import { SearchClient } from '../../app/core/api/search.client';
import {
GlobalSearchComponent,
SearchResult,
} from '../../app/layout/global-search/global-search.component';
import { UnifiedSearchClient } from '../../app/core/api/unified-search.client';
import { AmbientContextService } from '../../app/core/services/ambient-context.service';
import { SearchChatContextService } from '../../app/core/services/search-chat-context.service';
import { GlobalSearchComponent } from '../../app/layout/global-search/global-search.component';
describe('GlobalSearchComponent', () => {
let fixture: ComponentFixture<GlobalSearchComponent>;
let component: GlobalSearchComponent;
let router: { navigateByUrl: jasmine.Spy };
let searchClient: { search: jasmine.Spy };
let searchClient: jasmine.SpyObj<UnifiedSearchClient>;
let routerEvents: Subject<unknown>;
let router: { url: string; events: Subject<unknown>; navigateByUrl: jasmine.Spy; navigate: jasmine.Spy };
let searchChatContext: jasmine.SpyObj<SearchChatContextService>;
beforeEach(async () => {
routerEvents = new Subject<unknown>();
router = {
url: '/security/triage',
events: routerEvents,
navigateByUrl: jasmine.createSpy('navigateByUrl').and.returnValue(Promise.resolve(true)),
navigate: jasmine.createSpy('navigate').and.returnValue(Promise.resolve(true)),
};
searchClient = {
search: jasmine.createSpy('search').and.returnValue(
of({
query: 'docker login fails',
groups: [
{
type: 'docs',
label: 'Docs',
totalCount: 1,
hasMore: false,
results: [
{
id: 'docs:docs/operations.md:docker-registry-login-fails',
type: 'docs',
title: 'Registry login troubleshooting',
subtitle: 'docs/operations/troubleshooting.md#docker-registry-login-fails',
description: 'Use custom CA bundle and verify trust store.',
route: '/docs/docs%2Foperations%2Ftroubleshooting.md#docker-registry-login-fails',
matchScore: 0.95,
open: {
kind: 'docs',
docs: {
path: 'docs/operations/troubleshooting.md',
anchor: 'docker-registry-login-fails',
spanStart: 42,
spanEnd: 68,
},
},
},
],
},
],
totalCount: 1,
durationMs: 4,
}),
),
};
searchClient = jasmine.createSpyObj('UnifiedSearchClient', [
'search',
'recordAnalytics',
'getHistory',
'clearHistory',
'submitFeedback',
]) as jasmine.SpyObj<UnifiedSearchClient>;
searchClient.search.and.returnValue(of({
query: 'a',
topK: 10,
cards: [],
synthesis: null,
diagnostics: {
ftsMatches: 0,
vectorMatches: 0,
entityCardCount: 0,
durationMs: 1,
usedVector: false,
mode: 'fts-only',
},
}));
searchClient.getHistory.and.returnValue(of([]));
searchChatContext = jasmine.createSpyObj('SearchChatContextService', [
'consumeChatToSearch',
'setSearchToChat',
]) as jasmine.SpyObj<SearchChatContextService>;
(searchChatContext as any).chatToSearchRequested$ = of();
searchChatContext.consumeChatToSearch.and.returnValue(null);
await TestBed.configureTestingModule({
imports: [GlobalSearchComponent],
providers: [
{ provide: Router, useValue: router },
{ provide: SearchClient, useValue: searchClient },
{ provide: UnifiedSearchClient, useValue: searchClient },
{
provide: AmbientContextService,
useValue: {
buildContextFilter: () => undefined,
},
},
{ provide: SearchChatContextService, useValue: searchChatContext },
],
}).compileComponents();
localStorage.clear();
fixture = TestBed.createComponent(GlobalSearchComponent);
component = fixture.componentInstance;
fixture.detectChanges();
});
afterEach(() => {
localStorage.clear();
});
async function waitForDebounce(): Promise<void> {
await new Promise((resolve) => setTimeout(resolve, 240));
function waitForDebounce(): Promise<void> {
return new Promise((resolve) => setTimeout(resolve, 250));
}
it('renders the global search input and shortcut hint', () => {
const text = fixture.nativeElement.textContent as string;
const input = fixture.nativeElement.querySelector('input[aria-label="Global search"]') as HTMLInputElement;
expect(input).toBeTruthy();
expect(input.placeholder).toContain('Search docs');
expect(text).toContain('K');
it('renders the global search input', () => {
const input = fixture.nativeElement.querySelector('input[aria-label="Global search"]') as HTMLInputElement | null;
expect(input).not.toBeNull();
expect(input?.placeholder).toContain('Try:');
});
it('queries SearchClient and renders grouped results', async () => {
component.onFocus();
component.onQueryChange('docker login fails');
await waitForDebounce();
fixture.detectChanges();
expect(searchClient.search).toHaveBeenCalledWith('docker login fails');
expect(component.groupedResults().length).toBe(1);
expect(component.groupedResults()[0].type).toBe('docs');
expect(component.flatResults().length).toBe(1);
});
it('does not query API for terms shorter than two characters', async () => {
it('queries unified search for one-character query terms', async () => {
component.onFocus();
component.onQueryChange('a');
await waitForDebounce();
expect(searchClient.search).toHaveBeenCalledWith('a', undefined);
});
it('consumes chat-to-search context when navigation changes', () => {
searchChatContext.consumeChatToSearch.and.returnValue({
query: 'CVE-2024-21626',
domain: 'findings',
} as any);
routerEvents.next(new NavigationEnd(1, '/security/triage', '/security/triage'));
fixture.detectChanges();
expect(searchClient.search).not.toHaveBeenCalled();
expect(component.searchResponse()).toBeNull();
});
it('navigates to selected result and persists recent search', () => {
component.query.set('docker login fails');
const result: SearchResult = {
id: 'docs:troubleshooting',
type: 'docs',
title: 'Registry login troubleshooting',
subtitle: 'docs/operations/troubleshooting.md#docker-registry-login-fails',
description: 'Use custom CA bundle and verify trust store.',
route: '/docs/docs%2Foperations%2Ftroubleshooting.md#docker-registry-login-fails',
matchScore: 0.95,
open: {
kind: 'docs',
docs: {
path: 'docs/operations/troubleshooting.md',
anchor: 'docker-registry-login-fails',
spanStart: 42,
spanEnd: 68,
},
},
};
component.onSelect(result);
expect(router.navigateByUrl).toHaveBeenCalledWith('/docs/docs%2Foperations%2Ftroubleshooting.md#docker-registry-login-fails');
const stored = JSON.parse(localStorage.getItem('stella-recent-searches') ?? '[]') as string[];
expect(stored[0]).toBe('docker login fails');
expect(component.query()).toBe('CVE-2024-21626');
});
});

View File

@@ -0,0 +1,27 @@
import { normalizeSearchActionRoute } from '../../app/layout/global-search/search-route-matrix';
describe('normalizeSearchActionRoute', () => {
it('maps findings routes into security finding detail', () => {
expect(normalizeSearchActionRoute('/triage/findings/abc-123')).toBe('/security/findings/abc-123');
});
it('maps vex hub routes into advisories page query', () => {
expect(normalizeSearchActionRoute('/vex-hub/CVE-2024-21626')).toBe('/security/advisories-vex?q=CVE-2024-21626');
});
it('maps proof-chain routes into evidence proofs query', () => {
expect(normalizeSearchActionRoute('/proof-chain/sha256:deadbeef')).toBe('/evidence/proofs?q=sha256%3Adeadbeef');
});
it('maps policy routes into policy search route', () => {
expect(normalizeSearchActionRoute('/policy/DENY-CRITICAL-PROD')).toBe('/ops/policy?q=DENY-CRITICAL-PROD');
});
it('maps scan routes into security scans route', () => {
expect(normalizeSearchActionRoute('/scans/scan-42')).toBe('/security/scans/scan-42');
});
it('preserves already valid app routes', () => {
expect(normalizeSearchActionRoute('/docs/ops/runbook#overview')).toBe('/docs/ops/runbook#overview');
});
});