Audit, advisories, and doctors/setup work

This commit is contained in:
master
2026-01-13 18:53:39 +02:00
parent 9ca7cb183e
commit d7be6ba34b
811 changed files with 54242 additions and 4056 deletions

View File

@@ -2,12 +2,14 @@ using System.Collections.Immutable;
using System.Diagnostics;
using System.Linq;
using System.Net;
using System.Runtime.CompilerServices;
using System.Threading.RateLimiting;
using Microsoft.AspNetCore.Builder;
using Microsoft.AspNetCore.Http;
using Microsoft.AspNetCore.RateLimiting;
using Microsoft.Extensions.Configuration;
using Microsoft.Extensions.DependencyInjection;
using Microsoft.Extensions.DependencyInjection.Extensions;
using Microsoft.Extensions.Hosting;
using Microsoft.Extensions.Logging;
using StellaOps.AdvisoryAI.Attestation;
@@ -15,6 +17,7 @@ using StellaOps.AdvisoryAI.Caching;
using StellaOps.AdvisoryAI.Chat;
using StellaOps.Evidence.Pack;
using StellaOps.AdvisoryAI.Diagnostics;
using StellaOps.AdvisoryAI.Evidence;
using StellaOps.AdvisoryAI.Explanation;
using StellaOps.AdvisoryAI.Hosting;
using StellaOps.AdvisoryAI.Metrics;
@@ -36,6 +39,7 @@ builder.Configuration
.AddEnvironmentVariables(prefix: "ADVISORYAI__");
builder.Services.AddAdvisoryAiCore(builder.Configuration);
builder.Services.AddAdvisoryChat(builder.Configuration);
// Authorization service
builder.Services.AddSingleton<StellaOps.AdvisoryAI.WebService.Services.IAuthorizationService, StellaOps.AdvisoryAI.WebService.Services.HeaderBasedAuthorizationService>();
@@ -59,6 +63,7 @@ builder.Services.AddInMemoryAiAttestationStore();
// Evidence Packs (Sprint: SPRINT_20260109_011_005 Task: EVPK-010)
builder.Services.AddEvidencePack();
builder.Services.TryAddSingleton<IEvidencePackSigner, NullEvidencePackSigner>();
builder.Services.AddEndpointsApiExplorer();
builder.Services.AddOpenApi();
@@ -189,6 +194,9 @@ app.MapDelete("/v1/advisory-ai/conversations/{conversationId}", HandleDeleteConv
app.MapGet("/v1/advisory-ai/conversations", HandleListConversations)
.RequireRateLimiting("advisory-ai");
// Chat gateway endpoints (controlled conversational interface)
app.MapChatEndpoints();
// AI Attestations endpoints (Sprint: SPRINT_20260109_011_001 Task: AIAT-009)
app.MapAttestationEndpoints();
@@ -1096,10 +1104,40 @@ static async Task<IResult> HandleAddTurn(
? null
: assistantTurn.ProposedActions.Select(StellaOps.AdvisoryAI.WebService.Contracts.ProposedActionResponse.FromAction).ToList(),
GroundingScore = 1.0, // Placeholder
TokenCount = assistantContent.Split(' ').Length, // Rough estimate
TokenCount = assistantContent.Split(' ').Length, // Rough estimate
DurationMs = (long)elapsed.TotalMilliseconds
};
if (request.Stream)
{
httpContext.Response.ContentType = "text/event-stream";
httpContext.Response.Headers.CacheControl = "no-cache";
httpContext.Response.Headers.Connection = "keep-alive";
if (responseStreamer is null)
{
await httpContext.Response.WriteAsync(
"event: token\n" +
$"data: {assistantContent}\n\n",
cancellationToken);
await httpContext.Response.Body.FlushAsync(cancellationToken);
return Results.Empty;
}
await foreach (var streamEvent in responseStreamer.StreamResponseAsync(
StreamPlaceholderTokens(assistantContent, cancellationToken),
conversationId,
assistantTurn.TurnId,
cancellationToken))
{
var payload = ChatResponseStreamer.FormatAsSSE(streamEvent);
await httpContext.Response.WriteAsync(payload, cancellationToken);
await httpContext.Response.Body.FlushAsync(cancellationToken);
}
return Results.Empty;
}
return Results.Ok(response);
}
catch (ConversationNotFoundException)
@@ -1180,25 +1218,63 @@ static async Task<IResult> HandleListConversations(
// Authorizes chat access from gateway-forwarded identity headers.
// Collects tokens from the "X-StellaOps-Scopes" and "X-StellaOps-Roles"
// headers (space- or comma-delimited, compared case-insensitively) and
// grants access when any chat-capable scope or role is present.
// Returns true when the caller may use the chat endpoints.
// NOTE(review): the diff view interleaved the removed pre-change lines with
// the new implementation; this is the reconstructed post-change version.
static bool EnsureChatAuthorized(HttpContext context)
{
    var tokens = new HashSet<string>(StringComparer.OrdinalIgnoreCase);

    if (context.Request.Headers.TryGetValue("X-StellaOps-Scopes", out var scopes))
    {
        AddHeaderTokens(tokens, scopes);
    }

    if (context.Request.Headers.TryGetValue("X-StellaOps-Roles", out var roles))
    {
        AddHeaderTokens(tokens, roles);
    }

    return tokens.Contains("advisory:run")
        || tokens.Contains("advisory:chat")
        || tokens.Contains("chat:user")
        || tokens.Contains("chat:admin");
}
// Splits each non-blank header value on spaces and commas and adds the
// resulting tokens to <paramref name="target"/> (the set's own comparer
// decides case sensitivity). Empty fragments are dropped and tokens trimmed.
static void AddHeaderTokens(HashSet<string> target, IEnumerable<string> values)
{
    var separators = new[] { ' ', ',' };

    foreach (var headerValue in values)
    {
        if (string.IsNullOrWhiteSpace(headerValue))
        {
            continue;
        }

        var pieces = headerValue.Split(
            separators,
            StringSplitOptions.RemoveEmptyEntries | StringSplitOptions.TrimEntries);
        target.UnionWith(pieces);
    }
}
// Builds the canned assistant reply used until the LLM pipeline is wired up.
// Echoes the user's message inside a fixed placeholder sentence.
static string GeneratePlaceholderResponse(string userMessage)
{
    var echo = $"I received your message: \"{userMessage}\". This is a placeholder response. ";
    const string trailer = "The full chat functionality with grounded responses will be implemented when the LLM pipeline is connected.";
    return echo + trailer;
}
// Streams the placeholder response one whitespace-delimited word at a time,
// appending a trailing space to each chunk and yielding control between
// tokens so the SSE writer can flush incrementally. Honors cancellation
// before every token.
static async IAsyncEnumerable<TokenChunk> StreamPlaceholderTokens(
    string content,
    [EnumeratorCancellation] CancellationToken cancellationToken)
{
    var words = content.Split(
        ' ',
        StringSplitOptions.RemoveEmptyEntries | StringSplitOptions.TrimEntries);

    foreach (var word in words)
    {
        cancellationToken.ThrowIfCancellationRequested();
        yield return new TokenChunk { Content = word + " " };
        await Task.Yield();
    }
}
internal sealed record PipelinePlanRequest(
AdvisoryTaskType? TaskType,
string AdvisoryKey,
@@ -1232,3 +1308,9 @@ internal sealed record BatchPipelinePlanRequest
{
public IReadOnlyList<PipelinePlanRequest> Requests { get; init; } = Array.Empty<PipelinePlanRequest>();
}
// Expose a public partial Program class so integration tests can reference
// it via WebApplicationFactory<Program>.
// NOTE(review): the compiler-generated Program for top-level statements
// lives in the global namespace, so a partial declared inside this
// namespace may declare a *separate* type rather than merging with it —
// confirm against the test project's usage.
namespace StellaOps.AdvisoryAI.WebService
{
    public partial class Program { }
}