feat: Enhance Authority Identity Provider Registry with Bootstrap Capability
- Added support for bootstrap providers in AuthorityIdentityProviderRegistry.
- Introduced a new property for bootstrap providers and updated AggregateCapabilities.
- Updated relevant methods to handle bootstrap capabilities during provider registration.

feat: Introduce Sealed Mode Status in OpenIddict Handlers
- Added SealedModeStatusProperty to AuthorityOpenIddictConstants.
- Enhanced ValidateClientCredentialsHandler, ValidatePasswordGrantHandler, and ValidateRefreshTokenGrantHandler to validate sealed mode evidence.
- Implemented logic to handle airgap seal confirmation requirements.

feat: Update Program Configuration for Sealed Mode
- Registered IAuthoritySealedModeEvidenceValidator in Program.cs.
- Added logging for bootstrap capabilities in identity provider plugins.
- Implemented checks for bootstrap support in API endpoints.

chore: Update Tasks and Documentation
- Marked AUTH-MTLS-11-002 as DONE in TASKS.md.
- Updated documentation to reflect changes in sealed mode and bootstrap capabilities.

fix: Improve CLI Command Handlers Output
- Enhanced output formatting for command responses and prompts in CommandHandlers.cs.

feat: Extend Advisory AI Models
- Added Response property to AdvisoryPipelineOutputModel for better output handling.

fix: Adjust Concelier Web Service Authentication
- Improved JWT token handling in Concelier Web Service to ensure proper token extraction and logging.

test: Enhance Web Service Endpoints Tests
- Added detailed logging for authentication failures in WebServiceEndpointsTests.
- Enabled PII logging for better debugging of authentication issues.

feat: Introduce Air-Gap Configuration Options
- Added AuthorityAirGapOptions and AuthoritySealedModeOptions to StellaOpsAuthorityOptions.
- Implemented validation logic for air-gap configurations to ensure proper setup.
@@ -0,0 +1,215 @@
using System;
using System.Collections.Generic;
using System.Collections.Immutable;
using System.Linq;
using System.Net.Http;
using System.Net.Http.Headers;
using System.Net.Http.Json;
using System.Text.Json.Serialization;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Options;
using StellaOps.AdvisoryAI.Guardrails;
using StellaOps.AdvisoryAI.Orchestration;
using StellaOps.AdvisoryAI.Prompting;

namespace StellaOps.AdvisoryAI.Inference;

// Abstraction over the inference backend that turns a guardrail-checked prompt into advisory output.
public interface IAdvisoryInferenceClient
{
    Task<AdvisoryInferenceResult> GenerateAsync(
        AdvisoryTaskPlan plan,
        AdvisoryPrompt prompt,
        AdvisoryGuardrailResult guardrailResult,
        CancellationToken cancellationToken);
}

public sealed record AdvisoryInferenceResult(
    string Content,
    string? ModelId,
    int? PromptTokens,
    int? CompletionTokens,
    ImmutableDictionary<string, string> Metadata)
{
    public static AdvisoryInferenceResult FromLocal(string content)
        => new(
            content,
            "local.prompt-preview",
            null,
            null,
            ImmutableDictionary.Create<string, string>(StringComparer.Ordinal));

    public static AdvisoryInferenceResult FromFallback(string content, string reason, string? details = null)
    {
        var builder = ImmutableDictionary.CreateBuilder<string, string>(StringComparer.Ordinal);
        builder["inference.fallback_reason"] = reason;
        if (!string.IsNullOrWhiteSpace(details))
        {
            builder["inference.fallback_details"] = details!;
        }

        return new AdvisoryInferenceResult(
            content,
            "remote.fallback",
            null,
            null,
            builder.ToImmutable());
    }
}
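
// Illustrative example (not part of the original file): AdvisoryInferenceResult.FromFallback(text, "remote_timeout")
// produces Content = text, ModelId = "remote.fallback", and Metadata containing
// { "inference.fallback_reason": "remote_timeout" }, which lets callers distinguish degraded output
// from a genuine model response.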

// Local mode: returns the sanitized prompt text as a preview result without calling any model.
public sealed class LocalAdvisoryInferenceClient : IAdvisoryInferenceClient
{
    public Task<AdvisoryInferenceResult> GenerateAsync(
        AdvisoryTaskPlan plan,
        AdvisoryPrompt prompt,
        AdvisoryGuardrailResult guardrailResult,
        CancellationToken cancellationToken)
    {
        ArgumentNullException.ThrowIfNull(prompt);
        ArgumentNullException.ThrowIfNull(guardrailResult);

        var sanitized = guardrailResult.SanitizedPrompt ?? prompt.Prompt ?? string.Empty;
        return Task.FromResult(AdvisoryInferenceResult.FromLocal(sanitized));
    }
}

// Remote mode: POSTs the sanitized prompt to an inference endpoint and degrades to the sanitized
// prompt text whenever the call fails, times out, or returns an empty response.
public sealed class RemoteAdvisoryInferenceClient : IAdvisoryInferenceClient
{
    private readonly HttpClient _httpClient;
    private readonly IOptions<AdvisoryAiInferenceOptions> _options;
    private readonly ILogger<RemoteAdvisoryInferenceClient>? _logger;

    public RemoteAdvisoryInferenceClient(
        HttpClient httpClient,
        IOptions<AdvisoryAiInferenceOptions> options,
        ILogger<RemoteAdvisoryInferenceClient>? logger = null)
    {
        _httpClient = httpClient ?? throw new ArgumentNullException(nameof(httpClient));
        _options = options ?? throw new ArgumentNullException(nameof(options));
        _logger = logger;
    }

    public async Task<AdvisoryInferenceResult> GenerateAsync(
        AdvisoryTaskPlan plan,
        AdvisoryPrompt prompt,
        AdvisoryGuardrailResult guardrailResult,
        CancellationToken cancellationToken)
    {
        ArgumentNullException.ThrowIfNull(plan);
        ArgumentNullException.ThrowIfNull(prompt);
        ArgumentNullException.ThrowIfNull(guardrailResult);

        var sanitized = guardrailResult.SanitizedPrompt ?? prompt.Prompt ?? string.Empty;
        var inferenceOptions = _options.Value ?? new AdvisoryAiInferenceOptions();
        var remote = inferenceOptions.Remote ?? new AdvisoryAiRemoteInferenceOptions();

        // remote.BaseAddress is only checked for presence; the relative endpoint below resolves
        // against the injected HttpClient's BaseAddress, which the host is expected to configure.
        if (remote.BaseAddress is null)
        {
            _logger?.LogWarning("Remote inference is enabled but no base address was configured. Falling back to local prompt output.");
            return AdvisoryInferenceResult.FromLocal(sanitized);
        }

        var endpoint = string.IsNullOrWhiteSpace(remote.Endpoint)
            ? "/v1/inference"
            : remote.Endpoint;

        var request = new RemoteInferenceRequest(
            TaskType: plan.Request.TaskType.ToString(),
            Profile: plan.Request.Profile,
            Prompt: sanitized,
            Metadata: prompt.Metadata.ToDictionary(static pair => pair.Key, static pair => pair.Value, StringComparer.Ordinal),
            Citations: prompt.Citations
                .Select(citation => new RemoteInferenceCitation(citation.Index, citation.DocumentId, citation.ChunkId))
                .ToArray());

        try
        {
            using var response = await _httpClient.PostAsJsonAsync(endpoint, request, cancellationToken).ConfigureAwait(false);
            if (!response.IsSuccessStatusCode)
            {
                var body = await response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false);
                _logger?.LogWarning(
                    "Remote inference request failed with status {StatusCode}. Response body: {Body}",
                    response.StatusCode,
                    body);
                return AdvisoryInferenceResult.FromFallback(sanitized, $"remote_http_{(int)response.StatusCode}", body);
            }

            var payload = await response.Content.ReadFromJsonAsync<RemoteInferenceResponse>(cancellationToken: cancellationToken).ConfigureAwait(false);
            if (payload is null || string.IsNullOrWhiteSpace(payload.Content))
            {
                _logger?.LogWarning("Remote inference response was empty. Falling back to sanitized prompt.");
                return AdvisoryInferenceResult.FromFallback(sanitized, "remote_empty_response");
            }

            var metadataBuilder = ImmutableDictionary.CreateBuilder<string, string>(StringComparer.Ordinal);
            if (payload.Metadata is not null)
            {
                foreach (var pair in payload.Metadata)
                {
                    if (!string.IsNullOrWhiteSpace(pair.Key) && pair.Value is not null)
                    {
                        metadataBuilder[pair.Key] = pair.Value;
                    }
                }
            }

            return new AdvisoryInferenceResult(
                payload.Content,
                payload.ModelId,
                payload.Usage?.PromptTokens,
                payload.Usage?.CompletionTokens,
                metadataBuilder.ToImmutable());
        }
        // HttpClient's Timeout surfaces as an OperationCanceledException; the filter distinguishes it
        // from cancellation requested by the caller.
        catch (OperationCanceledException) when (!cancellationToken.IsCancellationRequested)
        {
            _logger?.LogWarning("Remote inference timed out before completion. Returning sanitized prompt.");
            return AdvisoryInferenceResult.FromFallback(sanitized, "remote_timeout");
        }
        catch (HttpRequestException ex)
        {
            _logger?.LogWarning(ex, "Remote inference HTTP request failed. Returning sanitized prompt.");
            return AdvisoryInferenceResult.FromFallback(sanitized, "remote_http_exception", ex.Message);
        }
    }

    private sealed record RemoteInferenceRequest(
        string TaskType,
        string Profile,
        string Prompt,
        IReadOnlyDictionary<string, string> Metadata,
        IReadOnlyList<RemoteInferenceCitation> Citations);

    private sealed record RemoteInferenceCitation(int Index, string DocumentId, string ChunkId);

    private sealed record RemoteInferenceResponse(
        [property: JsonPropertyName("content")] string Content,
        [property: JsonPropertyName("modelId")] string? ModelId,
        [property: JsonPropertyName("usage")] RemoteInferenceUsage? Usage,
        [property: JsonPropertyName("metadata")] Dictionary<string, string>? Metadata);

    private sealed record RemoteInferenceUsage(
        [property: JsonPropertyName("promptTokens")] int? PromptTokens,
        [property: JsonPropertyName("completionTokens")] int? CompletionTokens);
}
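
// Illustrative wire shapes only: with the System.Net.Http.Json defaults used above (camelCase naming)
// and the JsonPropertyName attributes on the response records, the exchange is expected to look roughly
// like this. The actual remote contract is not defined in this file, and all values are placeholders.
//
// POST {BaseAddress}{Endpoint}
// {
//   "taskType": "<AdvisoryTaskType value>",
//   "profile": "<profile>",
//   "prompt": "<sanitized prompt text>",
//   "metadata": { "<key>": "<value>" },
//   "citations": [ { "index": 0, "documentId": "<doc-id>", "chunkId": "<chunk-id>" } ]
// }
//
// 200 OK
// {
//   "content": "<generated advisory text>",
//   "modelId": "<model-id>",
//   "usage": { "promptTokens": 123, "completionTokens": 456 },
//   "metadata": { "<key>": "<value>" }
// }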

public sealed class AdvisoryAiInferenceOptions
{
    public AdvisoryAiInferenceMode Mode { get; set; } = AdvisoryAiInferenceMode.Local;

    public AdvisoryAiRemoteInferenceOptions Remote { get; set; } = new();
}

public sealed class AdvisoryAiRemoteInferenceOptions
{
    public Uri? BaseAddress { get; set; }

    public string Endpoint { get; set; } = "/v1/inference";

    public string? ApiKey { get; set; }

    public TimeSpan Timeout { get; set; } = TimeSpan.FromSeconds(30);
}

public enum AdvisoryAiInferenceMode
{
    Local,
    Remote
}
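
// Minimal wiring sketch (illustrative, not part of this commit). RemoteAdvisoryInferenceClient posts to a
// relative endpoint, so the host is assumed to project AdvisoryAiRemoteInferenceOptions onto the typed
// HttpClient (BaseAddress, Timeout, optional API key) and, presumably, to pick LocalAdvisoryInferenceClient
// or RemoteAdvisoryInferenceClient based on Mode. The configuration section name and the bearer header
// scheme below are assumptions, not values defined in this file.
//
// services.AddOptions<AdvisoryAiInferenceOptions>()
//     .BindConfiguration("AdvisoryAI:Inference");
//
// services.AddHttpClient<IAdvisoryInferenceClient, RemoteAdvisoryInferenceClient>((provider, client) =>
// {
//     var remote = provider.GetRequiredService<IOptions<AdvisoryAiInferenceOptions>>().Value.Remote;
//     if (remote.BaseAddress is not null)
//     {
//         client.BaseAddress = remote.BaseAddress;          // required for the relative "/v1/inference" call
//     }
//
//     client.Timeout = remote.Timeout;                      // surfaces as the "remote_timeout" fallback
//
//     if (!string.IsNullOrWhiteSpace(remote.ApiKey))
//     {
//         client.DefaultRequestHeaders.Authorization =
//             new AuthenticationHeaderValue("Bearer", remote.ApiKey);
//     }
// });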