// src/AdvisoryAI/StellaOps.AdvisoryAI.Plugin.Unified/LlmPluginAdapter.cs

using System.Runtime.CompilerServices;
using StellaOps.AdvisoryAI.Inference.LlmProviders;
using StellaOps.Plugin.Abstractions;
using StellaOps.Plugin.Abstractions.Capabilities;
using StellaOps.Plugin.Abstractions.Context;
using StellaOps.Plugin.Abstractions.Health;
using StellaOps.Plugin.Abstractions.Lifecycle;
// Type aliases to disambiguate the AdvisoryAI and Plugin.Abstractions types,
// which share the same simple names.
using AdvisoryLlmRequest = StellaOps.AdvisoryAI.Inference.LlmProviders.LlmCompletionRequest;
using AdvisoryLlmResult = StellaOps.AdvisoryAI.Inference.LlmProviders.LlmCompletionResult;
using AdvisoryStreamChunk = StellaOps.AdvisoryAI.Inference.LlmProviders.LlmStreamChunk;
using PluginLlmRequest = StellaOps.Plugin.Abstractions.Capabilities.LlmCompletionRequest;
using PluginLlmResult = StellaOps.Plugin.Abstractions.Capabilities.LlmCompletionResult;
using PluginStreamChunk = StellaOps.Plugin.Abstractions.Capabilities.LlmStreamChunk;

namespace StellaOps.AdvisoryAI.Plugin.Unified;
/// <summary>
/// Adapts an existing <see cref="ILlmProvider"/> to the unified <see cref="IPlugin"/> and
/// <see cref="ILlmCapability"/> interfaces, enabling gradual migration of AdvisoryAI LLM
/// providers to the unified plugin architecture.
/// </summary>
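/// <example>
/// A minimal usage sketch; how the provider, plugin descriptor, context, and
/// request are obtained is host-specific and purely illustrative here:
/// <code>
/// ILlmProvider provider = /* existing AdvisoryAI provider */;
/// ILlmProviderPlugin descriptor = /* its plugin metadata */;
/// var adapter = new LlmPluginAdapter(provider, descriptor, priority: 20);
/// await adapter.InitializeAsync(pluginContext, CancellationToken.None);
/// var result = await adapter.CompleteAsync(request, CancellationToken.None);
/// </code>
/// </example>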
public sealed class LlmPluginAdapter : IPlugin, ILlmCapability
{
private readonly ILlmProvider _inner;
private readonly ILlmProviderPlugin _plugin;
private readonly int _priority;
private IPluginContext? _context;
private PluginLifecycleState _state = PluginLifecycleState.Discovered;
private List<LlmModelInfo> _models = new();
/// <summary>
/// Creates a new adapter for an existing LLM provider.
/// </summary>
/// <param name="inner">The existing LLM provider to wrap.</param>
/// <param name="plugin">The plugin metadata for this provider.</param>
/// <param name="priority">Provider priority (higher = preferred).</param>
public LlmPluginAdapter(ILlmProvider inner, ILlmProviderPlugin plugin, int priority = 10)
{
_inner = inner ?? throw new ArgumentNullException(nameof(inner));
_plugin = plugin ?? throw new ArgumentNullException(nameof(plugin));
_priority = priority;
}
/// <inheritdoc />
public PluginInfo Info => new(
Id: $"com.stellaops.llm.{_inner.ProviderId}",
Name: _plugin.DisplayName,
Version: "1.0.0",
Vendor: "Stella Ops",
Description: _plugin.Description);
/// <inheritdoc />
public PluginTrustLevel TrustLevel => PluginTrustLevel.BuiltIn;
/// <inheritdoc />
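// Network is declared alongside Llm because adapted providers typically call a
// local or remote inference endpoint.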
public PluginCapabilities Capabilities => PluginCapabilities.Llm | PluginCapabilities.Network;
/// <inheritdoc />
public PluginLifecycleState State => _state;
#region ILlmCapability
/// <inheritdoc />
public string ProviderId => _inner.ProviderId;
/// <inheritdoc />
public int Priority => _priority;
/// <inheritdoc />
public IReadOnlyList<LlmModelInfo> AvailableModels => _models;
/// <inheritdoc />
public Task<bool> IsAvailableAsync(CancellationToken ct)
{
return _inner.IsAvailableAsync(ct);
}
/// <inheritdoc />
public async Task<PluginLlmResult> CompleteAsync(PluginLlmRequest request, CancellationToken ct)
{
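// Map the plugin-surface request onto the AdvisoryAI shape, delegate to the
// wrapped provider, then map the result back.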
var advisoryRequest = ToAdvisoryRequest(request);
var result = await _inner.CompleteAsync(advisoryRequest, ct);
return ToPluginResult(result);
}
/// <inheritdoc />
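/// <example>
/// Consumption sketch (the <c>adapter</c>, <c>request</c>, and <c>ct</c> names
/// are illustrative):
/// <code>
/// await foreach (var chunk in adapter.CompleteStreamAsync(request, ct))
/// {
///     Console.Write(chunk.Content);
/// }
/// </code>
/// </example>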
public IAsyncEnumerable<PluginStreamChunk> CompleteStreamAsync(PluginLlmRequest request, CancellationToken ct)
{
var advisoryRequest = ToAdvisoryRequest(request);
return StreamAdapter(_inner.CompleteStreamAsync(advisoryRequest, ct), ct);
}
/// <inheritdoc />
public Task<LlmEmbeddingResult?> EmbedAsync(string text, CancellationToken ct)
{
// Embedding is not part of the base ILlmProvider contract; providers that do
// support embeddings need their own capability adapters. Callers should treat
// a null result as "not supported".
return Task.FromResult<LlmEmbeddingResult?>(null);
}
#endregion
#region IPlugin
/// <inheritdoc />
public async Task InitializeAsync(IPluginContext context, CancellationToken ct)
{
_context = context;
_state = PluginLifecycleState.Initializing;
// Probe the provider; record a Failed state if the probe throws or reports
// the provider as unavailable, so the lifecycle state stays consistent.
bool available;
try { available = await _inner.IsAvailableAsync(ct); }
catch (Exception ex) when (ex is not OperationCanceledException)
{
_state = PluginLifecycleState.Failed;
throw new InvalidOperationException($"LLM provider '{_inner.ProviderId}' failed its availability probe", ex);
}
if (!available)
{
_state = PluginLifecycleState.Failed;
throw new InvalidOperationException($"LLM provider '{_inner.ProviderId}' is not available");
}
// Initialize with a default model entry (provider-specific models would be discovered at runtime)
_models = new List<LlmModelInfo>
{
new(
Id: _inner.ProviderId,
Name: _plugin.DisplayName,
Description: _plugin.Description,
ParameterCount: null,
ContextLength: null,
Capabilities: new[] { "chat", "completion" })
};
_state = PluginLifecycleState.Active;
context.Logger.Info("LLM plugin adapter initialized for {ProviderId}", _inner.ProviderId);
}
/// <inheritdoc />
public async Task<HealthCheckResult> HealthCheckAsync(CancellationToken ct)
{
try
{
var available = await _inner.IsAvailableAsync(ct);
if (available)
{
return HealthCheckResult.Healthy()
.WithDetails(new Dictionary<string, object>
{
["providerId"] = _inner.ProviderId,
["priority"] = _priority
});
}
return HealthCheckResult.Unhealthy($"LLM provider '{_inner.ProviderId}' is not available");
}
catch (Exception ex)
{
return HealthCheckResult.Unhealthy(ex);
}
}
/// <inheritdoc />
public ValueTask DisposeAsync()
{
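// Mark the plugin stopped before tearing down the wrapped provider; the
// adapter is not reusable after disposal.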
_state = PluginLifecycleState.Stopped;
_inner.Dispose();
return ValueTask.CompletedTask;
}
#endregion
#region Type Mapping
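// The two request/result shapes are currently field-for-field compatible; the
// explicit mappers keep that coupling visible so the shapes can diverge later.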
private static AdvisoryLlmRequest ToAdvisoryRequest(PluginLlmRequest request)
{
return new AdvisoryLlmRequest
{
UserPrompt = request.UserPrompt,
SystemPrompt = request.SystemPrompt,
Model = request.Model,
Temperature = request.Temperature,
MaxTokens = request.MaxTokens,
Seed = request.Seed,
StopSequences = request.StopSequences,
RequestId = request.RequestId
};
}
private static PluginLlmResult ToPluginResult(AdvisoryLlmResult result)
{
return new PluginLlmResult(
Content: result.Content,
ModelId: result.ModelId,
ProviderId: result.ProviderId,
InputTokens: result.InputTokens,
OutputTokens: result.OutputTokens,
TimeToFirstTokenMs: result.TimeToFirstTokenMs,
TotalTimeMs: result.TotalTimeMs,
FinishReason: result.FinishReason,
Deterministic: result.Deterministic,
RequestId: result.RequestId);
}
private static async IAsyncEnumerable<PluginStreamChunk> StreamAdapter(
IAsyncEnumerable<AdvisoryStreamChunk> source,
[EnumeratorCancellation] CancellationToken ct)
{
await foreach (var chunk in source.WithCancellation(ct))
{
yield return new PluginStreamChunk(
Content: chunk.Content,
IsFinal: chunk.IsFinal,
FinishReason: chunk.FinishReason);
}
}
#endregion
}