Add dummy LLM provider, update Concelier sources and JobEngine endpoints

- AdvisoryAI: DummyLlmProvider for offline/testing scenarios,
  wire in LlmProviderFactory
- Concelier: source definitions, registry, and management endpoint updates
- JobEngine: approval and release endpoint updates
- etc/llm-providers/dummy.yaml config

Co-Authored-By: Claude Opus 4.6 (1M context) <noreply@anthropic.com>
This commit is contained in:
master
2026-03-30 17:25:48 +03:00
parent a6ffb38ecf
commit 260fce8ef8
8 changed files with 342 additions and 55 deletions

View File

@@ -0,0 +1,80 @@
using Microsoft.Extensions.Configuration;
using Microsoft.Extensions.DependencyInjection;
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Logging.Abstractions;
using System.Runtime.CompilerServices;
namespace StellaOps.AdvisoryAI.Inference.LlmProviders;
/// <summary>
/// Dummy LLM provider for testing. Reverses the user's question as the "answer"
/// and streams it word by word to exercise the full SSE pipeline.
/// Fully offline and deterministic: no external API is contacted.
/// </summary>
public sealed class DummyLlmProvider : ILlmProvider
{
    /// <summary>Stable identifier used to select this provider.</summary>
    public string ProviderId => "dummy";

    /// <summary>Always available — there is no external dependency to probe.</summary>
    public Task<bool> IsAvailableAsync(CancellationToken cancellationToken = default)
        => Task.FromResult(true);

    /// <summary>
    /// Produces the echo-reverse answer as a single, non-streamed completion.
    /// </summary>
    /// <param name="request">Completion request; only <c>UserPrompt</c> is consulted.</param>
    /// <param name="cancellationToken">Token observed before any work is done.</param>
    /// <returns>A deterministic result with informational word-based token counts.</returns>
    public Task<LlmCompletionResult> CompleteAsync(
        LlmCompletionRequest request, CancellationToken cancellationToken = default)
    {
        // Honor cancellation even though this path completes synchronously.
        cancellationToken.ThrowIfCancellationRequested();

        var answer = BuildAnswer(request.UserPrompt);
        return Task.FromResult(new LlmCompletionResult
        {
            Content = answer,
            ModelId = "dummy-echo-reverse",
            ProviderId = "dummy",
            // Word counts stand in for real token counts; empty entries are
            // dropped so runs of whitespace (or an empty prompt) don't inflate them.
            InputTokens = CountWords(request.UserPrompt),
            OutputTokens = CountWords(answer),
            FinishReason = "stop",
            Deterministic = true,
        });
    }

    /// <summary>
    /// Streams the echo-reverse answer word by word, with a small artificial
    /// delay per chunk to simulate token-by-token generation over SSE.
    /// </summary>
    public async IAsyncEnumerable<LlmStreamChunk> CompleteStreamAsync(
        LlmCompletionRequest request,
        [EnumeratorCancellation] CancellationToken cancellationToken = default)
    {
        var answer = BuildAnswer(request.UserPrompt);
        // Plain Split(' ') (keeping empty entries) so that re-joining the chunks
        // with the trailing space below reproduces the answer's internal spacing.
        var words = answer.Split(' ');
        foreach (var word in words)
        {
            cancellationToken.ThrowIfCancellationRequested();
            await Task.Delay(60, cancellationToken); // simulate token-by-token streaming
            yield return new LlmStreamChunk { Content = word + " ", IsFinal = false };
        }
        yield return new LlmStreamChunk { Content = "", IsFinal = true, FinishReason = "stop" };
    }

    /// <summary>Nothing to release; present to satisfy the provider contract.</summary>
    public void Dispose() { }

    // Builds the canned reply: the prompt echoed back plus its character-reversed form.
    private static string BuildAnswer(string userPrompt)
    {
        // Array.Reverse avoids the LINQ Reverse().ToArray() round-trip.
        // NOTE(review): char-level reversal mangles surrogate pairs (e.g. emoji);
        // acceptable for a test-only provider.
        var chars = userPrompt.ToCharArray();
        Array.Reverse(chars);
        var reversed = new string(chars);
        return $"[Dummy echo-reverse provider] You asked: \"{userPrompt}\" — Reversed: \"{reversed}\"";
    }

    // Informational "token" count: whitespace-delimited words, empties excluded.
    private static int CountWords(string text)
        => text.Split(' ', StringSplitOptions.RemoveEmptyEntries).Length;
}
/// <summary>
/// Plugin registration for the dummy provider.
/// </summary>
public sealed class DummyLlmProviderPlugin : ILlmProviderPlugin
{
    /// <summary>Human-readable plugin name.</summary>
    public string Name { get; } = "Dummy Echo-Reverse";

    /// <summary>Stable identifier; must match <see cref="DummyLlmProvider.ProviderId"/>.</summary>
    public string ProviderId { get; } = "dummy";

    /// <summary>Name shown in provider-selection UIs.</summary>
    public string DisplayName { get; } = "Dummy Echo-Reverse";

    /// <summary>Short description of what the provider does.</summary>
    public string Description { get; } = "Test provider that echoes and reverses the input. No external API needed.";

    /// <summary>Config file looked up under the provider config directory.</summary>
    public string DefaultConfigFileName { get; } = "dummy.yaml";

    /// <summary>The dummy provider has no prerequisites, so it is always available.</summary>
    public bool IsAvailable(IServiceProvider services)
    {
        return true;
    }

    /// <summary>No settings are required; any configuration is accepted.</summary>
    public LlmProviderConfigValidation ValidateConfiguration(IConfiguration configuration)
    {
        return LlmProviderConfigValidation.Success();
    }

    /// <summary>Instantiates the provider; configuration and services are ignored.</summary>
    public ILlmProvider Create(IServiceProvider services, IConfiguration configuration)
    {
        return new DummyLlmProvider();
    }
}

View File

@@ -135,6 +135,7 @@ public static class LlmProviderPluginExtensions
catalog.RegisterPlugin(new GeminiLlmProviderPlugin());
catalog.RegisterPlugin(new LlamaServerLlmProviderPlugin());
catalog.RegisterPlugin(new OllamaLlmProviderPlugin());
catalog.RegisterPlugin(new DummyLlmProviderPlugin());
// Load configurations from directory
var fullPath = Path.GetFullPath(configDirectory);
@@ -170,6 +171,7 @@ public static class LlmProviderPluginExtensions
catalog.RegisterPlugin(new GeminiLlmProviderPlugin());
catalog.RegisterPlugin(new LlamaServerLlmProviderPlugin());
catalog.RegisterPlugin(new OllamaLlmProviderPlugin());
catalog.RegisterPlugin(new DummyLlmProviderPlugin());
configureCatalog(catalog);