feat: Enhance Task Runner with simulation and failure policy support

- Added tests for output projection and failure policy population in TaskPackPlanner.
- Introduced new failure policy manifest in TestManifests.
- Implemented simulation endpoints in the web service for task execution.
- Created TaskRunnerServiceOptions for configuration management.
- Updated appsettings.json to include the TaskRunner configuration section (see the options sketch after this list).
- Enhanced PackRunWorkerService to handle execution graphs and state management.
- Added support for parallel execution and conditional steps in the worker service.
- Updated documentation to reflect new features and changes in execution flow.
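
A minimal sketch of how the new options type might look and bind. Only the type name TaskRunnerServiceOptions and the TaskRunner appsettings section come from this commit; every property below is an assumption chosen to match the features listed above, not the real option surface.

// Hypothetical sketch only; property names are assumptions, not the actual options shape.
public sealed class TaskRunnerServiceOptions
{
    public const string SectionName = "TaskRunner";

    // Assumed knobs mirroring the listed features (parallel execution, failure policy, simulation).
    public int MaxParallelSteps { get; set; } = 4;
    public string DefaultFailurePolicy { get; set; } = "halt";
    public bool EnableSimulation { get; set; } = true;
}

Binding would then follow the usual ASP.NET Core options pattern, e.g. builder.Services.Configure&lt;TaskRunnerServiceOptions&gt;(builder.Configuration.GetSection(TaskRunnerServiceOptions.SectionName)), assuming the web service host wires it that way.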
Branch: master
Date: 2025-11-04 19:05:50 +02:00
Commit: 3bd0955202 (parent 2eb6852d34)
83 changed files with 15161 additions and 10678 deletions


@@ -1,20 +1,74 @@
using Microsoft.Extensions.Configuration;
using Microsoft.Extensions.DependencyInjection;
using Microsoft.Extensions.Hosting;
using Microsoft.Extensions.Logging;
using StellaOps.AdvisoryAI.Caching;
using StellaOps.AdvisoryAI.DependencyInjection;
using StellaOps.AdvisoryAI.Hosting;
using StellaOps.AdvisoryAI.Orchestration;
using StellaOps.AdvisoryAI.Queue;
using StellaOps.AdvisoryAI.Worker.Services;

var builder = Host.CreateApplicationBuilder(args);

builder.Services.AddMetrics();
builder.Services.AddAdvisoryPipeline(options => builder.Configuration.GetSection("AdvisoryAI:Pipeline").Bind(options));
builder.Services.AddAdvisoryPipelineInfrastructure();

// Layer the optional JSON files and ADVISORYAI_-prefixed environment variables on top of the host defaults.
builder.Configuration.AddJsonFile("appsettings.json", optional: true, reloadOnChange: true)
    .AddJsonFile($"appsettings.{builder.Environment.EnvironmentName}.json", optional: true, reloadOnChange: true)
    .AddEnvironmentVariables(prefix: "ADVISORYAI_");

builder.Services.Configure<AdvisoryPlanCacheOptions>(builder.Configuration.GetSection("AdvisoryAI:PlanCache"));
builder.Services.Configure<AdvisoryTaskQueueOptions>(builder.Configuration.GetSection("AdvisoryAI:TaskQueue"));

builder.Services.AddHostedService<AdvisoryTaskWorker>();
builder.Services.AddAdvisoryAiCore(builder.Configuration);
builder.Services.AddHostedService<AdvisoryPipelineWorker>();

var host = builder.Build();
await host.RunAsync();
// Drains the advisory pipeline queue and records metrics; the actual pipeline execution is still a TODO below.
internal sealed class AdvisoryPipelineWorker : BackgroundService
{
    private readonly IAdvisoryPipelineQueueReceiver _queue;
    private readonly IAdvisoryPipelineOrchestrator _orchestrator;
    private readonly AdvisoryAiMetrics _metrics;
    private readonly ILogger<AdvisoryPipelineWorker> _logger;

    public AdvisoryPipelineWorker(
        IAdvisoryPipelineQueueReceiver queue,
        IAdvisoryPipelineOrchestrator orchestrator,
        AdvisoryAiMetrics metrics,
        ILogger<AdvisoryPipelineWorker> logger)
    {
        _queue = queue ?? throw new ArgumentNullException(nameof(queue));
        _orchestrator = orchestrator ?? throw new ArgumentNullException(nameof(orchestrator));
        _metrics = metrics ?? throw new ArgumentNullException(nameof(metrics));
        _logger = logger ?? throw new ArgumentNullException(nameof(logger));
    }

    protected override async Task ExecuteAsync(CancellationToken stoppingToken)
    {
        _logger.LogInformation("Advisory AI worker started");

        while (!stoppingToken.IsCancellationRequested)
        {
            try
            {
                var message = await _queue.DequeueAsync(stoppingToken).ConfigureAwait(false);
                if (message is null)
                {
                    continue;
                }

                _metrics.RecordProcessed(message.Request.TaskType.ToString());
                _logger.LogInformation(
                    "Processing advisory pipeline message {CacheKey} for {Task}",
                    message.PlanCacheKey,
                    message.Request.TaskType);

                // TODO: Execute prompt assembly, guardrails, and inference workflows in future tasks.
            }
            catch (OperationCanceledException)
            {
                break;
            }
            catch (Exception ex)
            {
                _logger.LogError(ex, "Unhandled exception while processing advisory pipeline queue");
                await Task.Delay(TimeSpan.FromSeconds(5), stoppingToken).ConfigureAwait(false);
            }
        }

        _logger.LogInformation("Advisory AI worker stopped");
    }
}
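
For orientation, a minimal sketch of the queue contract as the worker above uses it. The real definitions live in StellaOps.AdvisoryAI.Queue and StellaOps.AdvisoryAI.Orchestration and are not shown in this diff, so every type and member below is inferred from usage, not authoritative.

using System.Threading;
using System.Threading.Tasks;

// Inferred from usage in ExecuteAsync above; not the actual StellaOps definitions.
public interface IAdvisoryPipelineQueueReceiver
{
    // Returns the next queued message, or null when the queue is currently empty.
    Task<AdvisoryPipelineMessage?> DequeueAsync(CancellationToken cancellationToken);
}

// The worker reads PlanCacheKey and Request.TaskType from the dequeued message; names assumed.
public sealed record AdvisoryPipelineMessage(string PlanCacheKey, AdvisoryTaskRequest Request);

public sealed record AdvisoryTaskRequest(AdvisoryTaskType TaskType);

public enum AdvisoryTaskType
{
    // Placeholder value; the real enum members are not visible in this diff.
    Unknown = 0
}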