feat: Enhance Task Runner with simulation and failure policy support
Some checks failed
Docs CI / lint-and-preview (push) Has been cancelled

- Added tests for output projection and failure policy population in TaskPackPlanner.
- Introduced new failure policy manifest in TestManifests.
- Implemented simulation endpoints in the web service for task execution.
- Created TaskRunnerServiceOptions for configuration management.
- Updated appsettings.json to include TaskRunner configuration.
- Enhanced PackRunWorkerService to handle execution graphs and state management.
- Added support for parallel execution and conditional steps in the worker service.
- Updated documentation to reflect new features and changes in execution flow.
This commit is contained in:
master
2025-11-04 19:05:50 +02:00
parent 2eb6852d34
commit 3bd0955202
83 changed files with 15161 additions and 10678 deletions

View File

@@ -0,0 +1,28 @@
using System.Diagnostics.Metrics;
namespace StellaOps.AdvisoryAI.Hosting;
/// <summary>
/// Emits counters for the Advisory AI pipeline under the
/// "StellaOps.AdvisoryAI" meter (version 1.0.0).
/// </summary>
public sealed class AdvisoryAiMetrics
{
    // One process-wide meter; counter names are stable metric identifiers.
    private static readonly Meter Meter = new("StellaOps.AdvisoryAI", "1.0.0");

    private readonly Counter<long> _requests;
    private readonly Counter<long> _queuePublished;
    private readonly Counter<long> _queueProcessed;

    public AdvisoryAiMetrics()
    {
        _requests = Meter.CreateCounter<long>("advisory_ai_pipeline_requests_total");
        _queuePublished = Meter.CreateCounter<long>("advisory_ai_pipeline_messages_enqueued_total");
        _queueProcessed = Meter.CreateCounter<long>("advisory_ai_pipeline_messages_processed_total");
    }

    /// <summary>Counts one incoming pipeline request, tagged by task type.</summary>
    public void RecordRequest(string taskType) => _requests.Add(1, TaskTypeTag(taskType));

    /// <summary>Counts one message published to the pipeline queue.</summary>
    public void RecordEnqueued(string taskType) => _queuePublished.Add(1, TaskTypeTag(taskType));

    /// <summary>Counts one message consumed from the pipeline queue.</summary>
    public void RecordProcessed(string taskType) => _queueProcessed.Add(1, TaskTypeTag(taskType));

    // Every counter shares the single "task_type" dimension.
    private static KeyValuePair<string, object?> TaskTypeTag(string taskType)
        => KeyValuePair.Create<string, object?>("task_type", taskType);
}

View File

@@ -0,0 +1,37 @@
using System;
using System.IO;
namespace StellaOps.AdvisoryAI.Hosting;
/// <summary>
/// Configuration for the Advisory AI host: SBOM client settings plus the
/// file-system queue location.
/// </summary>
public sealed class AdvisoryAiServiceOptions
{
    /// <summary>Absolute base address of the SBOM context service.</summary>
    public Uri? SbomBaseAddress { get; set; }

    /// <summary>Optional tenant identifier forwarded to the SBOM service.</summary>
    public string? SbomTenant { get; set; }

    /// <summary>Header name used to convey the tenant to the SBOM service.</summary>
    public string SbomTenantHeaderName { get; set; } = "X-StellaOps-Tenant";

    /// <summary>File-system queue settings.</summary>
    public AdvisoryAiQueueOptions Queue { get; set; } = new();

    /// <summary>
    /// Resolves the configured queue directory to an absolute path (relative
    /// paths are rooted at <paramref name="contentRoot"/>) and ensures the
    /// directory exists on disk before returning it.
    /// </summary>
    internal string ResolveQueueDirectory(string contentRoot)
    {
        var path = Queue.DirectoryPath;
        if (string.IsNullOrWhiteSpace(path))
        {
            // Fall back to the conventional location under the content root.
            path = Path.Combine(contentRoot, "data", "advisory-ai", "queue");
        }

        if (!Path.IsPathFullyQualified(path))
        {
            path = Path.GetFullPath(Path.Combine(contentRoot, path));
        }

        Directory.CreateDirectory(path);
        return path;
    }
}

/// <summary>Location of the on-disk advisory pipeline queue.</summary>
public sealed class AdvisoryAiQueueOptions
{
    /// <summary>Queue directory; relative paths resolve against the content root.</summary>
    public string DirectoryPath { get; set; } = Path.Combine("data", "advisory-ai", "queue");
}

View File

@@ -0,0 +1,17 @@
using System;
using System.Globalization;
namespace StellaOps.AdvisoryAI.Hosting;
internal static class AdvisoryAiServiceOptionsValidator
{
public static void Validate(AdvisoryAiServiceOptions options)
{
if (options is null)
{
throw new ArgumentNullException(nameof(options));
}
if (options.SbomBaseAddress is null || !options.SbomBaseAddress.IsAbsoluteUri)
{
throw new InvalidOperationException(AdvisoryAI:SbomBaseAddress

View File

@@ -0,0 +1,168 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Text.Json;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Options;
using StellaOps.AdvisoryAI.Orchestration;
namespace StellaOps.AdvisoryAI.Hosting;
/// <summary>Publishes advisory pipeline execution messages onto the durable queue.</summary>
public interface IAdvisoryPipelineQueuePublisher
{
    /// <summary>Persists <paramref name="message"/> so a worker can pick it up later.</summary>
    Task EnqueueAsync(AdvisoryPipelineExecutionMessage message, CancellationToken cancellationToken);
}

/// <summary>Consumes advisory pipeline execution messages from the durable queue.</summary>
public interface IAdvisoryPipelineQueueReceiver
{
    /// <summary>Waits for and returns the next queued message, or null if none is produced.</summary>
    Task<AdvisoryPipelineExecutionMessage?> DequeueAsync(CancellationToken cancellationToken);
}
/// <summary>
/// File-system backed queue for advisory pipeline execution messages. Entries
/// are JSON files whose names sort chronologically, so consumers drain FIFO.
/// Delivery is at-most-once: a message's file is deleted before it is returned.
/// </summary>
internal sealed class FileSystemAdvisoryPipelineQueue : IAdvisoryPipelineQueuePublisher, IAdvisoryPipelineQueueReceiver
{
    private readonly ILogger<FileSystemAdvisoryPipelineQueue> _logger;
    private readonly string _queueDirectory;
    private readonly JsonSerializerOptions _serializerOptions = new(JsonSerializerDefaults.Web)
    {
        WriteIndented = false
    };

    public FileSystemAdvisoryPipelineQueue(
        IOptions<AdvisoryAiServiceOptions> options,
        ILogger<FileSystemAdvisoryPipelineQueue> logger)
    {
        ArgumentNullException.ThrowIfNull(options);
        _logger = logger ?? throw new ArgumentNullException(nameof(logger));
        var serviceOptions = options.Value ?? throw new InvalidOperationException("Advisory AI options not configured.");
        AdvisoryAiServiceOptionsValidator.Validate(serviceOptions);
        _queueDirectory = serviceOptions.ResolveQueueDirectory(AppContext.BaseDirectory);
        Directory.CreateDirectory(_queueDirectory);
    }

    /// <summary>
    /// Serializes <paramref name="message"/> and writes it atomically (temp file
    /// then move) so consumers never observe a partially written payload.
    /// </summary>
    public async Task EnqueueAsync(AdvisoryPipelineExecutionMessage message, CancellationToken cancellationToken)
    {
        ArgumentNullException.ThrowIfNull(message);
        var envelope = FileQueueEnvelope.FromMessage(message);
        var payload = JsonSerializer.Serialize(envelope, _serializerOptions);
        // Timestamp prefix keeps ordinal file-name ordering chronological; the GUID avoids collisions.
        var fileName = $"{DateTimeOffset.UtcNow:yyyyMMddTHHmmssfff}_{Guid.NewGuid():N}.json";
        var targetPath = Path.Combine(_queueDirectory, fileName);
        var tempPath = targetPath + ".tmp";
        await File.WriteAllTextAsync(tempPath, payload, cancellationToken).ConfigureAwait(false);
        File.Move(tempPath, targetPath, overwrite: true);
        _logger.LogInformation("Queued advisory pipeline execution message {CacheKey}", message.PlanCacheKey);
    }

    /// <summary>
    /// Polls the queue directory (2-second interval) until a message is available
    /// or <paramref name="cancellationToken"/> fires. Unparseable (poison) files
    /// are deleted so they cannot wedge the queue; files locked by another worker
    /// are left in place for a later pass.
    /// </summary>
    public async Task<AdvisoryPipelineExecutionMessage?> DequeueAsync(CancellationToken cancellationToken)
    {
        while (true)
        {
            cancellationToken.ThrowIfCancellationRequested();
            var files = Directory.EnumerateFiles(_queueDirectory, "*.json")
                .OrderBy(path => path, StringComparer.Ordinal)
                .ToArray();
            foreach (var file in files)
            {
                AdvisoryPipelineExecutionMessage? message = null;
                var poison = false;
                try
                {
                    // FileShare.None doubles as a cross-worker lock on the entry.
                    await using var stream = new FileStream(file, FileMode.Open, FileAccess.Read, FileShare.None);
                    var envelope = await JsonSerializer.DeserializeAsync<FileQueueEnvelope>(stream, _serializerOptions, cancellationToken).ConfigureAwait(false);
                    if (envelope is not null)
                    {
                        message = envelope.ToMessage();
                    }
                    else
                    {
                        // Literal JSON "null" payload — nothing to process; treat as poison.
                        poison = true;
                    }
                }
                catch (IOException)
                {
                    // File may be locked by another worker; leave it for a later pass
                    // instead of deleting it (deleting here would lose the message).
                    continue;
                }
                catch (JsonException ex)
                {
                    _logger.LogWarning(ex, "Failed to deserialize advisory pipeline message from {File}", file);
                    poison = true;
                }
                if (message is not null)
                {
                    TryDelete(file);
                    _logger.LogInformation("Dequeued advisory pipeline execution message {CacheKey}", message.PlanCacheKey);
                    return message;
                }
                if (poison)
                {
                    TryDelete(file);
                }
            }
            await Task.Delay(TimeSpan.FromSeconds(2), cancellationToken).ConfigureAwait(false);
        }
    }

    // Best-effort delete: a racing worker may have removed or locked the file already.
    private void TryDelete(string file)
    {
        try
        {
            File.Delete(file);
        }
        catch (IOException ex)
        {
            _logger.LogDebug(ex, "Failed to delete queue file {File}", file);
        }
    }

    /// <summary>On-disk JSON shape for a queued execution message.</summary>
    private sealed record FileQueueEnvelope(
        string PlanCacheKey,
        AdvisoryPipelineRequestEnvelope Request,
        Dictionary<string, string> Metadata)
    {
        public static FileQueueEnvelope FromMessage(AdvisoryPipelineExecutionMessage message)
            => new(
                message.PlanCacheKey,
                AdvisoryPipelineRequestEnvelope.FromRequest(message.Request),
                new Dictionary<string, string>(message.PlanMetadata, StringComparer.Ordinal));

        public AdvisoryPipelineExecutionMessage ToMessage()
            => new(
                PlanCacheKey,
                Request.ToRequest(),
                Metadata);
    }

    /// <summary>Serializable projection of the pipeline task request.</summary>
    private sealed record AdvisoryPipelineRequestEnvelope(
        AdvisoryTaskType TaskType,
        string AdvisoryKey,
        string? ArtifactId,
        string? ArtifactPurl,
        string? PolicyVersion,
        string Profile,
        string[]? PreferredSections,
        bool ForceRefresh)
    {
        public static AdvisoryPipelineRequestEnvelope FromRequest(AdvisoryTaskRequest request)
            => new(
                request.TaskType,
                request.AdvisoryKey,
                request.ArtifactId,
                request.ArtifactPurl,
                request.PolicyVersion,
                request.Profile,
                request.PreferredSections?.ToArray(),
                request.ForceRefresh);

        public AdvisoryTaskRequest ToRequest()
            => new(
                TaskType,
                AdvisoryKey,
                ArtifactId,
                ArtifactPurl,
                PolicyVersion,
                Profile,
                PreferredSections,
                ForceRefresh);
    }
}

View File

@@ -0,0 +1,51 @@
using System;
using Microsoft.Extensions.Configuration;
using Microsoft.Extensions.DependencyInjection;
using Microsoft.Extensions.DependencyInjection.Extensions;
using Microsoft.Extensions.Options;
using StellaOps.AdvisoryAI.DependencyInjection;
using StellaOps.AdvisoryAI.Providers;
namespace StellaOps.AdvisoryAI.Hosting;
/// <summary>
/// Dependency-injection entry point for the Advisory AI host.
/// </summary>
public static class ServiceCollectionExtensions
{
    /// <summary>
    /// Binds <see cref="AdvisoryAiServiceOptions"/> from the "AdvisoryAI" configuration
    /// section, projects its SBOM settings onto <see cref="SbomContextClientOptions"/>,
    /// and registers the SBOM client, pipeline, file-system queue, and metrics.
    /// </summary>
    /// <param name="configure">Optional override applied after configuration binding.</param>
    public static IServiceCollection AddAdvisoryAiCore(
        this IServiceCollection services,
        IConfiguration configuration,
        Action<AdvisoryAiServiceOptions>? configure = null)
    {
        ArgumentNullException.ThrowIfNull(services);
        ArgumentNullException.ThrowIfNull(configuration);
        services.AddOptions<AdvisoryAiServiceOptions>()
            .Bind(configuration.GetSection("AdvisoryAI"))
            .PostConfigure(options =>
            {
                // Caller override runs after binding; Validate then throws eagerly on bad settings.
                configure?.Invoke(options);
                AdvisoryAiServiceOptionsValidator.Validate(options);
            })
            .Validate(AdvisoryAiServiceOptionsValidator.TryValidate)
            .ValidateOnStart();
        // Mirror the advisory-level SBOM settings onto the typed HTTP client options.
        services.AddOptions<SbomContextClientOptions>()
            .Configure<IOptions<AdvisoryAiServiceOptions>>((target, source) =>
            {
                var advisoryOptions = source.Value;
                target.BaseAddress = advisoryOptions.SbomBaseAddress;
                target.Tenant = advisoryOptions.SbomTenant;
                target.TenantHeaderName = advisoryOptions.SbomTenantHeaderName;
            })
            .Validate(opt => opt.BaseAddress is not null && opt.BaseAddress.IsAbsoluteUri, "SBOM base address must be absolute.");
        services.AddSbomContext();
        services.AddAdvisoryPipeline();
        // A single queue instance serves both the publisher and receiver roles.
        services.TryAddSingleton<FileSystemAdvisoryPipelineQueue>();
        services.TryAddSingleton<IAdvisoryPipelineQueuePublisher>(sp => sp.GetRequiredService<FileSystemAdvisoryPipelineQueue>());
        services.TryAddSingleton<IAdvisoryPipelineQueueReceiver>(sp => sp.GetRequiredService<FileSystemAdvisoryPipelineQueue>());
        services.TryAddSingleton<AdvisoryAiMetrics>();
        return services;
    }
}

View File

@@ -0,0 +1,12 @@
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<TargetFramework>net10.0</TargetFramework>
<LangVersion>preview</LangVersion>
<Nullable>enable</Nullable>
<ImplicitUsings>enable</ImplicitUsings>
<TreatWarningsAsErrors>true</TreatWarningsAsErrors>
</PropertyGroup>
<ItemGroup>
<ProjectReference Include="..\StellaOps.AdvisoryAI\StellaOps.AdvisoryAI.csproj" />
</ItemGroup>
</Project>

View File

@@ -1,289 +1,86 @@
using System.Collections.Generic;
using System.Collections.Immutable;
using System.ComponentModel.DataAnnotations;
using System.Linq;
using Microsoft.AspNetCore.Http.HttpResults;
using Microsoft.AspNetCore.Mvc;
using StellaOps.AdvisoryAI.Caching;
using StellaOps.AdvisoryAI.DependencyInjection;
using StellaOps.AdvisoryAI.Metrics;
using Microsoft.AspNetCore.Builder;
using Microsoft.AspNetCore.Http;
using Microsoft.Extensions.Configuration;
using Microsoft.Extensions.DependencyInjection;
using Microsoft.Extensions.Hosting;
using Microsoft.Extensions.Logging;
using StellaOps.AdvisoryAI.Hosting;
using StellaOps.AdvisoryAI.Orchestration;
using StellaOps.AdvisoryAI.Queue;
using StellaOps.AdvisoryAI.WebService.Contracts;
using StellaOps.AdvisoryAI.Execution;
using StellaOps.AdvisoryAI.Outputs;
var builder = WebApplication.CreateBuilder(args);
builder.Services.AddProblemDetails();
builder.Configuration.AddJsonFile("appsettings.json", optional: true, reloadOnChange: true)
.AddJsonFile($"appsettings.{builder.Environment.EnvironmentName}.json", optional: true, reloadOnChange: true)
.AddEnvironmentVariables(prefix: "ADVISORYAI_");
builder.Services.AddAdvisoryAiCore(builder.Configuration);
builder.Services.AddEndpointsApiExplorer();
builder.Services.AddSwaggerGen();
builder.Services.AddMetrics();
builder.Services.AddAdvisoryPipeline(options => builder.Configuration.GetSection("AdvisoryAI:Pipeline").Bind(options));
builder.Services.AddAdvisoryPipelineInfrastructure();
builder.Services.Configure<AdvisoryPlanCacheOptions>(builder.Configuration.GetSection("AdvisoryAI:PlanCache"));
builder.Services.Configure<AdvisoryTaskQueueOptions>(builder.Configuration.GetSection("AdvisoryAI:TaskQueue"));
builder.Services.AddProblemDetails();
var app = builder.Build();
app.UseExceptionHandler();
app.UseStatusCodePages();
app.UseSwagger();
app.UseSwaggerUI();
app.MapGet("/health/ready", () => Results.Ok(new { status = "ready" }));
app.MapPost("/api/v1/advisory/plan", async Task<Results<Ok<AdvisoryPlanResponse>, ValidationProblem>> (
[FromBody] AdvisoryPlanRequest request,
IAdvisoryPipelineOrchestrator orchestrator,
IAdvisoryPlanCache cache,
AdvisoryPipelineMetrics metrics,
TimeProvider timeProvider,
CancellationToken cancellationToken) =>
app.UseExceptionHandler(static options => options.Run(async context =>
{
if (!MiniValidator.TryValidate(request, out var errors))
{
return TypedResults.ValidationProblem(errors);
}
var problem = Results.Problem(statusCode: StatusCodes.Status500InternalServerError);
await problem.ExecuteAsync(context);
}));
var taskRequest = request.ToTaskRequest();
var start = timeProvider.GetTimestamp();
var plan = await orchestrator.CreatePlanAsync(taskRequest, cancellationToken).ConfigureAwait(false);
await cache.SetAsync(plan.CacheKey, plan, cancellationToken).ConfigureAwait(false);
var elapsed = timeProvider.GetElapsedTime(start);
app.MapGet("/health", () => Results.Ok(new { status = "ok" }));
metrics.RecordPlanCreated(elapsed.TotalSeconds, taskRequest.TaskType);
var response = new AdvisoryPlanResponse(
plan.CacheKey,
plan.Request.TaskType,
plan.Request.AdvisoryKey,
plan.Request.Profile,
plan.StructuredChunks.Length,
plan.VectorResults.Sum(result => result.Matches.Length),
plan.SbomContext is not null,
plan.Metadata,
timeProvider.GetUtcNow());
return TypedResults.Ok(response);
});
app.MapPost("/api/v1/advisory/queue", async Task<Results<Accepted<AdvisoryQueueResponse>, ValidationProblem>> (
[FromBody] AdvisoryQueueRequest request,
IAdvisoryPlanCache cache,
IAdvisoryTaskQueue queue,
IAdvisoryPipelineOrchestrator orchestrator,
AdvisoryPipelineMetrics metrics,
TimeProvider timeProvider,
CancellationToken cancellationToken) =>
{
if (request is null)
{
return TypedResults.ValidationProblem(new Dictionary<string, string[]>
{
["request"] = new[] { "Request payload is required." }
});
}
AdvisoryTaskPlan? plan = null;
if (!string.IsNullOrWhiteSpace(request.PlanCacheKey))
{
plan = await cache.TryGetAsync(request.PlanCacheKey!, cancellationToken).ConfigureAwait(false);
}
if (plan is null)
{
if (request.Plan is null)
{
return TypedResults.ValidationProblem(new Dictionary<string, string[]>
{
["plan"] = new[] { "Either planCacheKey or plan must be supplied." }
});
}
if (!MiniValidator.TryValidate(request.Plan, out var planErrors))
{
return TypedResults.ValidationProblem(planErrors);
}
var taskRequest = request.Plan.ToTaskRequest();
var start = timeProvider.GetTimestamp();
plan = await orchestrator.CreatePlanAsync(taskRequest, cancellationToken).ConfigureAwait(false);
await cache.SetAsync(plan.CacheKey, plan, cancellationToken).ConfigureAwait(false);
var elapsed = timeProvider.GetElapsedTime(start);
metrics.RecordPlanCreated(elapsed.TotalSeconds, plan.Request.TaskType);
}
await queue.EnqueueAsync(new AdvisoryTaskQueueMessage(plan.CacheKey, plan.Request), cancellationToken).ConfigureAwait(false);
metrics.RecordPlanQueued(plan.Request.TaskType);
var response = new AdvisoryQueueResponse(
plan.CacheKey,
plan.Request.TaskType,
plan.Metadata,
"Plan enqueued for processing.");
return TypedResults.Accepted($"/api/v1/advisory/queue/{plan.CacheKey}", response);
});
app.MapPost("/api/v1/advisory/{taskType}", async Task<Results<Ok<AdvisoryOutputResponse>, ValidationProblem>> (
app.MapPost("/v1/advisory-ai/pipeline/{taskType}", async (
string taskType,
[FromBody] AdvisoryExecuteRequest request,
PipelinePlanRequest request,
IAdvisoryPipelineOrchestrator orchestrator,
IAdvisoryPlanCache cache,
IAdvisoryPipelineExecutor executor,
IAdvisoryOutputStore outputStore,
AdvisoryPipelineMetrics metrics,
TimeProvider timeProvider,
IAdvisoryPipelineQueuePublisher queue,
AdvisoryAiMetrics metrics,
ILoggerFactory loggerFactory,
CancellationToken cancellationToken) =>
{
if (!TryParseTaskType(taskType, out var taskTypeEnum, out var routeError))
if (!Enum.TryParse<AdvisoryTaskType>(taskType, ignoreCase: true, out var parsedType))
{
return TypedResults.ValidationProblem(new Dictionary<string, string[]>
{
["taskType"] = new[] { routeError }
});
return Results.BadRequest(new { error = $"Unknown task type {taskType}." });
}
if (!MiniValidator.TryValidate(request, out var errors))
{
return TypedResults.ValidationProblem(errors);
}
var httpRequest = request with { TaskType = parsedType };
var orchestratorRequest = httpRequest.ToTaskRequest();
var taskRequest = request.ToTaskRequest(taskTypeEnum);
var plan = await orchestrator.CreatePlanAsync(taskRequest, cancellationToken).ConfigureAwait(false);
var plan = await orchestrator.CreatePlanAsync(orchestratorRequest, cancellationToken).ConfigureAwait(false);
metrics.RecordRequest(plan.Request.TaskType.ToString());
var existingPlan = await cache.TryGetAsync(plan.CacheKey, cancellationToken).ConfigureAwait(false);
await cache.SetAsync(plan.CacheKey, plan, cancellationToken).ConfigureAwait(false);
await queue.EnqueueAsync(new AdvisoryPipelineExecutionMessage(plan.CacheKey, plan.Request, plan.Metadata), cancellationToken).ConfigureAwait(false);
metrics.RecordEnqueued(plan.Request.TaskType.ToString());
var planFromCache = existingPlan is not null && !request.ForceRefresh;
AdvisoryPipelineOutput? output = null;
if (!request.ForceRefresh)
{
output = await outputStore.TryGetAsync(plan.CacheKey, plan.Request.TaskType, plan.Request.Profile, cancellationToken).ConfigureAwait(false);
}
if (output is null)
{
var message = new AdvisoryTaskQueueMessage(plan.CacheKey, plan.Request);
await executor.ExecuteAsync(plan, message, planFromCache, cancellationToken).ConfigureAwait(false);
output = await outputStore.TryGetAsync(plan.CacheKey, plan.Request.TaskType, plan.Request.Profile, cancellationToken).ConfigureAwait(false);
}
if (output is null)
{
return TypedResults.ValidationProblem(new Dictionary<string, string[]>
{
["execution"] = new[] { "Failed to generate advisory output." }
});
}
metrics.RecordPlanProcessed(plan.Request.TaskType, planFromCache);
var response = ToOutputResponse(output);
return TypedResults.Ok(response);
});
app.MapGet("/api/v1/advisory/outputs/{cacheKey}", async Task<Results<Ok<AdvisoryOutputResponse>, ValidationProblem, NotFound>> (
string cacheKey,
[FromQuery] AdvisoryTaskType? taskType,
[FromQuery] string? profile,
IAdvisoryOutputStore outputStore,
CancellationToken cancellationToken) =>
{
if (string.IsNullOrWhiteSpace(cacheKey))
{
return TypedResults.ValidationProblem(new Dictionary<string, string[]>
{
["cacheKey"] = new[] { "Cache key is required." }
});
}
if (taskType is null)
{
return TypedResults.ValidationProblem(new Dictionary<string, string[]>
{
["taskType"] = new[] { "Task type query parameter is required." }
});
}
if (string.IsNullOrWhiteSpace(profile))
{
return TypedResults.ValidationProblem(new Dictionary<string, string[]>
{
["profile"] = new[] { "Profile query parameter is required." }
});
}
var output = await outputStore.TryGetAsync(cacheKey, taskType.Value, profile!, cancellationToken).ConfigureAwait(false);
if (output is null)
{
return TypedResults.NotFound();
}
return TypedResults.Ok(ToOutputResponse(output));
return Results.Ok(AdvisoryPipelinePlanResponse.FromPlan(plan));
});
app.Run();
static bool TryParseTaskType(string routeValue, out AdvisoryTaskType taskType, out string error)
internal sealed record PipelinePlanRequest(
AdvisoryTaskType? TaskType,
string AdvisoryKey,
string? ArtifactId,
string? ArtifactPurl,
string? PolicyVersion,
string Profile = "default",
IReadOnlyCollection<string>? PreferredSections = null,
bool ForceRefresh = false)
{
if (Enum.TryParse(routeValue, ignoreCase: true, out taskType))
public AdvisoryTaskRequest ToTaskRequest()
{
error = string.Empty;
return true;
}
error = $"Unsupported advisory task type {routeValue}. Expected summary, conflict, or remediation.";
return false;
}
static AdvisoryOutputResponse ToOutputResponse(AdvisoryPipelineOutput output)
{
var violations = output.Guardrail.Violations
.Select(AdvisoryGuardrailViolationResponse.From)
.ToImmutableArray();
var citations = output.Citations
.Select(citation => new AdvisoryCitationResponse(citation.Index, citation.DocumentId, citation.ChunkId))
.ToImmutableArray();
return new AdvisoryOutputResponse(
output.CacheKey,
output.TaskType,
output.Profile,
output.Provenance.OutputHash,
output.Guardrail.Blocked,
violations,
output.Guardrail.Metadata,
output.Prompt,
citations,
output.Metadata,
output.GeneratedAtUtc,
output.PlanFromCache);
}
internal static class MiniValidator
{
public static bool TryValidate(object instance, out Dictionary<string, string[]> errors)
{
var context = new ValidationContext(instance);
var results = new List<ValidationResult>();
if (!Validator.TryValidateObject(instance, context, results, validateAllProperties: true))
if (TaskType is null)
{
errors = results
.GroupBy(result => result.MemberNames.FirstOrDefault() ?? string.Empty)
.ToDictionary(
group => group.Key,
group => group.Select(result => result.ErrorMessage ?? "Invalid value.").ToArray(),
StringComparer.Ordinal);
return false;
throw new InvalidOperationException("Task type must be specified.");
}
errors = new Dictionary<string, string[]>(0);
return true;
return new AdvisoryTaskRequest(
TaskType.Value,
AdvisoryKey,
ArtifactId,
ArtifactPurl,
PolicyVersion,
Profile,
PreferredSections,
ForceRefresh);
}
}

View File

@@ -1,12 +1,13 @@
<Project Sdk="Microsoft.NET.Sdk.Web">
<PropertyGroup>
<TargetFramework>net10.0</TargetFramework>
<LangVersion>preview</LangVersion>
<Nullable>enable</Nullable>
<ImplicitUsings>enable</ImplicitUsings>
<TreatWarningsAsErrors>true</TreatWarningsAsErrors>
</PropertyGroup>
<ItemGroup>
<ProjectReference Include="..\StellaOps.AdvisoryAI\StellaOps.AdvisoryAI.csproj" />
<ProjectReference Include="..\StellaOps.AdvisoryAI.Hosting\StellaOps.AdvisoryAI.Hosting.csproj" />
</ItemGroup>
</Project>

View File

@@ -0,0 +1,8 @@
{
"Logging": {
"LogLevel": {
"Default": "Debug",
"Microsoft.AspNetCore": "Warning"
}
}
}

View File

@@ -0,0 +1,14 @@
{
"AdvisoryAI": {
"SbomBaseAddress": "http://localhost:5210/",
"Queue": {
"DirectoryPath": "../var/advisory-ai-queue"
}
},
"Logging": {
"LogLevel": {
"Default": "Information",
"Microsoft.AspNetCore": "Warning"
}
}
}

View File

@@ -1,20 +1,74 @@
using Microsoft.Extensions.Configuration;
using Microsoft.Extensions.DependencyInjection;
using Microsoft.Extensions.Hosting;
using StellaOps.AdvisoryAI.Caching;
using StellaOps.AdvisoryAI.DependencyInjection;
using StellaOps.AdvisoryAI.Queue;
using StellaOps.AdvisoryAI.Worker.Services;
using Microsoft.Extensions.Logging;
using StellaOps.AdvisoryAI.Hosting;
using StellaOps.AdvisoryAI.Orchestration;
var builder = Host.CreateApplicationBuilder(args);
builder.Services.AddMetrics();
builder.Services.AddAdvisoryPipeline(options => builder.Configuration.GetSection("AdvisoryAI:Pipeline").Bind(options));
builder.Services.AddAdvisoryPipelineInfrastructure();
builder.Configuration.AddJsonFile("appsettings.json", optional: true, reloadOnChange: true)
.AddJsonFile($"appsettings.{builder.Environment.EnvironmentName}.json", optional: true, reloadOnChange: true)
.AddEnvironmentVariables(prefix: "ADVISORYAI_");
builder.Services.Configure<AdvisoryPlanCacheOptions>(builder.Configuration.GetSection("AdvisoryAI:PlanCache"));
builder.Services.Configure<AdvisoryTaskQueueOptions>(builder.Configuration.GetSection("AdvisoryAI:TaskQueue"));
builder.Services.AddHostedService<AdvisoryTaskWorker>();
builder.Services.AddAdvisoryAiCore(builder.Configuration);
builder.Services.AddHostedService<AdvisoryPipelineWorker>();
var host = builder.Build();
await host.RunAsync();
/// <summary>
/// Background worker that drains the advisory pipeline queue and will, in
/// future tasks, run prompt assembly, guardrails, and inference per message.
/// </summary>
internal sealed class AdvisoryPipelineWorker : BackgroundService
{
    private readonly IAdvisoryPipelineQueueReceiver _queue;
    private readonly IAdvisoryPipelineOrchestrator _orchestrator;
    private readonly AdvisoryAiMetrics _metrics;
    private readonly ILogger<AdvisoryPipelineWorker> _logger;

    public AdvisoryPipelineWorker(
        IAdvisoryPipelineQueueReceiver queue,
        IAdvisoryPipelineOrchestrator orchestrator,
        AdvisoryAiMetrics metrics,
        ILogger<AdvisoryPipelineWorker> logger)
    {
        _queue = queue ?? throw new ArgumentNullException(nameof(queue));
        _orchestrator = orchestrator ?? throw new ArgumentNullException(nameof(orchestrator));
        _metrics = metrics ?? throw new ArgumentNullException(nameof(metrics));
        _logger = logger ?? throw new ArgumentNullException(nameof(logger));
    }

    /// <summary>
    /// Dequeue loop: blocks on the queue, records and logs each message, and
    /// backs off for 5 seconds after an unexpected failure. Exits cleanly on
    /// cancellation so the stop log always runs.
    /// </summary>
    protected override async Task ExecuteAsync(CancellationToken stoppingToken)
    {
        _logger.LogInformation("Advisory AI worker started");
        while (!stoppingToken.IsCancellationRequested)
        {
            try
            {
                var message = await _queue.DequeueAsync(stoppingToken).ConfigureAwait(false);
                if (message is null)
                {
                    continue;
                }
                // NOTE(review): counted before any real processing exists — revisit once
                // the execution workflow lands so "processed" reflects completed work.
                _metrics.RecordProcessed(message.Request.TaskType.ToString());
                _logger.LogInformation(
                    "Processing advisory pipeline message {CacheKey} for {Task}",
                    message.PlanCacheKey,
                    message.Request.TaskType);
                // TODO: Execute prompt assembly, guardrails, and inference workflows in future tasks.
            }
            catch (OperationCanceledException)
            {
                break;
            }
            catch (Exception ex)
            {
                _logger.LogError(ex, "Unhandled exception while processing advisory pipeline queue");
                try
                {
                    // Back off briefly so a persistent failure does not spin the loop.
                    await Task.Delay(TimeSpan.FromSeconds(5), stoppingToken).ConfigureAwait(false);
                }
                catch (OperationCanceledException)
                {
                    // Cancellation during back-off previously escaped this catch clause
                    // (a catch block's own exceptions are not handled by its try),
                    // skipping the shutdown log below. Exit the loop cleanly instead.
                    break;
                }
            }
        }
        _logger.LogInformation("Advisory AI worker stopped");
    }
}

View File

@@ -1,12 +1,13 @@
<Project Sdk="Microsoft.NET.Sdk.Worker">
<PropertyGroup>
<TargetFramework>net10.0</TargetFramework>
<LangVersion>preview</LangVersion>
<Nullable>enable</Nullable>
<ImplicitUsings>enable</ImplicitUsings>
<TreatWarningsAsErrors>true</TreatWarningsAsErrors>
</PropertyGroup>
<ItemGroup>
<ProjectReference Include="..\StellaOps.AdvisoryAI\StellaOps.AdvisoryAI.csproj" />
<ProjectReference Include="..\StellaOps.AdvisoryAI.Hosting\StellaOps.AdvisoryAI.Hosting.csproj" />
</ItemGroup>
</Project>

View File

@@ -0,0 +1,7 @@
{
"Logging": {
"LogLevel": {
"Default": "Debug"
}
}
}

View File

@@ -0,0 +1,13 @@
{
"AdvisoryAI": {
"SbomBaseAddress": "http://localhost:5210/",
"Queue": {
"DirectoryPath": "../var/advisory-ai-queue"
}
},
"Logging": {
"LogLevel": {
"Default": "Information"
}
}
}

View File

@@ -29,11 +29,13 @@ Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.DependencyInjecti
EndProject
Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Aoc", "..\Aoc\__Libraries\StellaOps.Aoc\StellaOps.Aoc.csproj", "{C8CE71D3-952A-43F7-9346-20113E37F672}"
EndProject
Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.AdvisoryAI.WebService", "StellaOps.AdvisoryAI.WebService\\StellaOps.AdvisoryAI.WebService.csproj", "{E2F673A3-7B0E-489B-8BA6-65BF9E3A1D5C}"
Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.AdvisoryAI.Hosting", "StellaOps.AdvisoryAI.Hosting\StellaOps.AdvisoryAI.Hosting.csproj", "{F3E0EA9E-E4F0-428A-804B-A599870B971D}"
EndProject
Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.AdvisoryAI.Worker", "StellaOps.AdvisoryAI.Worker\\StellaOps.AdvisoryAI.Worker.csproj", "{6813F3CD-6B46-4955-AB1A-30546AB10A05}"
Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.AdvisoryAI.WebService", "StellaOps.AdvisoryAI.WebService\StellaOps.AdvisoryAI.WebService.csproj", "{AD5CEACE-7BF5-4D48-B473-D60188844A0A}"
EndProject
lobal
Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.AdvisoryAI.Worker", "StellaOps.AdvisoryAI.Worker\StellaOps.AdvisoryAI.Worker.csproj", "{BC68381E-B6EF-4481-8487-00267624D18C}"
EndProject
Global
GlobalSection(SolutionConfigurationPlatforms) = preSolution
Debug|Any CPU = Debug|Any CPU
Debug|x64 = Debug|x64
@@ -187,6 +189,42 @@ lobal
{C8CE71D3-952A-43F7-9346-20113E37F672}.Release|x64.Build.0 = Release|Any CPU
{C8CE71D3-952A-43F7-9346-20113E37F672}.Release|x86.ActiveCfg = Release|Any CPU
{C8CE71D3-952A-43F7-9346-20113E37F672}.Release|x86.Build.0 = Release|Any CPU
{F3E0EA9E-E4F0-428A-804B-A599870B971D}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
{F3E0EA9E-E4F0-428A-804B-A599870B971D}.Debug|Any CPU.Build.0 = Debug|Any CPU
{F3E0EA9E-E4F0-428A-804B-A599870B971D}.Debug|x64.ActiveCfg = Debug|Any CPU
{F3E0EA9E-E4F0-428A-804B-A599870B971D}.Debug|x64.Build.0 = Debug|Any CPU
{F3E0EA9E-E4F0-428A-804B-A599870B971D}.Debug|x86.ActiveCfg = Debug|Any CPU
{F3E0EA9E-E4F0-428A-804B-A599870B971D}.Debug|x86.Build.0 = Debug|Any CPU
{F3E0EA9E-E4F0-428A-804B-A599870B971D}.Release|Any CPU.ActiveCfg = Release|Any CPU
{F3E0EA9E-E4F0-428A-804B-A599870B971D}.Release|Any CPU.Build.0 = Release|Any CPU
{F3E0EA9E-E4F0-428A-804B-A599870B971D}.Release|x64.ActiveCfg = Release|Any CPU
{F3E0EA9E-E4F0-428A-804B-A599870B971D}.Release|x64.Build.0 = Release|Any CPU
{F3E0EA9E-E4F0-428A-804B-A599870B971D}.Release|x86.ActiveCfg = Release|Any CPU
{F3E0EA9E-E4F0-428A-804B-A599870B971D}.Release|x86.Build.0 = Release|Any CPU
{AD5CEACE-7BF5-4D48-B473-D60188844A0A}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
{AD5CEACE-7BF5-4D48-B473-D60188844A0A}.Debug|Any CPU.Build.0 = Debug|Any CPU
{AD5CEACE-7BF5-4D48-B473-D60188844A0A}.Debug|x64.ActiveCfg = Debug|Any CPU
{AD5CEACE-7BF5-4D48-B473-D60188844A0A}.Debug|x64.Build.0 = Debug|Any CPU
{AD5CEACE-7BF5-4D48-B473-D60188844A0A}.Debug|x86.ActiveCfg = Debug|Any CPU
{AD5CEACE-7BF5-4D48-B473-D60188844A0A}.Debug|x86.Build.0 = Debug|Any CPU
{AD5CEACE-7BF5-4D48-B473-D60188844A0A}.Release|Any CPU.ActiveCfg = Release|Any CPU
{AD5CEACE-7BF5-4D48-B473-D60188844A0A}.Release|Any CPU.Build.0 = Release|Any CPU
{AD5CEACE-7BF5-4D48-B473-D60188844A0A}.Release|x64.ActiveCfg = Release|Any CPU
{AD5CEACE-7BF5-4D48-B473-D60188844A0A}.Release|x64.Build.0 = Release|Any CPU
{AD5CEACE-7BF5-4D48-B473-D60188844A0A}.Release|x86.ActiveCfg = Release|Any CPU
{AD5CEACE-7BF5-4D48-B473-D60188844A0A}.Release|x86.Build.0 = Release|Any CPU
{BC68381E-B6EF-4481-8487-00267624D18C}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
{BC68381E-B6EF-4481-8487-00267624D18C}.Debug|Any CPU.Build.0 = Debug|Any CPU
{BC68381E-B6EF-4481-8487-00267624D18C}.Debug|x64.ActiveCfg = Debug|Any CPU
{BC68381E-B6EF-4481-8487-00267624D18C}.Debug|x64.Build.0 = Debug|Any CPU
{BC68381E-B6EF-4481-8487-00267624D18C}.Debug|x86.ActiveCfg = Debug|Any CPU
{BC68381E-B6EF-4481-8487-00267624D18C}.Debug|x86.Build.0 = Debug|Any CPU
{BC68381E-B6EF-4481-8487-00267624D18C}.Release|Any CPU.ActiveCfg = Release|Any CPU
{BC68381E-B6EF-4481-8487-00267624D18C}.Release|Any CPU.Build.0 = Release|Any CPU
{BC68381E-B6EF-4481-8487-00267624D18C}.Release|x64.ActiveCfg = Release|Any CPU
{BC68381E-B6EF-4481-8487-00267624D18C}.Release|x64.Build.0 = Release|Any CPU
{BC68381E-B6EF-4481-8487-00267624D18C}.Release|x86.ActiveCfg = Release|Any CPU
{BC68381E-B6EF-4481-8487-00267624D18C}.Release|x86.Build.0 = Release|Any CPU
EndGlobalSection
GlobalSection(SolutionProperties) = preSolution
HideSolutionNode = FALSE

View File

@@ -0,0 +1,41 @@
using System;
using Microsoft.Extensions.DependencyInjection;
using Microsoft.Extensions.DependencyInjection.Extensions;
using Microsoft.Extensions.Options;
using StellaOps.AdvisoryAI.Abstractions;
using StellaOps.AdvisoryAI.Providers;
using StellaOps.AdvisoryAI.Retrievers;
namespace StellaOps.AdvisoryAI.DependencyInjection;
/// <summary>
/// Registers the SBOM context HTTP client and retriever.
/// </summary>
public static class SbomContextServiceCollectionExtensions
{
    /// <summary>
    /// Adds a typed <see cref="ISbomContextClient"/> backed by the HttpClient factory,
    /// applying <see cref="SbomContextClientOptions"/> (base address plus tenant header)
    /// each time a client instance is created.
    /// </summary>
    /// <param name="configure">Optional delegate to set the client options in code.</param>
    public static IServiceCollection AddSbomContext(this IServiceCollection services, Action<SbomContextClientOptions>? configure = null)
    {
        ArgumentNullException.ThrowIfNull(services);
        var optionsBuilder = services.AddOptions<SbomContextClientOptions>();
        if (configure is not null)
        {
            optionsBuilder.Configure(configure);
        }
        services.AddHttpClient<ISbomContextClient, SbomContextHttpClient>((serviceProvider, client) =>
        {
            // Options are re-read per client creation, so late configuration is honored.
            var options = serviceProvider.GetRequiredService<IOptions<SbomContextClientOptions>>().Value;
            if (options.BaseAddress is not null)
            {
                client.BaseAddress = options.BaseAddress;
            }
            if (!string.IsNullOrWhiteSpace(options.Tenant) && !string.IsNullOrWhiteSpace(options.TenantHeaderName))
            {
                // Remove first so reconfiguration does not stack duplicate header values.
                client.DefaultRequestHeaders.Remove(options.TenantHeaderName);
                client.DefaultRequestHeaders.Add(options.TenantHeaderName, options.Tenant);
            }
        });
        services.TryAddSingleton<ISbomContextRetriever, SbomContextRetriever>();
        return services;
    }
}

View File

@@ -149,6 +149,49 @@ internal sealed class AdvisoryPipelineOrchestrator : IAdvisoryPipelineOrchestrat
{
builder["sbom_version_count"] = sbom.VersionTimeline.Count.ToString(CultureInfo.InvariantCulture);
builder["sbom_dependency_path_count"] = sbom.DependencyPaths.Count.ToString(CultureInfo.InvariantCulture);
if (!sbom.EnvironmentFlags.IsEmpty)
{
foreach (var flag in sbom.EnvironmentFlags.OrderBy(pair => pair.Key, StringComparer.Ordinal))
{
builder[$"sbom_env_{flag.Key}"] = flag.Value;
}
}
if (sbom.BlastRadius is not null)
{
builder["sbom_blast_impacted_assets"] = sbom.BlastRadius.ImpactedAssets.ToString(CultureInfo.InvariantCulture);
builder["sbom_blast_impacted_workloads"] = sbom.BlastRadius.ImpactedWorkloads.ToString(CultureInfo.InvariantCulture);
builder["sbom_blast_impacted_namespaces"] = sbom.BlastRadius.ImpactedNamespaces.ToString(CultureInfo.InvariantCulture);
if (sbom.BlastRadius.ImpactedPercentage is not null)
{
builder["sbom_blast_impacted_percentage"] = sbom.BlastRadius.ImpactedPercentage.Value.ToString("G", CultureInfo.InvariantCulture);
}
if (!sbom.BlastRadius.Metadata.IsEmpty)
{
foreach (var kvp in sbom.BlastRadius.Metadata.OrderBy(pair => pair.Key, StringComparer.Ordinal))
{
builder[$"sbom_blast_meta_{kvp.Key}"] = kvp.Value;
}
}
}
if (!sbom.Metadata.IsEmpty)
{
foreach (var kvp in sbom.Metadata.OrderBy(pair => pair.Key, StringComparer.Ordinal))
{
builder[$"sbom_meta_{kvp.Key}"] = kvp.Value;
}
}
}
if (dependency is not null)
{
foreach (var kvp in dependency.Metadata.OrderBy(pair => pair.Key, StringComparer.Ordinal))
{
builder[$"dependency_{kvp.Key}"] = kvp.Value;
}
}
return builder.ToImmutable();
@@ -201,12 +244,100 @@ internal sealed class AdvisoryPipelineOrchestrator : IAdvisoryPipelineOrchestrat
{
builder.Append("|sbom:timeline=").Append(sbom.VersionTimeline.Count);
builder.Append("|sbom:paths=").Append(sbom.DependencyPaths.Count);
foreach (var kvp in sbom.Metadata.OrderBy(k => k.Key, StringComparer.Ordinal))
foreach (var entry in sbom.VersionTimeline
.OrderBy(e => e.Version, StringComparer.Ordinal)
.ThenBy(e => e.FirstObserved.ToUnixTimeMilliseconds())
.ThenBy(e => e.LastObserved?.ToUnixTimeMilliseconds() ?? long.MinValue)
.ThenBy(e => e.Status, StringComparer.Ordinal)
.ThenBy(e => e.Source, StringComparer.Ordinal))
{
builder.Append("|sbommeta:")
.Append(kvp.Key)
.Append('=')
.Append(kvp.Value);
builder.Append("|timeline:")
.Append(entry.Version)
.Append('@')
.Append(entry.FirstObserved.ToUnixTimeMilliseconds())
.Append('@')
.Append(entry.LastObserved?.ToUnixTimeMilliseconds() ?? -1)
.Append('@')
.Append(entry.Status)
.Append('@')
.Append(entry.Source);
}
foreach (var path in sbom.DependencyPaths
.OrderBy(path => path.IsRuntime)
.ThenBy(path => string.Join(">", path.Nodes.Select(node => node.Identifier)), StringComparer.Ordinal))
{
builder.Append("|path:")
.Append(path.IsRuntime ? 'R' : 'D');
foreach (var node in path.Nodes)
{
builder.Append(":")
.Append(node.Identifier)
.Append('@')
.Append(node.Version ?? string.Empty);
}
if (!string.IsNullOrWhiteSpace(path.Source))
{
builder.Append("|pathsrc:").Append(path.Source);
}
if (!path.Metadata.IsEmpty)
{
foreach (var kvp in path.Metadata.OrderBy(pair => pair.Key, StringComparer.Ordinal))
{
builder.Append("|pathmeta:")
.Append(kvp.Key)
.Append('=')
.Append(kvp.Value);
}
}
}
if (!sbom.EnvironmentFlags.IsEmpty)
{
foreach (var flag in sbom.EnvironmentFlags.OrderBy(pair => pair.Key, StringComparer.Ordinal))
{
builder.Append("|env:")
.Append(flag.Key)
.Append('=')
.Append(flag.Value);
}
}
if (sbom.BlastRadius is not null)
{
builder.Append("|blast:")
.Append(sbom.BlastRadius.ImpactedAssets)
.Append(',')
.Append(sbom.BlastRadius.ImpactedWorkloads)
.Append(',')
.Append(sbom.BlastRadius.ImpactedNamespaces)
.Append(',')
.Append(sbom.BlastRadius.ImpactedPercentage?.ToString("G", CultureInfo.InvariantCulture) ?? string.Empty);
if (!sbom.BlastRadius.Metadata.IsEmpty)
{
foreach (var kvp in sbom.BlastRadius.Metadata.OrderBy(pair => pair.Key, StringComparer.Ordinal))
{
builder.Append("|blastmeta:")
.Append(kvp.Key)
.Append('=')
.Append(kvp.Value);
}
}
}
if (!sbom.Metadata.IsEmpty)
{
foreach (var kvp in sbom.Metadata.OrderBy(pair => pair.Key, StringComparer.Ordinal))
{
builder.Append("|sbommeta:")
.Append(kvp.Key)
.Append('=')
.Append(kvp.Value);
}
}
}
@@ -220,7 +351,20 @@ internal sealed class AdvisoryPipelineOrchestrator : IAdvisoryPipelineOrchestrat
.Append(':')
.Append(node.RuntimeOccurrences)
.Append(':')
.Append(node.DevelopmentOccurrences);
.Append(node.DevelopmentOccurrences)
.Append(':')
.Append(string.Join(',', node.Versions));
}
if (!dependency.Metadata.IsEmpty)
{
foreach (var kvp in dependency.Metadata.OrderBy(pair => pair.Key, StringComparer.Ordinal))
{
builder.Append("|depmeta:")
.Append(kvp.Key)
.Append('=')
.Append(kvp.Value);
}
}
}

View File

@@ -2,6 +2,7 @@ using System.Collections.Immutable;
using StellaOps.AdvisoryAI.Abstractions;
using StellaOps.AdvisoryAI.Documents;
using StellaOps.AdvisoryAI.Context;
using StellaOps.AdvisoryAI.Documents;
using StellaOps.AdvisoryAI.Tools;
namespace StellaOps.AdvisoryAI.Orchestration;

View File

@@ -0,0 +1,30 @@
using System;
namespace StellaOps.AdvisoryAI.Providers;
/// <summary>
/// Configuration for the SBOM context HTTP client.
/// </summary>
/// <summary>
/// Configuration for the SBOM context HTTP client.
/// </summary>
public sealed class SbomContextClientOptions
{
    /// <summary>
    /// Base address for the SBOM service. Required.
    /// </summary>
    /// <remarks>
    /// Applied to the typed <c>HttpClient</c> only when the client does not already
    /// carry a base address; <c>SbomContextHttpClient</c> throws at construction when
    /// neither source provides one.
    /// </remarks>
    public Uri? BaseAddress { get; set; }

    /// <summary>
    /// Relative endpoint that returns SBOM context payloads.
    /// Defaults to <c>api/sbom/context</c>.
    /// </summary>
    public string ContextEndpoint { get; set; } = "api/sbom/context";

    /// <summary>
    /// Optional tenant identifier that should be forwarded to the SBOM service.
    /// Forwarded only when both this value and <see cref="TenantHeaderName"/> are non-blank.
    /// </summary>
    public string? Tenant { get; set; }

    /// <summary>
    /// Header name used when forwarding the tenant. Defaults to <c>X-StellaOps-Tenant</c>.
    /// </summary>
    public string TenantHeaderName { get; set; } = "X-StellaOps-Tenant";
}

View File

@@ -0,0 +1,232 @@
using System;
using System.Collections.Generic;
using System.Collections.Immutable;
using System.Globalization;
using System.Linq;
using System.Net;
using System.Net.Http;
using System.Net.Http.Json;
using System.Text;
using System.Text.Json;
using System.Text.Json.Serialization;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Options;
namespace StellaOps.AdvisoryAI.Providers;
/// <summary>
/// HTTP implementation of <c>ISbomContextClient</c>: fetches an SBOM context payload
/// from the configured endpoint and maps the JSON wire format into domain records.
/// </summary>
internal sealed class SbomContextHttpClient : ISbomContextClient
{
    // Web defaults (camelCase names) plus case-insensitive matching for payload binding.
    private static readonly JsonSerializerOptions SerializerOptions = new(JsonSerializerDefaults.Web)
    {
        PropertyNameCaseInsensitive = true
    };

    private readonly HttpClient httpClient;
    private readonly SbomContextClientOptions options;
    private readonly ILogger<SbomContextHttpClient>? logger;

    /// <summary>
    /// Creates the client. A base address must come from either the injected
    /// <paramref name="httpClient"/> or <paramref name="options"/>; otherwise construction fails.
    /// </summary>
    /// <param name="httpClient">Underlying HTTP client (typically factory-managed).</param>
    /// <param name="options">Client configuration (endpoint, tenant forwarding).</param>
    /// <param name="logger">Optional diagnostic logger.</param>
    /// <exception cref="ArgumentNullException">When <paramref name="httpClient"/>, <paramref name="options"/>, or its value is null.</exception>
    /// <exception cref="InvalidOperationException">When no base address is configured anywhere.</exception>
    public SbomContextHttpClient(
        HttpClient httpClient,
        IOptions<SbomContextClientOptions> options,
        ILogger<SbomContextHttpClient>? logger = null)
    {
        this.httpClient = httpClient ?? throw new ArgumentNullException(nameof(httpClient));
        if (options is null)
        {
            throw new ArgumentNullException(nameof(options));
        }

        // NOTE(review): a null Options.Value is also reported as ArgumentNullException(nameof(options)).
        this.options = options.Value ?? throw new ArgumentNullException(nameof(options));

        // Options only fill in the base address when the HttpClient does not already carry one.
        if (this.options.BaseAddress is not null && this.httpClient.BaseAddress is null)
        {
            this.httpClient.BaseAddress = this.options.BaseAddress;
        }
        if (this.httpClient.BaseAddress is null)
        {
            throw new InvalidOperationException("SBOM context client requires a BaseAddress to be configured.");
        }

        this.httpClient.DefaultRequestHeaders.Accept.ParseAdd("application/json");
        this.logger = logger;
    }

    /// <summary>
    /// Retrieves the SBOM context document for <paramref name="query"/>.
    /// Returns null when the service answers 404 or 204 (no context available) or an
    /// empty body; non-success statuses otherwise surface as <see cref="HttpRequestException"/>.
    /// </summary>
    /// <exception cref="ArgumentNullException">When <paramref name="query"/> is null.</exception>
    /// <exception cref="InvalidOperationException">When the context endpoint is blank.</exception>
    public async Task<SbomContextDocument?> GetContextAsync(SbomContextQuery query, CancellationToken cancellationToken)
    {
        if (query is null)
        {
            throw new ArgumentNullException(nameof(query));
        }

        var endpoint = options.ContextEndpoint?.Trim() ?? string.Empty;
        if (endpoint.Length == 0)
        {
            throw new InvalidOperationException("SBOM context endpoint must be configured.");
        }

        var requestUri = BuildRequestUri(endpoint, query);
        using var request = new HttpRequestMessage(HttpMethod.Get, requestUri);
        ApplyTenantHeader(request);

        using var response = await httpClient.SendAsync(request, cancellationToken).ConfigureAwait(false);

        // 404/204 are treated as "no context available", not as errors.
        if (response.StatusCode == HttpStatusCode.NotFound || response.StatusCode == HttpStatusCode.NoContent)
        {
            logger?.LogDebug("Received {StatusCode} for SBOM context request {Uri}; returning null.", (int)response.StatusCode, requestUri);
            return null;
        }

        if (!response.IsSuccessStatusCode)
        {
            // Capture the error payload for diagnostics before EnsureSuccessStatusCode throws.
            var content = response.Content is null
                ? string.Empty
                : await response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false);
            logger?.LogWarning(
                "SBOM context request {Uri} failed with status {StatusCode}. Payload: {Payload}",
                requestUri,
                (int)response.StatusCode,
                content);
            response.EnsureSuccessStatusCode();
        }

        var payload = await response.Content.ReadFromJsonAsync<SbomContextPayload>(SerializerOptions, cancellationToken)
            .ConfigureAwait(false);
        if (payload is null)
        {
            logger?.LogWarning("SBOM context response for {Uri} was empty.", requestUri);
            return null;
        }

        return payload.ToDocument();
    }

    /// <summary>
    /// Builds the relative request URI deterministically: fixed query-parameter order,
    /// invariant-culture numeric formatting, and escaped names/values.
    /// </summary>
    private Uri BuildRequestUri(string endpoint, SbomContextQuery query)
    {
        // Trim a single leading slash so the endpoint stays relative to BaseAddress.
        var relative = endpoint.StartsWith("/", StringComparison.Ordinal)
            ? endpoint[1..]
            : endpoint;

        var queryBuilder = new StringBuilder();
        AppendQuery(queryBuilder, "artifactId", query.ArtifactId);
        AppendQuery(queryBuilder, "maxTimelineEntries", query.MaxTimelineEntries.ToString(CultureInfo.InvariantCulture));
        AppendQuery(queryBuilder, "maxDependencyPaths", query.MaxDependencyPaths.ToString(CultureInfo.InvariantCulture));
        AppendQuery(queryBuilder, "includeEnvironmentFlags", query.IncludeEnvironmentFlags ? "true" : "false");
        AppendQuery(queryBuilder, "includeBlastRadius", query.IncludeBlastRadius ? "true" : "false");
        if (!string.IsNullOrWhiteSpace(query.Purl))
        {
            AppendQuery(queryBuilder, "purl", query.Purl!);
        }

        var uriString = queryBuilder.Length > 0 ? $"{relative}?{queryBuilder}" : relative;
        return new Uri(httpClient.BaseAddress!, uriString);

        // Appends one escaped name=value pair, inserting '&' between pairs.
        static void AppendQuery(StringBuilder builder, string name, string value)
        {
            if (builder.Length > 0)
            {
                builder.Append('&');
            }
            builder.Append(Uri.EscapeDataString(name));
            builder.Append('=');
            builder.Append(Uri.EscapeDataString(value));
        }
    }

    /// <summary>
    /// Adds the tenant header to an individual request when tenant forwarding is
    /// configured and the header is not already present (e.g. set as a client default).
    /// </summary>
    private void ApplyTenantHeader(HttpRequestMessage request)
    {
        if (string.IsNullOrWhiteSpace(options.Tenant) || string.IsNullOrWhiteSpace(options.TenantHeaderName))
        {
            return;
        }
        if (!request.Headers.Contains(options.TenantHeaderName))
        {
            request.Headers.Add(options.TenantHeaderName, options.Tenant);
        }
    }

    // ----- Wire-format DTOs -----------------------------------------------------------
    // The To* mappers guard against default (uninitialized) immutable collections that
    // the deserializer produces when a JSON property is absent.

    private sealed record SbomContextPayload(
        [property: JsonPropertyName("artifactId")] string ArtifactId,
        [property: JsonPropertyName("purl")] string? Purl,
        [property: JsonPropertyName("versions")] ImmutableArray<SbomVersionPayload> Versions,
        [property: JsonPropertyName("dependencyPaths")] ImmutableArray<SbomDependencyPathPayload> DependencyPaths,
        [property: JsonPropertyName("environmentFlags")] ImmutableDictionary<string, string> EnvironmentFlags,
        [property: JsonPropertyName("blastRadius")] SbomBlastRadiusPayload? BlastRadius,
        [property: JsonPropertyName("metadata")] ImmutableDictionary<string, string> Metadata)
    {
        // Maps the wire payload onto the domain document, normalizing missing collections to empty.
        public SbomContextDocument ToDocument()
            => new(
                ArtifactId,
                Purl,
                Versions.IsDefault ? ImmutableArray<SbomVersionRecord>.Empty : Versions.Select(v => v.ToRecord()).ToImmutableArray(),
                DependencyPaths.IsDefault ? ImmutableArray<SbomDependencyPathRecord>.Empty : DependencyPaths.Select(p => p.ToRecord()).ToImmutableArray(),
                EnvironmentFlags == default ? ImmutableDictionary<string, string>.Empty : EnvironmentFlags,
                BlastRadius?.ToRecord(),
                Metadata == default ? ImmutableDictionary<string, string>.Empty : Metadata);
    }

    private sealed record SbomVersionPayload(
        [property: JsonPropertyName("version")] string Version,
        [property: JsonPropertyName("firstObserved")] DateTimeOffset FirstObserved,
        [property: JsonPropertyName("lastObserved")] DateTimeOffset? LastObserved,
        [property: JsonPropertyName("status")] string Status,
        [property: JsonPropertyName("source")] string Source,
        [property: JsonPropertyName("isFixAvailable")] bool IsFixAvailable,
        [property: JsonPropertyName("metadata")] ImmutableDictionary<string, string> Metadata)
    {
        public SbomVersionRecord ToRecord()
            => new(
                Version,
                FirstObserved,
                LastObserved,
                Status,
                Source,
                IsFixAvailable,
                Metadata == default ? ImmutableDictionary<string, string>.Empty : Metadata);
    }

    private sealed record SbomDependencyPathPayload(
        [property: JsonPropertyName("nodes")] ImmutableArray<SbomDependencyNodePayload> Nodes,
        [property: JsonPropertyName("isRuntime")] bool IsRuntime,
        [property: JsonPropertyName("source")] string? Source,
        [property: JsonPropertyName("metadata")] ImmutableDictionary<string, string> Metadata)
    {
        public SbomDependencyPathRecord ToRecord()
            => new(
                Nodes.IsDefault ? ImmutableArray<SbomDependencyNodeRecord>.Empty : Nodes.Select(n => n.ToRecord()).ToImmutableArray(),
                IsRuntime,
                Source,
                Metadata == default ? ImmutableDictionary<string, string>.Empty : Metadata);
    }

    private sealed record SbomDependencyNodePayload(
        [property: JsonPropertyName("identifier")] string Identifier,
        [property: JsonPropertyName("version")] string? Version)
    {
        public SbomDependencyNodeRecord ToRecord()
            => new(Identifier, Version);
    }

    private sealed record SbomBlastRadiusPayload(
        [property: JsonPropertyName("impactedAssets")] int ImpactedAssets,
        [property: JsonPropertyName("impactedWorkloads")] int ImpactedWorkloads,
        [property: JsonPropertyName("impactedNamespaces")] int ImpactedNamespaces,
        [property: JsonPropertyName("impactedPercentage")] double? ImpactedPercentage,
        [property: JsonPropertyName("metadata")] ImmutableDictionary<string, string> Metadata)
    {
        public SbomBlastRadiusRecord ToRecord()
            => new(
                ImpactedAssets,
                ImpactedWorkloads,
                ImpactedNamespaces,
                ImpactedPercentage,
                Metadata == default ? ImmutableDictionary<string, string>.Empty : Metadata);
    }
}

View File

@@ -10,7 +10,6 @@
<ItemGroup>
<PackageReference Include="Microsoft.Extensions.Logging.Abstractions" Version="10.0.0-rc.2.25502.107" />
<PackageReference Include="Microsoft.Extensions.Options" Version="10.0.0-rc.2.25502.107" />
<PackageReference Include="System.Text.Json" Version="10.0.0-rc.2.25502.107" />
</ItemGroup>
<ItemGroup>
<ProjectReference Include="..\..\Concelier\__Libraries\StellaOps.Concelier.Core\StellaOps.Concelier.Core.csproj" />

View File

@@ -2,16 +2,14 @@
| ID | Status | Owner(s) | Depends on | Description | Exit Criteria |
|----|--------|----------|------------|-------------|---------------|
| AIAI-31-001 | DONE (2025-11-02) | Advisory AI Guild | CONCELIER-VULN-29-001, EXCITITOR-VULN-29-001 | Implement structured and vector retrievers for advisories/VEX with paragraph anchors and citation metadata. | Retrievers return deterministic chunks with source IDs/sections; unit tests cover CSAF/OSV/vendor formats. |
| AIAI-31-002 | DOING | Advisory AI Guild, SBOM Service Guild | SBOM-VULN-29-001 | Build SBOM context retriever (purl version timelines, dependency paths, env flags, blast radius estimator). | Retriever returns paths/metrics under SLA; tests cover ecosystems. |
| AIAI-31-003 | DOING | Advisory AI Guild | AIAI-31-001..002 | Implement deterministic toolset (version comparators, range checks, dependency analysis, policy lookup) exposed via orchestrator. | Tools validated with property tests; outputs cached; docs updated. |
| AIAI-31-002 | DONE (2025-11-04) | Advisory AI Guild, SBOM Service Guild | SBOM-VULN-29-001 | Build SBOM context retriever (purl version timelines, dependency paths, env flags, blast radius estimator). | Retriever returns paths/metrics under SLA; tests cover ecosystems. |
| AIAI-31-003 | DONE (2025-11-04) | Advisory AI Guild | AIAI-31-001..002 | Implement deterministic toolset (version comparators, range checks, dependency analysis, policy lookup) exposed via orchestrator. | Tools validated with property tests; outputs cached; docs updated. |
| AIAI-31-004 | DOING | Advisory AI Guild | AIAI-31-001..003, AUTH-VULN-29-001 | Build orchestration pipeline for Summary/Conflict/Remediation tasks (prompt templates, tool calls, token budgets, caching). | Pipeline executes tasks deterministically; caches keyed by tuple+policy; integration tests cover tasks. |
| AIAI-31-004A | DONE (2025-11-03) | Advisory AI Guild, Platform Guild | AIAI-31-004, AIAI-31-002 | Wire `AdvisoryPipelineOrchestrator` into WebService/Worker, expose API/queue contracts, emit metrics, and stand up cache stub. | API returns plan metadata; worker executes queue message; metrics recorded; doc updated. |
> 2025-11-03: In-memory plan cache + task queue implemented, WebService exposes `/api/v1/advisory/plan` & `/api/v1/advisory/queue`, pipeline metrics wired, worker hosted service dequeues plans and logs processed runs; docs/sprint notes updated.
| AIAI-31-004B | DONE (2025-11-03) | Advisory AI Guild, Security Guild | AIAI-31-004A, DOCS-AIAI-31-003, AUTH-AIAI-31-004 | Implement prompt assembler, guardrail plumbing, cache persistence, DSSE provenance; add golden outputs. | Deterministic outputs cached; guardrails enforced; tests cover prompt assembly + caching. |
> 2025-11-03: Added deterministic prompt assembler, no-op guardrail pipeline hooks, DSSE-ready output persistence with provenance, updated metrics/DI wiring, and golden prompt tests.
| AIAI-31-004A | DOING (2025-11-04) | Advisory AI Guild, Platform Guild | AIAI-31-004, AIAI-31-002 | Wire `AdvisoryPipelineOrchestrator` into WebService/Worker, expose API/queue contracts, emit metrics, and stand up cache stub. | API returns plan metadata; worker executes queue message; metrics recorded; doc updated. |
| AIAI-31-004B | TODO | Advisory AI Guild, Security Guild | AIAI-31-004A, DOCS-AIAI-31-003, AUTH-AIAI-31-004 | Implement prompt assembler, guardrail plumbing, cache persistence, DSSE provenance; add golden outputs. | Deterministic outputs cached; guardrails enforced; tests cover prompt assembly + caching. |
| AIAI-31-004C | TODO | Advisory AI Guild, CLI Guild, Docs Guild | AIAI-31-004B, CLI-AIAI-31-003 | Deliver CLI `stella advise run <task>` command, renderers, documentation updates, and CLI golden tests. | CLI command produces deterministic output; docs published; smoke run recorded. |
| AIAI-31-005 | DOING (2025-11-03) | Advisory AI Guild, Security Guild | AIAI-31-004 | Implement guardrails (redaction, injection defense, output validation, citation enforcement) and fail-safe handling. | Guardrails block adversarial inputs; output validator enforces schemas; security tests pass. |
| AIAI-31-006 | DOING (2025-11-03) | Advisory AI Guild | AIAI-31-004..005 | Expose REST API endpoints (`/advisory/ai/*`) with RBAC, rate limits, OpenAPI schemas, and batching support. | Endpoints deployed with schema validation; rate limits enforced; integration tests cover error codes. |
| AIAI-31-005 | TODO | Advisory AI Guild, Security Guild | AIAI-31-004 | Implement guardrails (redaction, injection defense, output validation, citation enforcement) and fail-safe handling. | Guardrails block adversarial inputs; output validator enforces schemas; security tests pass. |
| AIAI-31-006 | TODO | Advisory AI Guild | AIAI-31-004..005 | Expose REST API endpoints (`/advisory/ai/*`) with RBAC, rate limits, OpenAPI schemas, and batching support. | Endpoints deployed with schema validation; rate limits enforced; integration tests cover error codes. |
| AIAI-31-007 | TODO | Advisory AI Guild, Observability Guild | AIAI-31-004..006 | Instrument metrics (`advisory_ai_latency`, `guardrail_blocks`, `validation_failures`, `citation_coverage`), logs, and traces; publish dashboards/alerts. | Telemetry live; dashboards approved; alerts configured. |
| AIAI-31-008 | TODO | Advisory AI Guild, DevOps Guild | AIAI-31-006..007 | Package inference on-prem container, remote inference toggle, Helm/Compose manifests, scaling guidance, offline kit instructions. | Deployment docs merged; smoke deploy executed; offline kit updated; feature flags documented. |
| AIAI-31-010 | DONE (2025-11-02) | Advisory AI Guild | CONCELIER-VULN-29-001, EXCITITOR-VULN-29-001 | Implement Concelier advisory raw document provider mapping CSAF/OSV payloads into structured chunks for retrieval. | Provider resolves content format, preserves metadata, and passes unit tests covering CSAF/OSV cases. |
@@ -19,10 +17,10 @@
| AIAI-31-009 | TODO | Advisory AI Guild, QA Guild | AIAI-31-001..006 | Develop unit/golden/property/perf tests, injection harness, and regression suite; ensure determinism with seeded caches. | Test suite green; golden outputs stored; injection tests pass; perf targets documented. |
> 2025-11-02: AIAI-31-002 SBOM context domain models finalized with limiter guards; retriever tests now cover flag toggles and path dedupe. Service client integration still pending with SBOM guild.
> 2025-11-03: AIAI-31-002 HTTP SBOM context client wired with configurable headers/timeouts, DI registers fallback null client and typed retriever; tests cover request shaping, response mapping, and 404 handling.
> 2025-11-03: Blocking follow-up tracked via SBOM-AIAI-31-003 waiting on SBOM base URL/API key hand-off plus joint smoke test before enabling live retrieval in staging.
> 2025-11-04: AIAI-31-002 Introduced `SbomContextHttpClient`, DI helper (`AddSbomContext`), and HTTP-mapping tests; retriever wired to typed client with tenant header support and deterministic query construction.
> 2025-11-02: AIAI-31-003 moved to DOING — started work on the deterministic tooling surface (version comparators & dependency analysis). Added semantic-version + EVR comparators and published toolset interface; awaiting downstream wiring.
> 2025-11-04: AIAI-31-003 completed — toolset wired via DI/orchestrator, SBOM context client available, and unit coverage for compare/range/dependency analysis extended.
> 2025-11-02: AIAI-31-004 started orchestration pipeline work — began designing the summary/conflict/remediation workflow (deterministic sequence + cache keys).

View File

@@ -1,4 +1,6 @@
using System.Collections.Generic;
using System.Collections.Immutable;
using System.Linq;
using Microsoft.Extensions.Logging.Abstractions;
using Microsoft.Extensions.Options;
using StellaOps.AdvisoryAI.Documents;
@@ -58,6 +60,77 @@ public sealed class AdvisoryPipelineOrchestratorTests
Assert.Equal(plan.CacheKey, secondPlan.CacheKey);
}
    [Fact]
    public async Task CreatePlanAsync_WhenArtifactIdMissing_SkipsSbomContext()
    {
        // Arrange: the request carries neither artifactId nor purl, so the
        // orchestrator should skip SBOM context retrieval entirely.
        var structuredRetriever = new FakeStructuredRetriever();
        var vectorRetriever = new FakeVectorRetriever();
        var sbomRetriever = new FakeSbomContextRetriever();
        var options = Options.Create(new AdvisoryPipelineOptions());
        var orchestrator = new AdvisoryPipelineOrchestrator(
            structuredRetriever,
            vectorRetriever,
            sbomRetriever,
            new DeterministicToolset(),
            options,
            NullLogger<AdvisoryPipelineOrchestrator>.Instance);
        var request = new AdvisoryTaskRequest(
            AdvisoryTaskType.Conflict,
            advisoryKey: "adv-key",
            artifactId: null,
            artifactPurl: null,
            policyVersion: null,
            profile: "fips-local");

        // Act
        var plan = await orchestrator.CreatePlanAsync(request, CancellationToken.None);

        // Assert: no SBOM context or dependency analysis, and no sbom_* metadata leaks in.
        Assert.Null(plan.SbomContext);
        Assert.Null(plan.DependencyAnalysis);
        Assert.Equal("False", plan.Metadata["includes_sbom"]);
        Assert.DoesNotContain("sbom_version_count", plan.Metadata.Keys);
        Assert.DoesNotContain("sbom_dependency_path_count", plan.Metadata.Keys);
    }
    [Fact]
    public async Task CreatePlanAsync_RespectsOptionFlagsAndProducesStableCacheKey()
    {
        // Arrange: disable env-flag/blast-radius retrieval for Summary tasks and cap
        // timeline/path limits; the toggling retriever varies dictionary key order
        // between calls so the cache key must be proven order-insensitive.
        var structuredRetriever = new FakeStructuredRetriever();
        var vectorRetriever = new FakeVectorRetriever();
        var togglingRetriever = new TogglingSbomContextRetriever();
        var options = Options.Create(new AdvisoryPipelineOptions());
        options.Value.Tasks[AdvisoryTaskType.Summary].IncludeEnvironmentFlags = false;
        options.Value.Tasks[AdvisoryTaskType.Summary].IncludeBlastRadius = false;
        options.Value.Tasks[AdvisoryTaskType.Summary].SbomMaxTimelineEntries = 2;
        options.Value.Tasks[AdvisoryTaskType.Summary].SbomMaxDependencyPaths = 1;
        var orchestrator = new AdvisoryPipelineOrchestrator(
            structuredRetriever,
            vectorRetriever,
            togglingRetriever,
            new DeterministicToolset(),
            options,
            NullLogger<AdvisoryPipelineOrchestrator>.Instance);
        var request = new AdvisoryTaskRequest(
            AdvisoryTaskType.Summary,
            advisoryKey: "adv-key",
            artifactId: "artifact-1",
            artifactPurl: "pkg:npm/example@1.0.0",
            policyVersion: "policy-1",
            profile: "default");

        // Act: plan the same request twice.
        var planOne = await orchestrator.CreatePlanAsync(request, CancellationToken.None);
        var planTwo = await orchestrator.CreatePlanAsync(request, CancellationToken.None);

        // Assert: cache key is stable across runs, the configured limits/flags were
        // forwarded to the retriever, and disabled data never appears in metadata.
        Assert.Equal(planOne.CacheKey, planTwo.CacheKey);
        Assert.True(togglingRetriever.RecordedRequests.All(r => r.IncludeEnvironmentFlags == false));
        Assert.True(togglingRetriever.RecordedRequests.All(r => r.IncludeBlastRadius == false));
        Assert.True(togglingRetriever.RecordedRequests.All(r => r.MaxTimelineEntries == 2));
        Assert.True(togglingRetriever.RecordedRequests.All(r => r.MaxDependencyPaths == 1));
        Assert.DoesNotContain(planOne.Metadata.Keys, key => key.StartsWith("sbom_env_", StringComparison.Ordinal));
        Assert.DoesNotContain(planOne.Metadata.Keys, key => key.StartsWith("sbom_blast_", StringComparison.Ordinal));
    }
private sealed class FakeStructuredRetriever : IAdvisoryStructuredRetriever
{
public Task<AdvisoryRetrievalResult> RetrieveAsync(AdvisoryRetrievalRequest request, CancellationToken cancellationToken)
@@ -128,4 +201,52 @@ public sealed class AdvisoryPipelineOrchestratorTests
return Task.FromResult(result);
}
}
    /// <summary>
    /// Fake retriever that returns logically identical SBOM context on every call but
    /// alternates the insertion order of its dictionaries between even and odd calls,
    /// so tests can prove that plan cache keys are insensitive to key ordering.
    /// Every request is recorded for option-flag assertions.
    /// </summary>
    private sealed class TogglingSbomContextRetriever : ISbomContextRetriever
    {
        // Incremented per call; parity selects which insertion ordering is used.
        private int _invocation;

        public List<SbomContextRequest> RecordedRequests { get; } = new();

        public Task<SbomContextResult> RetrieveAsync(SbomContextRequest request, CancellationToken cancellationToken)
        {
            RecordedRequests.Add(request);
            var evenCall = (_invocation++ % 2) == 0;

            // Same pairs, alternating insertion order.
            var envFlags = evenCall
                ? new Dictionary<string, string>(StringComparer.Ordinal) { ["prod"] = "true", ["stage"] = "false" }
                : new Dictionary<string, string>(StringComparer.Ordinal) { ["stage"] = "false", ["prod"] = "true" };
            var overlapMetadata = evenCall
                ? new Dictionary<string, string>(StringComparer.Ordinal) { ["source"] = "scanner", ["tenant"] = "alpha" }
                : new Dictionary<string, string>(StringComparer.Ordinal) { ["tenant"] = "alpha", ["source"] = "scanner" };

            var context = SbomContextResult.Create(
                request.ArtifactId!,
                request.Purl,
                new[]
                {
                    new SbomVersionTimelineEntry("1.0.0", DateTimeOffset.UtcNow.AddDays(-10), DateTimeOffset.UtcNow.AddDays(-5), "affected", "scanner"),
                    new SbomVersionTimelineEntry("1.1.0", DateTimeOffset.UtcNow.AddDays(-4), null, "fixed", "scanner"),
                },
                new[]
                {
                    new SbomDependencyPath(new []
                    {
                        new SbomDependencyNode("root", "1.0.0"),
                        new SbomDependencyNode("lib-a", "2.0.0"),
                    }, isRuntime: true),
                    new SbomDependencyPath(new []
                    {
                        new SbomDependencyNode("root", "1.0.0"),
                        new SbomDependencyNode("lib-b", "3.5.1"),
                    }, isRuntime: false),
                },
                envFlags,
                new SbomBlastRadiusSummary(5, 3, 2, 0.25, overlapMetadata),
                overlapMetadata);
            return Task.FromResult(context);
        }
    }
}

View File

@@ -51,4 +51,29 @@ public sealed class DeterministicToolsetTests
libB.RuntimeOccurrences.Should().Be(0);
libB.DevelopmentOccurrences.Should().Be(1);
}
    // Verifies TryCompare for both supported version schemes (semver and RPM EVR):
    // returns true and yields -1/0/1 with the usual comparison semantics.
    [Theory]
    [InlineData("semver", "1.2.3", "1.2.4", -1)]
    [InlineData("semver", "1.2.3", "1.2.3", 0)]
    [InlineData("semver", "1.2.4", "1.2.3", 1)]
    [InlineData("evr", "1:1.0-1", "1:1.0-2", -1)]
    [InlineData("evr", "0:2.0-0", "0:2.0-0", 0)]
    [InlineData("evr", "0:2.1-0", "0:2.0-5", 1)]
    public void TryCompare_SucceedsForSupportedSchemes(string scheme, string left, string right, int expected)
    {
        IDeterministicToolset toolset = new DeterministicToolset();

        toolset.TryCompare(scheme, left, right, out var comparison).Should().BeTrue();
        comparison.Should().Be(expected);
    }
    // Verifies SatisfiesRange accepts versions inside compound range expressions
    // (space-separated conjunctions of >=/< bounds) for both semver and EVR schemes.
    [Theory]
    [InlineData("semver", "1.2.3", ">=1.0.0 <2.0.0")]
    [InlineData("semver", "2.0.0", ">=2.0.0")]
    [InlineData("evr", "0:1.2-3", ">=0:1.0-0 <0:2.0-0")]
    [InlineData("evr", "1:3.4-1", ">=1:3.0-0")]
    public void SatisfiesRange_HonoursExpressions(string scheme, string version, string range)
    {
        IDeterministicToolset toolset = new DeterministicToolset();

        toolset.SatisfiesRange(scheme, version, range).Should().BeTrue();
    }
}

View File

@@ -0,0 +1,144 @@
using System;
using System.Linq;
using System.Net;
using System.Net.Http;
using System.Text;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.Extensions.Logging.Abstractions;
using Microsoft.Extensions.Options;
using StellaOps.AdvisoryAI.Providers;
using Xunit;
namespace StellaOps.AdvisoryAI.Tests;
public sealed class SbomContextHttpClientTests
{
[Fact]
public async Task GetContextAsync_MapsPayloadToDocument()
{
const string payload = """
{
"artifactId": "artifact-001",
"purl": "pkg:npm/react@18.3.0",
"versions": [
{
"version": "18.3.0",
"firstObserved": "2025-10-01T00:00:00Z",
"lastObserved": null,
"status": "affected",
"source": "inventory",
"isFixAvailable": false,
"metadata": { "note": "current" }
}
],
"dependencyPaths": [
{
"nodes": [
{ "identifier": "app", "version": "1.0.0" },
{ "identifier": "react", "version": "18.3.0" }
],
"isRuntime": true,
"source": "scanner",
"metadata": { "scope": "production" }
}
],
"environmentFlags": {
"environment/prod": "true"
},
"blastRadius": {
"impactedAssets": 10,
"impactedWorkloads": 4,
"impactedNamespaces": 2,
"impactedPercentage": 0.25,
"metadata": { "note": "simulated" }
},
"metadata": {
"source": "sbom-service"
}
}
""";
var handler = new StubHttpMessageHandler(_ => new HttpResponseMessage(HttpStatusCode.OK)
{
Content = new StringContent(payload, Encoding.UTF8, "application/json")
});
var httpClient = new HttpClient(handler)
{
BaseAddress = new Uri("https://sbom.example/")
};
var options = Options.Create(new SbomContextClientOptions
{
ContextEndpoint = "api/sbom/context",
Tenant = "tenant-alpha",
TenantHeaderName = "X-StellaOps-Tenant"
});
var client = new SbomContextHttpClient(httpClient, options, NullLogger<SbomContextHttpClient>.Instance);
var query = new SbomContextQuery("artifact-001", "pkg:npm/react@18.3.0", 25, 10, includeEnvironmentFlags: true, includeBlastRadius: true);
var document = await client.GetContextAsync(query, CancellationToken.None);
Assert.NotNull(document);
Assert.Equal("artifact-001", document!.ArtifactId);
Assert.Equal("pkg:npm/react@18.3.0", document.Purl);
Assert.Single(document.VersionTimeline);
Assert.Single(document.DependencyPaths);
Assert.Single(document.EnvironmentFlags);
Assert.NotNull(document.BlastRadius);
Assert.Equal("sbom-service", document.Metadata["source"]);
Assert.NotNull(handler.LastRequest);
Assert.Equal("tenant-alpha", handler.LastRequest!.Headers.GetValues("X-StellaOps-Tenant").Single());
Assert.Contains("artifactId=artifact-001", handler.LastRequest.RequestUri!.Query);
Assert.Contains("purl=pkg%3Anpm%2Freact%4018.3.0", handler.LastRequest.RequestUri!.Query);
Assert.Contains("includeEnvironmentFlags=true", handler.LastRequest.RequestUri!.Query);
Assert.Contains("includeBlastRadius=true", handler.LastRequest.RequestUri!.Query);
}
[Fact]
public async Task GetContextAsync_ReturnsNullOnNotFound()
{
var handler = new StubHttpMessageHandler(_ => new HttpResponseMessage(HttpStatusCode.NotFound));
var httpClient = new HttpClient(handler) { BaseAddress = new Uri("https://sbom.example/") };
var options = Options.Create(new SbomContextClientOptions());
var client = new SbomContextHttpClient(httpClient, options, NullLogger<SbomContextHttpClient>.Instance);
var result = await client.GetContextAsync(new SbomContextQuery("missing", null, 10, 5, false, false), CancellationToken.None);
Assert.Null(result);
}
[Fact]
public async Task GetContextAsync_ThrowsForServerError()
{
var handler = new StubHttpMessageHandler(_ => new HttpResponseMessage(HttpStatusCode.InternalServerError)
{
Content = new StringContent("{\"error\":\"boom\"}", Encoding.UTF8, "application/json")
});
var httpClient = new HttpClient(handler) { BaseAddress = new Uri("https://sbom.example/") };
var options = Options.Create(new SbomContextClientOptions());
var client = new SbomContextHttpClient(httpClient, options, NullLogger<SbomContextHttpClient>.Instance);
await Assert.ThrowsAsync<HttpRequestException>(() => client.GetContextAsync(new SbomContextQuery("artifact", null, 5, 5, false, false), CancellationToken.None));
}
/// <summary>
/// Minimal <see cref="HttpMessageHandler"/> stub that answers every request through a
/// caller-supplied delegate and records the most recent request for test assertions.
/// </summary>
private sealed class StubHttpMessageHandler : HttpMessageHandler
{
// Underscore-prefixed per the .NET private-field convention used elsewhere in this codebase (e.g. _store, _clock).
private readonly Func<HttpRequestMessage, HttpResponseMessage> _responder;

public StubHttpMessageHandler(Func<HttpRequestMessage, HttpResponseMessage> responder)
{
_responder = responder ?? throw new ArgumentNullException(nameof(responder));
}

/// <summary>The last request observed by this handler, or null if no request was sent yet.</summary>
public HttpRequestMessage? LastRequest { get; private set; }

protected override Task<HttpResponseMessage> SendAsync(HttpRequestMessage request, CancellationToken cancellationToken)
{
LastRequest = request;
return Task.FromResult(_responder(request));
}
}
}

View File

@@ -12,6 +12,7 @@
<PackageReference Include="xunit" Version="2.9.2" />
<PackageReference Include="xunit.runner.visualstudio" Version="2.8.2" />
<PackageReference Include="FluentAssertions" Version="6.12.0" />
<PackageReference Include="Microsoft.Extensions.Logging.Abstractions" Version="10.0.0-rc.2.25502.107" />
<PackageReference Include="coverlet.collector" Version="6.0.4" />
</ItemGroup>
<ItemGroup>

View File

@@ -1,8 +1,6 @@
using FluentAssertions;
using System;
using Microsoft.Extensions.DependencyInjection;
using StellaOps.AdvisoryAI.Caching;
using StellaOps.AdvisoryAI.DependencyInjection;
using StellaOps.AdvisoryAI.Metrics;
using StellaOps.AdvisoryAI.Orchestration;
using StellaOps.AdvisoryAI.Tools;
using Xunit;
@@ -30,6 +28,12 @@ public sealed class ToolsetServiceCollectionExtensionsTests
{
var services = new ServiceCollection();
services.AddSbomContext(options =>
{
options.BaseAddress = new Uri("https://sbom.example/");
options.Tenant = "tenant-alpha";
});
services.AddAdvisoryPipeline();
var provider = services.BuildServiceProvider();
@@ -38,17 +42,4 @@ public sealed class ToolsetServiceCollectionExtensionsTests
var again = provider.GetRequiredService<IAdvisoryPipelineOrchestrator>();
Assert.Same(orchestrator, again);
}
[Fact]
public void AddAdvisoryPipelineInfrastructure_RegistersDependencies()
{
var services = new ServiceCollection();
services.AddAdvisoryPipelineInfrastructure();
var provider = services.BuildServiceProvider();
provider.GetRequiredService<IAdvisoryPlanCache>().Should().NotBeNull();
provider.GetRequiredService<IAdvisoryTaskQueue>().Should().NotBeNull();
provider.GetRequiredService<AdvisoryPipelineMetrics>().Should().NotBeNull();
}
}

View File

@@ -1,7 +1,6 @@
# Authority Host Task Board — Epic 1: Aggregation-Only Contract
| ID | Status | Owner(s) | Depends on | Description | Exit Criteria |
|----|--------|----------|------------|-------------|---------------|
| SIGN-REPLAY-186-003 | TODO | Authority Core & Signing Guild | REPLAY-CORE-185-001 | Provide replay-aware DSSE profile configuration, RootPack selection, and multi-profile validation; document flow updates in `docs/modules/authority/architecture.md` referencing `docs/replay/DETERMINISTIC_REPLAY.md` Section 5. | Authority integration tests cover replay signing; docs merged; RootPack rotation guidance updated. |
> 2025-10-26: Rate limiter metadata/audit records now include tenants, password grant scopes/tenants enforced, token persistence + tests updated. Docs refresh tracked via AUTH-AOC-19-003.
> 2025-10-27: Client credential ingestion scopes now require tenant assignment; access token validation backfills tenants and rejects cross-tenant mismatches with tests.
> 2025-10-27: `dotnet test` blocked — Concelier build fails (`AdvisoryObservationQueryService` returns `ImmutableHashSet<string?>`), preventing Authority test suite run; waiting on Concelier fix before rerun.
@@ -73,8 +72,8 @@
| AUTH-POLICY-27-002 | DONE (2025-11-02) | Authority Core & Security Guild | AUTH-POLICY-27-001, REGISTRY-API-27-007 | Provide attestation signing service bindings (OIDC token exchange, cosign integration) and enforce publish/promote scope checks, fresh-auth requirements, and audit logging. | Publish/promote requests require fresh auth + correct scopes; attestations signed with validated identity; audit logs enriched with digest + tenant; integration tests pass. |
> Docs dependency: `DOCS-POLICY-27-009` awaiting signing guidance from this work.
> 2025-11-02: Added `policy:publish`/`policy:promote` scopes with interactive-only enforcement, metadata parameters (`policy_reason`, `policy_ticket`, `policy_digest`), fresh-auth token validation, audit augmentations, and updated config/docs references.
| AUTH-POLICY-27-003 | DONE (2025-11-03) | Authority Core & Docs Guild | AUTH-POLICY-27-001, AUTH-POLICY-27-002 | Update Authority configuration/docs for Policy Studio roles, signing policies, approval workflows, and CLI integration; include compliance checklist. | Docs merged; samples validated; governance checklist appended; release notes updated. |
> 2025-11-03: Authority/policy docs refreshed for publish/promote metadata, DSSE signing workflow, CLI commands, and compliance checklist alignment.
| AUTH-POLICY-27-003 | DONE (2025-11-04) | Authority Core & Docs Guild | AUTH-POLICY-27-001, AUTH-POLICY-27-002 | Update Authority configuration/docs for Policy Studio roles, signing policies, approval workflows, and CLI integration; include compliance checklist. | Docs merged; samples validated; governance checklist appended; release notes updated. |
> 2025-11-04: Policy Studio roles/scopes documented across `docs/11_AUTHORITY.md`, sample configs, and OpenAPI; compliance checklist appended and Authority tests rerun to validate fresh-auth + scope enforcement.
## Exceptions v1
@@ -95,10 +94,9 @@
|----|--------|----------|------------|-------------|---------------|
| AUTH-VULN-29-001 | DONE (2025-11-03) | Authority Core & Security Guild | AUTH-POLICY-27-001 | Define Vuln Explorer scopes/roles (`vuln:view`, `vuln:investigate`, `vuln:operate`, `vuln:audit`) with ABAC attributes (env, owner, business_tier) and update discovery metadata/offline kit defaults. | Roles/scopes published; issuer templates updated; integration tests cover ABAC filters; docs refreshed. |
| AUTH-VULN-29-002 | DONE (2025-11-03) | Authority Core & Security Guild | AUTH-VULN-29-001, LEDGER-29-002 | Enforce CSRF/anti-forgery tokens for workflow actions, sign attachment tokens, and record audit logs with ledger event hashes. | Workflow calls require valid tokens; audit logs include ledger references; security tests cover token expiry/abuse. |
| AUTH-VULN-29-003 | DONE (2025-11-03) | Authority Core & Docs Guild | AUTH-VULN-29-001..002 | Update security docs/config samples for Vuln Explorer roles, ABAC policies, attachment signing, and ledger verification guidance. | Docs merged with compliance checklist; configuration examples validated; release notes updated. |
> 2025-11-03: `docs/11_AUTHORITY.md`, `docs/security/authority-scopes.md`, Vuln Explorer architecture, and release updates refreshed; proofread post-build.
| AUTH-VULN-29-003 | DONE (2025-11-04) | Authority Core & Docs Guild | AUTH-VULN-29-001..002 | Update security docs/config samples for Vuln Explorer roles, ABAC policies, attachment signing, and ledger verification guidance. | Docs merged with compliance checklist; configuration examples validated; release notes updated. |
> 2025-11-03: Vuln workflow CSRF + attachment token services live with audit enrichment and negative-path tests. Awaiting completion of full Authority suite run after repository-wide build finishes.
> 2025-11-03: Continuing doc/config/release-note updates for Vuln Explorer roles, ABAC enforcement, attachment signing, and ledger verification guidance.
> 2025-11-04: Verified Vuln Explorer RBAC/ABAC coverage in Authority docs/security guides, attachment token guidance, and offline samples; Authority tests rerun confirming ledger-token + anti-forgery behaviours.
## Advisory AI (Sprint 31)
@@ -124,24 +122,24 @@
## CLI Parity & Task Packs
| ID | Status | Owner(s) | Depends on | Description | Exit Criteria |
|----|--------|----------|------------|-------------|---------------|
| AUTH-PACKS-41-001 | DONE (2025-11-03) | Authority Core & Security Guild | AUTH-AOC-19-001 | Define CLI SSO profiles and pack scopes (`Packs.Read`, `Packs.Write`, `Packs.Run`, `Packs.Approve`), update discovery metadata, offline defaults, and issuer templates. | Scopes available; metadata updated; tests ensure enforcement; offline kit templates refreshed. |
| AUTH-PACKS-41-001 | DONE (2025-11-04) | Authority Core & Security Guild | AUTH-AOC-19-001 | Define CLI SSO profiles and pack scopes (`Packs.Read`, `Packs.Write`, `Packs.Run`, `Packs.Approve`), update discovery metadata, offline defaults, and issuer templates. | Scopes available; metadata updated; tests ensure enforcement; offline kit templates refreshed. |
> 2025-11-02: Added Pack scope policies, Authority role defaults, and CLI profile guidance covering operator/publisher/approver flows.
> 2025-11-02: Shared OpenSSL 1.1 shim feeds Authority & Signals Mongo2Go harnesses so pack scope coverage keeps running on OpenSSL 3 hosts (AUTH-PACKS-41-001).
> 2025-11-03: Discovery metadata now emits `stellaops_packs_scopes_supported`; OpenAPI scope catalog and Authority tests updated. Offline kit + issuer templates already include `packs.*` roles.
> 2025-11-04: Discovery metadata/OpenAPI advertise packs scopes, configs/offline kit templates bundle new roles, and Authority tests re-run to validate tenant gating for `packs.*`.
| AUTH-PACKS-43-001 | BLOCKED (2025-10-27) | Authority Core & Security Guild | AUTH-PACKS-41-001, TASKRUN-42-001, ORCH-SVC-42-101 | Enforce pack signing policies, approval RBAC checks, CLI CI token scopes, and audit logging for approvals. | Signing policies enforced; approvals require correct roles; CI token scope tests pass; audit logs recorded. |
> Blocked: Awaiting Task Runner approval API (`ORCH-SVC-42-101`, `TASKRUN-42-001`) before enforcing pack approval workflows; Authority scope catalog + discovery metadata ready.
> Blocked: Task Runner approval APIs (`ORCH-SVC-42-101`, `TASKRUN-42-001`) still outstanding. Pack scope catalog (AUTH-PACKS-41-001) landed 2025-11-04; resume once execution/approval contracts are published.
## Authority-Backed Scopes & Tenancy (Epic 14)
| ID | Status | Owner(s) | Depends on | Description | Exit Criteria |
|----|--------|----------|------------|-------------|---------------|
> 2025-10-28: Tidied advisory raw idempotency migration to avoid LINQ-on-`BsonValue` (explicit array copy) while continuing duplicate guardrail validation; scoped scanner/policy token call sites updated to honor new metadata parameter.
| AUTH-TEN-49-001 | DONE (2025-11-03) | Authority Core & Security Guild | AUTH-TEN-47-001 | Implement service accounts & delegation tokens (`act` chain), per-tenant quotas, audit stream of auth decisions, and revocation APIs. | Service tokens minted with scopes/TTL; delegation logged; quotas configurable; audit stream live; docs updated. |
> 2025-11-03: Delegation quota/persistence tests added (`ServiceAccountAdminEndpointsTests`, `DelegationTokenAuditTests`), Authority suite re-run successfully.
| AUTH-TEN-49-001 | DONE (2025-11-04) | Authority Core & Security Guild | AUTH-TEN-47-001 | Implement service accounts & delegation tokens (`act` chain), per-tenant quotas, audit stream of auth decisions, and revocation APIs. | Service tokens minted with scopes/TTL; delegation logged; quotas configurable; audit stream live; docs updated. |
> 2025-11-02: Authority bootstrap test harness now seeds service accounts via AuthorityDelegation options; `/internal/service-accounts` endpoints validated with targeted vstest run.
> 2025-11-02: Added Mongo service-account store, seeded options/collection initializers, token persistence metadata (`tokenKind`, `serviceAccountId`, `actorChain`), and docs/config samples. Introduced quota checks + tests covering service account issuance and persistence.
> 2025-11-02: Documented bootstrap service-account admin APIs in `docs/11_AUTHORITY.md`, noting API key requirements and stable upsert behaviour.
> 2025-11-03: Seeded explicit enabled service-account fixtures for integration tests and reran `StellaOps.Authority.Tests` to greenlight `/internal/service-accounts` listing + revocation scenarios.
> 2025-11-03: Continuing to extend delegation token persistence/quota tests and audit coverage prior to completion (Authority Core & Security Guild).
> 2025-11-04: Confirmed service-account docs/config examples, quota tuning, and audit stream wiring; Authority suite re-executed to cover issuance/listing/revocation flows.
## Observability & Forensics (Epic 15)
| ID | Status | Owner(s) | Depends on | Description | Exit Criteria |
@@ -154,12 +152,12 @@
| ID | Status | Owner(s) | Depends on | Description | Exit Criteria |
|----|--------|----------|------------|-------------|---------------|
| AUTH-AIRGAP-56-001 | DONE (2025-11-03) | Authority Core & Security Guild | AIRGAP-CTL-56-001 | Provision new scopes (`airgap:seal`, `airgap:import`, `airgap:status:read`) in configuration metadata, offline kit defaults, and issuer templates. | Scopes exposed in discovery docs; offline kit updated; integration tests cover issuance. |
| AUTH-AIRGAP-56-002 | DONE (2025-11-03) | Authority Core & Security Guild | AUTH-AIRGAP-56-001, AIRGAP-IMP-58-001 | Audit import actions with actor, tenant, bundle ID, and trace ID; expose `/authority/audit/airgap` endpoint. | Audit records persisted; endpoint paginates results; tests cover RBAC + filtering. |
| AUTH-AIRGAP-56-001 | DONE (2025-11-04) | Authority Core & Security Guild | AIRGAP-CTL-56-001 | Provision new scopes (`airgap:seal`, `airgap:import`, `airgap:status:read`) in configuration metadata, offline kit defaults, and issuer templates. | Scopes exposed in discovery docs; offline kit updated; integration tests cover issuance. |
| AUTH-AIRGAP-56-002 | DONE (2025-11-04) | Authority Core & Security Guild | AUTH-AIRGAP-56-001, AIRGAP-IMP-58-001 | Audit import actions with actor, tenant, bundle ID, and trace ID; expose `/authority/audit/airgap` endpoint. | Audit records persisted; endpoint paginates results; tests cover RBAC + filtering. |
> 2025-11-04: Airgap scope constants are wired through discovery metadata, `etc/authority.yaml.sample`, and offline kit docs; scope issuance tests executed via `dotnet test`.
> 2025-11-04: `/authority/audit/airgap` API persists tenant-scoped audit entries with pagination and authorization guards validated by the Authority integration suite (187 tests).
| AUTH-AIRGAP-57-001 | BLOCKED (2025-11-01) | Authority Core & Security Guild, DevOps Guild | AUTH-AIRGAP-56-001, DEVOPS-AIRGAP-57-002 | Enforce sealed-mode CI gating by refusing token issuance when a declared sealed install lacks sealing confirmation. | Awaiting clarified sealed-confirmation contract and configuration structure before implementation. |
> 2025-11-01: AUTH-AIRGAP-57-001 blocked pending guidance on sealed-confirmation contract and configuration expectations before gating changes (Authority Core & Security Guild, DevOps Guild).
> 2025-11-03: Air-gap scopes wired through discovery metadata (`stellaops_airgap_scopes_supported`), sample configs, issuer templates, and offline kit roles; Authority OpenID discovery tests updated.
> 2025-11-03: `/authority/audit/airgap` endpoint finalized with Mongo-backed store, pagination/filters, and RBAC coverage in `AirgapAuditEndpointsTests`; Authority suite passing.
## SDKs & OpenAPI (Epic 17)
| ID | Status | Owner(s) | Depends on | Description | Exit Criteria |

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

View File

@@ -25,6 +25,8 @@ internal interface IBackendOperationsClient
Task<PolicySimulationResult> SimulatePolicyAsync(string policyId, PolicySimulationInput input, CancellationToken cancellationToken);
Task<TaskRunnerSimulationResult> SimulateTaskRunnerAsync(TaskRunnerSimulationRequest request, CancellationToken cancellationToken);
Task<PolicyActivationResult> ActivatePolicyRevisionAsync(string policyId, int version, PolicyActivationRequest request, CancellationToken cancellationToken);
Task<OfflineKitDownloadResult> DownloadOfflineKitAsync(string? bundleId, string destinationDirectory, bool overwrite, bool resume, CancellationToken cancellationToken);

View File

@@ -0,0 +1,36 @@
using System.Collections.Generic;
using System.Text.Json.Nodes;
namespace StellaOps.Cli.Services.Models;
/// <summary>Simulation request: the raw task-pack manifest text plus optional JSON inputs.</summary>
internal sealed record TaskRunnerSimulationRequest(string Manifest, JsonObject? Inputs);
/// <summary>
/// Simulation result: plan hash, effective failure policy, simulated step tree,
/// projected outputs, and whether any approval gates are still pending.
/// </summary>
internal sealed record TaskRunnerSimulationResult(
string PlanHash,
TaskRunnerSimulationFailurePolicy FailurePolicy,
IReadOnlyList<TaskRunnerSimulationStep> Steps,
IReadOnlyList<TaskRunnerSimulationOutput> Outputs,
bool HasPendingApprovals);
/// <summary>Retry/continuation policy applied to the simulated run. BackoffSeconds is presumably the delay between attempts — confirm against the backend contract.</summary>
internal sealed record TaskRunnerSimulationFailurePolicy(int MaxAttempts, int BackoffSeconds, bool ContinueOnError);
/// <summary>
/// One simulated step. Children carries nested steps (e.g. for parallel or grouped
/// steps); ApprovalId/GateMessage are populated for approval-gate steps.
/// </summary>
internal sealed record TaskRunnerSimulationStep(
string Id,
string TemplateId,
string Kind,
bool Enabled,
string Status,
string? StatusReason,
string? Uses,
string? ApprovalId,
string? GateMessage,
int? MaxParallel,
bool ContinueOnError,
IReadOnlyList<TaskRunnerSimulationStep> Children);
/// <summary>
/// One projected pack output. When RequiresRuntimeValue is true the value is only
/// known at execution time; otherwise PathExpression/ValueExpression describe it.
/// </summary>
internal sealed record TaskRunnerSimulationOutput(
string Name,
string Type,
bool RequiresRuntimeValue,
string? PathExpression,
string? ValueExpression);

View File

@@ -0,0 +1,73 @@
using System.Collections.Generic;
using System.Text.Json.Nodes;
namespace StellaOps.Cli.Services.Models.Transport;
/// <summary>Wire-level request body for the Task Runner simulation endpoint (mutable for JSON serialization).</summary>
internal sealed class TaskRunnerSimulationRequestDocument
{
public string Manifest { get; set; } = string.Empty;
public JsonObject? Inputs { get; set; }
}
/// <summary>
/// Wire-level response body for a simulation. Nullable collections reflect that the
/// backend may omit fields; callers should treat null as empty.
/// </summary>
internal sealed class TaskRunnerSimulationResponseDocument
{
public string PlanHash { get; set; } = string.Empty;
public TaskRunnerSimulationFailurePolicyDocument? FailurePolicy { get; set; }
public List<TaskRunnerSimulationStepDocument>? Steps { get; set; }
public List<TaskRunnerSimulationOutputDocument>? Outputs { get; set; }
public bool HasPendingApprovals { get; set; }
}
/// <summary>Wire-level failure policy: retry attempts, backoff, and continue-on-error flag.</summary>
internal sealed class TaskRunnerSimulationFailurePolicyDocument
{
public int MaxAttempts { get; set; }
public int BackoffSeconds { get; set; }
public bool ContinueOnError { get; set; }
}
/// <summary>Wire-level simulated step; Children may be null when the step has no nested steps.</summary>
internal sealed class TaskRunnerSimulationStepDocument
{
public string Id { get; set; } = string.Empty;
public string TemplateId { get; set; } = string.Empty;
public string Kind { get; set; } = string.Empty;
public bool Enabled { get; set; }
public string Status { get; set; } = string.Empty;
public string? StatusReason { get; set; }
public string? Uses { get; set; }
public string? ApprovalId { get; set; }
public string? GateMessage { get; set; }
public int? MaxParallel { get; set; }
public bool ContinueOnError { get; set; }
public List<TaskRunnerSimulationStepDocument>? Children { get; set; }
}
/// <summary>Wire-level projected output descriptor.</summary>
internal sealed class TaskRunnerSimulationOutputDocument
{
public string Name { get; set; } = string.Empty;
public string Type { get; set; } = string.Empty;
public bool RequiresRuntimeValue { get; set; }
public string? PathExpression { get; set; }
public string? ValueExpression { get; set; }
}

View File

@@ -13,6 +13,7 @@ internal static class CliMetrics
private static readonly Counter<long> OfflineKitDownloadCounter = Meter.CreateCounter<long>("stellaops.cli.offline.kit.download.count");
private static readonly Counter<long> OfflineKitImportCounter = Meter.CreateCounter<long>("stellaops.cli.offline.kit.import.count");
private static readonly Counter<long> PolicySimulationCounter = Meter.CreateCounter<long>("stellaops.cli.policy.simulate.count");
private static readonly Counter<long> TaskRunnerSimulationCounter = Meter.CreateCounter<long>("stellaops.cli.taskrunner.simulate.count");
private static readonly Counter<long> PolicyActivationCounter = Meter.CreateCounter<long>("stellaops.cli.policy.activate.count");
private static readonly Counter<long> SourcesDryRunCounter = Meter.CreateCounter<long>("stellaops.cli.sources.dryrun.count");
private static readonly Counter<long> AocVerifyCounter = Meter.CreateCounter<long>("stellaops.cli.aoc.verify.count");
@@ -57,6 +58,12 @@ internal static class CliMetrics
new("outcome", string.IsNullOrWhiteSpace(outcome) ? "unknown" : outcome)
});
public static void RecordTaskRunnerSimulation(string outcome)
=> TaskRunnerSimulationCounter.Add(1, new KeyValuePair<string, object?>[]
{
new("outcome", string.IsNullOrWhiteSpace(outcome) ? "unknown" : outcome)
});
public static void RecordPolicyActivation(string outcome)
=> PolicyActivationCounter.Add(1, new KeyValuePair<string, object?>[]
{

View File

@@ -8,7 +8,8 @@ using System.Net;
using System.Net.Http;
using System.Security.Cryptography;
using System.Text;
using System.Text.Json;
using System.Text.Json;
using System.Text.Json.Nodes;
using System.Text.Json.Serialization;
using System.Threading;
using System.Threading.Tasks;
@@ -1535,11 +1536,11 @@ public sealed class CommandHandlersTests
}
[Fact]
public async Task HandlePolicySimulateAsync_MapsErrorCodes()
{
var originalExit = Environment.ExitCode;
var originalOut = Console.Out;
public async Task HandlePolicySimulateAsync_MapsErrorCodes()
{
var originalExit = Environment.ExitCode;
var originalOut = Console.Out;
var backend = new StubBackendClient(new JobTriggerResult(true, "ok", null, null))
{
SimulationException = new PolicyApiException("Missing inputs", HttpStatusCode.BadRequest, "ERR_POL_003")
@@ -1566,18 +1567,185 @@ public sealed class CommandHandlersTests
cancellationToken: CancellationToken.None);
Assert.Equal(21, Environment.ExitCode);
}
finally
{
Console.SetOut(originalOut);
Environment.ExitCode = originalExit;
}
}
[Fact]
public async Task HandlePolicyActivateAsync_DisplaysInteractiveSummary()
{
var originalExit = Environment.ExitCode;
}
finally
{
Console.SetOut(originalOut);
Environment.ExitCode = originalExit;
}
}
[Fact]
public async Task HandleTaskRunnerSimulateAsync_WritesInteractiveSummary()
{
// Snapshot process-global state (exit code, Spectre console) so the test can restore it.
var originalExit = Environment.ExitCode;
var originalConsole = AnsiConsole.Console;
// Interactive TestConsole captures the rendered summary for assertions.
var console = new TestConsole();
console.Width(120);
console.Interactive();
console.EmitAnsiSequences();
AnsiConsole.Console = console;
// Manifest with one run step and one approval gate; content must stay byte-exact.
const string manifest = """
apiVersion: stellaops.io/pack.v1
kind: TaskPack
metadata:
name: sample-pack
spec:
steps:
- id: prepare
run:
uses: builtin:prepare
- id: approval
gate:
approval:
id: security-review
message: Security approval required.
""";
using var manifestFile = new TempFile("pack.yaml", Encoding.UTF8.GetBytes(manifest));
// Canned backend result: a succeeded run step plus a pending approval gate, one output,
// and HasPendingApprovals=true so the summary should surface the approval.
var simulationResult = new TaskRunnerSimulationResult(
"hash-abc123",
new TaskRunnerSimulationFailurePolicy(3, 15, false),
new[]
{
new TaskRunnerSimulationStep(
"prepare",
"prepare",
"Run",
true,
"succeeded",
null,
"builtin:prepare",
null,
null,
null,
false,
Array.Empty<TaskRunnerSimulationStep>()),
new TaskRunnerSimulationStep(
"approval",
"approval",
"GateApproval",
true,
"pending",
"requires-approval",
null,
"security-review",
"Security approval required.",
null,
false,
Array.Empty<TaskRunnerSimulationStep>())
},
new[]
{
new TaskRunnerSimulationOutput("bundlePath", "file", false, "artifacts/report.json", null)
},
true);
var backend = new StubBackendClient(new JobTriggerResult(true, "ok", null, null))
{
TaskRunnerSimulationResult = simulationResult
};
var provider = BuildServiceProvider(backend);
try
{
// No format/output path: the handler should render the interactive table view.
await CommandHandlers.HandleTaskRunnerSimulateAsync(
provider,
manifestFile.Path,
inputsPath: null,
format: null,
outputPath: null,
verbose: false,
cancellationToken: CancellationToken.None);
Assert.Equal(0, Environment.ExitCode);
Assert.NotNull(backend.LastTaskRunnerSimulationRequest);
// Summary must mention the pending approval and the plan hash heading.
Assert.Contains("approval", console.Output, StringComparison.OrdinalIgnoreCase);
Assert.Contains("Plan Hash", console.Output, StringComparison.OrdinalIgnoreCase);
}
finally
{
// Restore globals regardless of outcome so sibling tests are unaffected.
AnsiConsole.Console = originalConsole;
Environment.ExitCode = originalExit;
}
}
[Fact]
public async Task HandleTaskRunnerSimulateAsync_WritesJsonOutput()
{
// Snapshot process-global state (exit code, stdout) so the test can restore it.
var originalExit = Environment.ExitCode;
var originalOut = Console.Out;
// Minimal single-step manifest; content must stay byte-exact.
const string manifest = """
apiVersion: stellaops.io/pack.v1
kind: TaskPack
metadata:
name: sample-pack
spec:
steps:
- id: prepare
run:
uses: builtin:prepare
""";
using var manifestFile = new TempFile("pack.yaml", Encoding.UTF8.GetBytes(manifest));
// Inputs file carries dryRun=false; asserted later to prove inputs were forwarded.
using var inputsFile = new TempFile("inputs.json", Encoding.UTF8.GetBytes("{\"dryRun\":false}"));
using var outputDirectory = new TempDirectory();
var outputPath = Path.Combine(outputDirectory.Path, "simulation.json");
// Canned backend result with an empty plan; only the plan hash is asserted.
var simulationResult = new TaskRunnerSimulationResult(
"hash-xyz789",
new TaskRunnerSimulationFailurePolicy(2, 10, true),
Array.Empty<TaskRunnerSimulationStep>(),
Array.Empty<TaskRunnerSimulationOutput>(),
false);
var backend = new StubBackendClient(new JobTriggerResult(true, "ok", null, null))
{
TaskRunnerSimulationResult = simulationResult
};
var provider = BuildServiceProvider(backend);
// Capture stdout so the JSON echoed to the console can be inspected.
using var writer = new StringWriter();
Console.SetOut(writer);
try
{
// format=json + outputPath: JSON should go both to stdout and to the file.
await CommandHandlers.HandleTaskRunnerSimulateAsync(
provider,
manifestFile.Path,
inputsFile.Path,
format: "json",
outputPath: outputPath,
verbose: false,
cancellationToken: CancellationToken.None);
Assert.Equal(0, Environment.ExitCode);
Assert.NotNull(backend.LastTaskRunnerSimulationRequest);
var consoleOutput = writer.ToString();
Assert.Contains("\"planHash\":\"hash-xyz789\"", consoleOutput, StringComparison.Ordinal);
var fileOutput = await File.ReadAllTextAsync(outputPath);
Assert.Contains("\"planHash\":\"hash-xyz789\"", fileOutput, StringComparison.Ordinal);
// The inputs file content must reach the backend request unchanged.
Assert.True(backend.LastTaskRunnerSimulationRequest!.Inputs!.TryGetPropertyValue("dryRun", out var dryRunNode));
Assert.False(dryRunNode!.GetValue<bool>());
}
finally
{
// Restore globals regardless of outcome so sibling tests are unaffected.
Console.SetOut(originalOut);
Environment.ExitCode = originalExit;
}
}
[Fact]
public async Task HandlePolicyActivateAsync_DisplaysInteractiveSummary()
{
var originalExit = Environment.ExitCode;
var originalConsole = AnsiConsole.Console;
var console = new TestConsole();
@@ -2397,7 +2565,15 @@ public sealed class CommandHandlersTests
new ReadOnlyCollection<PolicySimulationRuleDelta>(Array.Empty<PolicySimulationRuleDelta>())),
null);
public PolicyApiException? SimulationException { get; set; }
public (string PolicyId, PolicySimulationInput Input)? LastPolicySimulation { get; private set; }
public (string PolicyId, PolicySimulationInput Input)? LastPolicySimulation { get; private set; }
public TaskRunnerSimulationRequest? LastTaskRunnerSimulationRequest { get; private set; }
public TaskRunnerSimulationResult TaskRunnerSimulationResult { get; set; } = new(
string.Empty,
new TaskRunnerSimulationFailurePolicy(1, 0, false),
Array.Empty<TaskRunnerSimulationStep>(),
Array.Empty<TaskRunnerSimulationOutput>(),
false);
public Exception? TaskRunnerSimulationException { get; set; }
public PolicyActivationResult ActivationResult { get; set; } = new PolicyActivationResult(
"activated",
new PolicyActivationRevision(
@@ -2486,17 +2662,28 @@ public sealed class CommandHandlersTests
public Task<RuntimePolicyEvaluationResult> EvaluateRuntimePolicyAsync(RuntimePolicyEvaluationRequest request, CancellationToken cancellationToken)
=> Task.FromResult(RuntimePolicyResult);
public Task<PolicySimulationResult> SimulatePolicyAsync(string policyId, PolicySimulationInput input, CancellationToken cancellationToken)
{
LastPolicySimulation = (policyId, input);
if (SimulationException is not null)
{
throw SimulationException;
}
return Task.FromResult(SimulationResult);
}
public Task<PolicySimulationResult> SimulatePolicyAsync(string policyId, PolicySimulationInput input, CancellationToken cancellationToken)
{
LastPolicySimulation = (policyId, input);
if (SimulationException is not null)
{
throw SimulationException;
}
return Task.FromResult(SimulationResult);
}
public Task<TaskRunnerSimulationResult> SimulateTaskRunnerAsync(TaskRunnerSimulationRequest request, CancellationToken cancellationToken)
{
// Record the request so tests can assert what the handler sent to the backend.
LastTaskRunnerSimulationRequest = request;

// A configured exception is thrown synchronously (not wrapped in a faulted task),
// matching how the sibling policy-simulation stub behaves.
return TaskRunnerSimulationException is null
? Task.FromResult(TaskRunnerSimulationResult)
: throw TaskRunnerSimulationException;
}
public Task<PolicyActivationResult> ActivatePolicyRevisionAsync(string policyId, int version, PolicyActivationRequest request, CancellationToken cancellationToken)
{
LastPolicyActivation = (policyId, version, request);

View File

@@ -3,10 +3,10 @@
|----|--------|----------|------------|-------------|---------------|
| ISSUER-30-001 | DONE (2025-11-01) | Issuer Directory Guild | AUTH-VULN-29-001 | Implement issuer CRUD API with RBAC, audit logging, and tenant scoping; seed CSAF publisher metadata. | APIs deployed; audit logs capture actor/reason; seed data imported; tests cover RBAC. |
| ISSUER-30-002 | DONE (2025-11-01) | Issuer Directory Guild, Security Guild | ISSUER-30-001 | Implement key management endpoints (add/rotate/revoke keys), enforce expiry, validate formats (Ed25519, X.509, DSSE). | Keys stored securely; expiry enforced; validation tests cover key types; docs updated. |
| ISSUER-30-003 | DONE (2025-11-03) | Issuer Directory Guild, Policy Guild | ISSUER-30-001 | Provide trust weight APIs and tenant overrides with validation (+/- bounds) and audit trails. | Trust overrides persisted; policy integration confirmed; tests cover overrides. |
| ISSUER-30-003 | DONE (2025-11-04) | Issuer Directory Guild, Policy Guild | ISSUER-30-001 | Provide trust weight APIs and tenant overrides with validation (+/- bounds) and audit trails. | Trust overrides persisted; policy integration confirmed; tests cover overrides. |
> 2025-11-04: `/issuer-directory/issuers/{id}/trust` endpoints deliver bounded overrides with audit logging, Mongo indexes seeded for uniqueness, config/docs updated, and core tests executed (`dotnet test`).
| ISSUER-30-004 | DONE (2025-11-01) | Issuer Directory Guild, VEX Lens Guild | ISSUER-30-001..003 | Integrate with VEX Lens and Excitor signature verification (client SDK, caching, retries). | Lens/Excitor resolve issuer metadata via SDK; integration tests cover network failures. |
| ISSUER-30-005 | DONE (2025-11-01) | Issuer Directory Guild, Observability Guild | ISSUER-30-001..004 | Instrument metrics/logs (issuer changes, key rotation, verification failures) and dashboards/alerts. | Telemetry live; alerts configured; docs updated. |
| ISSUER-30-006 | DONE (2025-11-02) | Issuer Directory Guild, DevOps Guild | ISSUER-30-001..005 | Provide deployment manifests, backup/restore, secure secret storage, and offline kit instructions. | Deployment docs merged; smoke deploy validated; backup tested; offline kit updated. |
> 2025-11-01: Excititor worker now queries Issuer Directory during attestation verification, caching active key metadata and trust weights for tenant/global scopes.
> 2025-11-03: Trust override APIs/client helpers merged; reflection-based client tests cover cache eviction and failure paths; Issuer Directory Core tests passed.

View File

@@ -86,109 +86,287 @@ internal sealed class GraphJobService : IGraphJobService
}
public async Task<GraphJobCollection> GetJobsAsync(string tenantId, GraphJobQuery query, CancellationToken cancellationToken)
{
// Pure pass-through: the store handles tenant scoping and query filtering.
var jobs = await _store.GetJobsAsync(tenantId, query, cancellationToken);
return jobs;
}
public async Task<GraphJobResponse> CompleteJobAsync(string tenantId, GraphJobCompletionRequest request, CancellationToken cancellationToken)
{
if (request.Status is not (GraphJobStatus.Completed or GraphJobStatus.Failed or GraphJobStatus.Cancelled))
{
throw new ValidationException("Completion requires status completed, failed, or cancelled.");
}
var occurredAt = request.OccurredAt == default ? _clock.UtcNow : request.OccurredAt.ToUniversalTime();
switch (request.JobType)
{
case GraphJobQueryType.Build:
{
var existing = await _store.GetBuildJobAsync(tenantId, request.JobId, cancellationToken);
if (existing is null)
{
throw new KeyNotFoundException($"Graph build job '{request.JobId}' not found.");
}
var current = existing;
if (current.Status is GraphJobStatus.Pending or GraphJobStatus.Queued)
{
current = GraphJobStateMachine.EnsureTransition(current, GraphJobStatus.Running, occurredAt, attempts: current.Attempts);
}
var updated = GraphJobStateMachine.EnsureTransition(current, request.Status, occurredAt, attempts: current.Attempts + 1, errorMessage: request.Error);
var metadata = MergeMetadata(updated.Metadata, request.ResultUri);
var normalized = new GraphBuildJob(
id: updated.Id,
tenantId: updated.TenantId,
sbomId: updated.SbomId,
sbomVersionId: updated.SbomVersionId,
sbomDigest: updated.SbomDigest,
graphSnapshotId: request.GraphSnapshotId?.Trim() ?? updated.GraphSnapshotId,
status: updated.Status,
trigger: updated.Trigger,
attempts: updated.Attempts,
cartographerJobId: updated.CartographerJobId,
correlationId: request.CorrelationId?.Trim() ?? updated.CorrelationId,
createdAt: updated.CreatedAt,
startedAt: updated.StartedAt,
completedAt: updated.CompletedAt,
error: updated.Error,
metadata: metadata,
schemaVersion: updated.SchemaVersion);
var stored = await _store.UpdateAsync(normalized, cancellationToken);
var response = GraphJobResponse.From(stored);
await PublishCompletionAsync(tenantId, GraphJobQueryType.Build, request.Status, occurredAt, response, request.ResultUri, request.CorrelationId, request.Error, cancellationToken);
return response;
}
case GraphJobQueryType.Overlay:
{
var existing = await _store.GetOverlayJobAsync(tenantId, request.JobId, cancellationToken);
if (existing is null)
{
throw new KeyNotFoundException($"Graph overlay job '{request.JobId}' not found.");
}
var current = existing;
if (current.Status is GraphJobStatus.Pending or GraphJobStatus.Queued)
{
current = GraphJobStateMachine.EnsureTransition(current, GraphJobStatus.Running, occurredAt, attempts: current.Attempts);
}
var updated = GraphJobStateMachine.EnsureTransition(current, request.Status, occurredAt, attempts: current.Attempts + 1, errorMessage: request.Error);
var metadata = MergeMetadata(updated.Metadata, request.ResultUri);
var normalized = new GraphOverlayJob(
id: updated.Id,
tenantId: updated.TenantId,
graphSnapshotId: updated.GraphSnapshotId,
buildJobId: updated.BuildJobId,
overlayKind: updated.OverlayKind,
overlayKey: updated.OverlayKey,
subjects: updated.Subjects,
status: updated.Status,
trigger: updated.Trigger,
attempts: updated.Attempts,
correlationId: request.CorrelationId?.Trim() ?? updated.CorrelationId,
createdAt: updated.CreatedAt,
startedAt: updated.StartedAt,
completedAt: updated.CompletedAt,
error: updated.Error,
metadata: metadata,
schemaVersion: updated.SchemaVersion);
var stored = await _store.UpdateAsync(normalized, cancellationToken);
var response = GraphJobResponse.From(stored);
await PublishCompletionAsync(tenantId, GraphJobQueryType.Overlay, request.Status, occurredAt, response, request.ResultUri, request.CorrelationId, request.Error, cancellationToken);
return response;
}
default:
throw new ValidationException("Unsupported job type.");
}
}
public async Task<OverlayLagMetricsResponse> GetOverlayLagMetricsAsync(string tenantId, CancellationToken cancellationToken)
{
return await _store.GetJobsAsync(tenantId, query, cancellationToken);
}
/// <summary>
/// Completes a graph build or overlay job: validates that the requested status is terminal,
/// normalizes the request fields, loads the tenant's job, and delegates to the type-specific
/// completion routine.
/// </summary>
/// <exception cref="ValidationException">Status is not terminal, or the job type is unsupported.</exception>
/// <exception cref="KeyNotFoundException">No job with the given id exists for the tenant.</exception>
public async Task<GraphJobResponse> CompleteJobAsync(string tenantId, GraphJobCompletionRequest request, CancellationToken cancellationToken)
{
// Only terminal statuses may be reported through this endpoint.
if (request.Status is not (GraphJobStatus.Completed or GraphJobStatus.Failed or GraphJobStatus.Cancelled))
{
throw new ValidationException("Completion requires status completed, failed, or cancelled.");
}
// Fall back to the service clock when the caller supplied no timestamp; otherwise force UTC.
var occurredAt = request.OccurredAt == default ? _clock.UtcNow : request.OccurredAt.ToUniversalTime();
// Trim inputs; whitespace-only values are treated as absent.
var graphSnapshotId = Normalize(request.GraphSnapshotId);
var correlationId = Normalize(request.CorrelationId);
var resultUri = Normalize(request.ResultUri);
// Error text is only meaningful for failed completions; drop it for completed/cancelled.
var error = request.Status == GraphJobStatus.Failed ? Normalize(request.Error) : null;
switch (request.JobType)
{
case GraphJobQueryType.Build:
{
var existing = await _store.GetBuildJobAsync(tenantId, request.JobId, cancellationToken).ConfigureAwait(false);
if (existing is null)
{
throw new KeyNotFoundException($"Graph build job '{request.JobId}' not found.");
}
return await CompleteBuildJobInternal(
tenantId,
existing,
request.Status,
occurredAt,
graphSnapshotId,
correlationId,
resultUri,
error,
cancellationToken).ConfigureAwait(false);
}
case GraphJobQueryType.Overlay:
{
var existing = await _store.GetOverlayJobAsync(tenantId, request.JobId, cancellationToken).ConfigureAwait(false);
if (existing is null)
{
throw new KeyNotFoundException($"Graph overlay job '{request.JobId}' not found.");
}
return await CompleteOverlayJobInternal(
tenantId,
existing,
request.Status,
occurredAt,
graphSnapshotId,
correlationId,
resultUri,
error,
cancellationToken).ConfigureAwait(false);
}
default:
throw new ValidationException("Unsupported job type.");
}
}
/// <summary>
/// Applies the completion transition to a build job under optimistic concurrency.
/// Retries up to three times: each iteration computes the desired state from the latest
/// snapshot and attempts a conditional store update guarded by the snapshot's status.
/// The completion event/webhook is published at most once per successful status change;
/// a metadata-only change (e.g. a refreshed result URI) is persisted without publishing.
/// </summary>
private async Task<GraphJobResponse> CompleteBuildJobInternal(
string tenantId,
GraphBuildJob current,
GraphJobStatus requestedStatus,
DateTimeOffset occurredAt,
string? graphSnapshotId,
string? correlationId,
string? resultUri,
string? error,
CancellationToken cancellationToken)
{
var latest = current;
for (var attempt = 0; attempt < 3; attempt++)
{
var transition = PrepareBuildTransition(latest, requestedStatus, occurredAt, graphSnapshotId, correlationId, resultUri, error);
if (!transition.HasChanges)
{
// Idempotent repeat: nothing to write, return the stored snapshot.
return GraphJobResponse.From(latest);
}
var updateResult = await _store.UpdateAsync(transition.Job, transition.ExpectedStatus, cancellationToken).ConfigureAwait(false);
if (updateResult.Updated)
{
var stored = updateResult.Job;
var response = GraphJobResponse.From(stored);
if (transition.ShouldPublish)
{
await PublishCompletionAsync(
tenantId,
GraphJobQueryType.Build,
stored.Status,
occurredAt,
response,
ExtractResultUri(response),
stored.CorrelationId,
stored.Error,
cancellationToken).ConfigureAwait(false);
}
return response;
}
// Lost the race: another writer changed the job; retry from its current state.
latest = updateResult.Job;
}
// Retries exhausted — return whatever state the store currently holds.
return GraphJobResponse.From(latest);
}
/// <summary>
/// Overlay-job counterpart of <see cref="CompleteBuildJobInternal"/>: optimistic-concurrency
/// retry loop (max three attempts) that persists the transition and publishes the completion
/// notification only when the status actually changed.
/// </summary>
private async Task<GraphJobResponse> CompleteOverlayJobInternal(
string tenantId,
GraphOverlayJob current,
GraphJobStatus requestedStatus,
DateTimeOffset occurredAt,
string? graphSnapshotId,
string? correlationId,
string? resultUri,
string? error,
CancellationToken cancellationToken)
{
var latest = current;
for (var attempt = 0; attempt < 3; attempt++)
{
var transition = PrepareOverlayTransition(latest, requestedStatus, occurredAt, graphSnapshotId, correlationId, resultUri, error);
if (!transition.HasChanges)
{
// Idempotent repeat: nothing to write.
return GraphJobResponse.From(latest);
}
var updateResult = await _store.UpdateAsync(transition.Job, transition.ExpectedStatus, cancellationToken).ConfigureAwait(false);
if (updateResult.Updated)
{
var stored = updateResult.Job;
var response = GraphJobResponse.From(stored);
if (transition.ShouldPublish)
{
await PublishCompletionAsync(
tenantId,
GraphJobQueryType.Overlay,
stored.Status,
occurredAt,
response,
ExtractResultUri(response),
stored.CorrelationId,
stored.Error,
cancellationToken).ConfigureAwait(false);
}
return response;
}
// Lost the optimistic-concurrency race; retry from the stored snapshot.
latest = updateResult.Job;
}
return GraphJobResponse.From(latest);
}
/// <summary>
/// Computes the desired build-job state for a completion request without touching the store.
/// Pending/queued jobs are first advanced to running; attempts are incremented only when the
/// status actually changes; the result URI is merged into metadata only when new or different.
/// Returns the normalized job, the status the store must still hold (CAS guard), whether any
/// field changed at all, and whether a completion event should be published.
/// </summary>
private static CompletionTransition<GraphBuildJob> PrepareBuildTransition(
GraphBuildJob current,
GraphJobStatus requestedStatus,
DateTimeOffset occurredAt,
string? graphSnapshotId,
string? correlationId,
string? resultUri,
string? error)
{
var transitional = current;
if (transitional.Status is GraphJobStatus.Pending or GraphJobStatus.Queued)
{
// The state machine requires passing through Running before a terminal state.
transitional = GraphJobStateMachine.EnsureTransition(transitional, GraphJobStatus.Running, occurredAt, attempts: transitional.Attempts);
}
// No attempt is consumed when the job is already in the requested (terminal) status.
var desiredAttempts = transitional.Status == requestedStatus ? transitional.Attempts : transitional.Attempts + 1;
var updated = GraphJobStateMachine.EnsureTransition(transitional, requestedStatus, occurredAt, attempts: desiredAttempts, errorMessage: error);
var metadata = updated.Metadata;
if (resultUri is { Length: > 0 })
{
// Only rewrite metadata when the URI is new or differs from the stored value.
if (!metadata.TryGetValue("resultUri", out var existingValue) || !string.Equals(existingValue, resultUri, StringComparison.Ordinal))
{
metadata = MergeMetadata(metadata, resultUri);
}
}
var normalized = new GraphBuildJob(
id: updated.Id,
tenantId: updated.TenantId,
sbomId: updated.SbomId,
sbomVersionId: updated.SbomVersionId,
sbomDigest: updated.SbomDigest,
graphSnapshotId: graphSnapshotId ?? updated.GraphSnapshotId,
status: updated.Status,
trigger: updated.Trigger,
attempts: updated.Attempts,
cartographerJobId: updated.CartographerJobId,
correlationId: correlationId ?? updated.CorrelationId,
createdAt: updated.CreatedAt,
startedAt: updated.StartedAt,
completedAt: updated.CompletedAt,
error: updated.Error,
metadata: metadata,
schemaVersion: updated.SchemaVersion);
// Publish only on a genuine status transition, not on metadata-only refreshes.
var hasChanges = !normalized.Equals(current);
var shouldPublish = hasChanges && current.Status != normalized.Status;
return new CompletionTransition<GraphBuildJob>(normalized, current.Status, hasChanges, shouldPublish);
}
/// <summary>
/// Overlay-job counterpart of <see cref="PrepareBuildTransition"/>: pure computation of the
/// normalized post-completion state, the expected-status CAS guard, and the change/publish flags.
/// </summary>
private static CompletionTransition<GraphOverlayJob> PrepareOverlayTransition(
GraphOverlayJob current,
GraphJobStatus requestedStatus,
DateTimeOffset occurredAt,
string? graphSnapshotId,
string? correlationId,
string? resultUri,
string? error)
{
var transitional = current;
if (transitional.Status is GraphJobStatus.Pending or GraphJobStatus.Queued)
{
// Advance to Running first; the state machine forbids a direct pending→terminal jump.
transitional = GraphJobStateMachine.EnsureTransition(transitional, GraphJobStatus.Running, occurredAt, attempts: transitional.Attempts);
}
// Attempts only grow when the status actually changes (keeps repeats idempotent).
var desiredAttempts = transitional.Status == requestedStatus ? transitional.Attempts : transitional.Attempts + 1;
var updated = GraphJobStateMachine.EnsureTransition(transitional, requestedStatus, occurredAt, attempts: desiredAttempts, errorMessage: error);
var metadata = updated.Metadata;
if (resultUri is { Length: > 0 })
{
// Merge the result URI only when new or changed.
if (!metadata.TryGetValue("resultUri", out var existingValue) || !string.Equals(existingValue, resultUri, StringComparison.Ordinal))
{
metadata = MergeMetadata(metadata, resultUri);
}
}
var normalized = new GraphOverlayJob(
id: updated.Id,
tenantId: updated.TenantId,
graphSnapshotId: graphSnapshotId ?? updated.GraphSnapshotId,
buildJobId: updated.BuildJobId,
overlayKind: updated.OverlayKind,
overlayKey: updated.OverlayKey,
subjects: updated.Subjects,
status: updated.Status,
trigger: updated.Trigger,
attempts: updated.Attempts,
correlationId: correlationId ?? updated.CorrelationId,
createdAt: updated.CreatedAt,
startedAt: updated.StartedAt,
completedAt: updated.CompletedAt,
error: updated.Error,
metadata: metadata,
schemaVersion: updated.SchemaVersion);
var hasChanges = !normalized.Equals(current);
var shouldPublish = hasChanges && current.Status != normalized.Status;
return new CompletionTransition<GraphOverlayJob>(normalized, current.Status, hasChanges, shouldPublish);
}
/// <summary>Trims the value; null, empty, and whitespace-only inputs collapse to <c>null</c>.</summary>
private static string? Normalize(string? value)
{
    if (string.IsNullOrWhiteSpace(value))
    {
        return null;
    }

    return value.Trim();
}
/// <summary>
/// Reads the "resultUri" metadata entry from the response payload, for either job flavour.
/// Returns <c>null</c> when the payload type is unknown or the entry is absent.
/// </summary>
private static string? ExtractResultUri(GraphJobResponse response)
{
    if (response.Payload is GraphBuildJob build && build.Metadata.TryGetValue("resultUri", out var buildUri))
    {
        return buildUri;
    }

    if (response.Payload is GraphOverlayJob overlay && overlay.Metadata.TryGetValue("resultUri", out var overlayUri))
    {
        return overlayUri;
    }

    return null;
}
// Result of a Prepare*Transition call: the normalized job to write, the status the store must
// still hold for the conditional update to apply, whether anything changed at all, and whether
// a completion event/webhook should be emitted after a successful write.
private sealed record CompletionTransition<TJob>(TJob Job, GraphJobStatus ExpectedStatus, bool HasChanges, bool ShouldPublish)
where TJob : class;
public async Task<OverlayLagMetricsResponse> GetOverlayLagMetricsAsync(string tenantId, CancellationToken cancellationToken)
{
var now = _clock.UtcNow;
var overlayJobs = await _store.GetOverlayJobsAsync(tenantId, cancellationToken);

View File

@@ -0,0 +1,8 @@
namespace StellaOps.Scheduler.WebService.GraphJobs;
/// <summary>
/// Outcome of a status-guarded job update: whether the replace was applied, together with the
/// job snapshot (the new document on success, the currently stored one on a guard mismatch).
/// </summary>
internal readonly record struct GraphJobUpdateResult<TJob>(bool Updated, TJob Job) where TJob : class
{
    /// <summary>The conditional update was applied; <paramref name="job"/> is the stored document.</summary>
    public static GraphJobUpdateResult<TJob> UpdatedResult(TJob job)
    {
        return new GraphJobUpdateResult<TJob>(true, job);
    }

    /// <summary>The expected status did not match; <paramref name="job"/> is the current stored document.</summary>
    public static GraphJobUpdateResult<TJob> NotUpdated(TJob job)
    {
        return new GraphJobUpdateResult<TJob>(false, job);
    }
}

View File

@@ -14,9 +14,9 @@ public interface IGraphJobStore
ValueTask<GraphOverlayJob?> GetOverlayJobAsync(string tenantId, string jobId, CancellationToken cancellationToken);
ValueTask<GraphBuildJob> UpdateAsync(GraphBuildJob job, CancellationToken cancellationToken);
ValueTask<GraphOverlayJob> UpdateAsync(GraphOverlayJob job, CancellationToken cancellationToken);
ValueTask<GraphJobUpdateResult<GraphBuildJob>> UpdateAsync(GraphBuildJob job, GraphJobStatus expectedStatus, CancellationToken cancellationToken);
ValueTask<GraphJobUpdateResult<GraphOverlayJob>> UpdateAsync(GraphOverlayJob job, GraphJobStatus expectedStatus, CancellationToken cancellationToken);
ValueTask<IReadOnlyCollection<GraphOverlayJob>> GetOverlayJobsAsync(string tenantId, CancellationToken cancellationToken);
}

View File

@@ -1,4 +1,5 @@
using System.Collections.Concurrent;
using System.Collections.Concurrent;
using System.Collections.Generic;
using StellaOps.Scheduler.Models;
namespace StellaOps.Scheduler.WebService.GraphJobs;
@@ -60,17 +61,37 @@ internal sealed class InMemoryGraphJobStore : IGraphJobStore
return ValueTask.FromResult<GraphOverlayJob?>(null);
}
// Unconditional replace: overwrites the stored build job keyed by id (no concurrency guard).
public ValueTask<GraphBuildJob> UpdateAsync(GraphBuildJob job, CancellationToken cancellationToken)
{
_buildJobs[job.Id] = job;
return ValueTask.FromResult(job);
}
// Unconditional replace: overwrites the stored overlay job keyed by id (no concurrency guard).
public ValueTask<GraphOverlayJob> UpdateAsync(GraphOverlayJob job, CancellationToken cancellationToken)
{
_overlayJobs[job.Id] = job;
return ValueTask.FromResult(job);
}
// Conditional replace guarded by the expected status (optimistic concurrency). A job stored
// under the id but belonging to another tenant is treated as not found.
// NOTE(review): TryGetValue followed by the indexer assignment is check-then-act, not an atomic
// compare-and-swap — two concurrent callers could both observe the expected status. Presumably
// acceptable for this in-memory store; confirm if it is ever used under real concurrency.
public ValueTask<GraphJobUpdateResult<GraphBuildJob>> UpdateAsync(GraphBuildJob job, GraphJobStatus expectedStatus, CancellationToken cancellationToken)
{
if (_buildJobs.TryGetValue(job.Id, out var existing) && string.Equals(existing.TenantId, job.TenantId, StringComparison.Ordinal))
{
if (existing.Status == expectedStatus)
{
_buildJobs[job.Id] = job;
return ValueTask.FromResult(GraphJobUpdateResult<GraphBuildJob>.UpdatedResult(job));
}
// Guard mismatch: report the current snapshot so the caller can retry.
return ValueTask.FromResult(GraphJobUpdateResult<GraphBuildJob>.NotUpdated(existing));
}
throw new KeyNotFoundException($"Graph build job '{job.Id}' not found.");
}
// Conditional replace for overlay jobs; mirrors the build-job overload above, including the
// tenant check and the non-atomic check-then-act caveat.
public ValueTask<GraphJobUpdateResult<GraphOverlayJob>> UpdateAsync(GraphOverlayJob job, GraphJobStatus expectedStatus, CancellationToken cancellationToken)
{
if (_overlayJobs.TryGetValue(job.Id, out var existing) && string.Equals(existing.TenantId, job.TenantId, StringComparison.Ordinal))
{
if (existing.Status == expectedStatus)
{
_overlayJobs[job.Id] = job;
return ValueTask.FromResult(GraphJobUpdateResult<GraphOverlayJob>.UpdatedResult(job));
}
// Guard mismatch: report the current snapshot so the caller can retry.
return ValueTask.FromResult(GraphJobUpdateResult<GraphOverlayJob>.NotUpdated(existing));
}
throw new KeyNotFoundException($"Graph overlay job '{job.Id}' not found.");
}
public ValueTask<IReadOnlyCollection<GraphOverlayJob>> GetOverlayJobsAsync(string tenantId, CancellationToken cancellationToken)
{

View File

@@ -1,4 +1,5 @@
using StellaOps.Scheduler.Models;
using System.Collections.Generic;
using StellaOps.Scheduler.Models;
using StellaOps.Scheduler.Storage.Mongo.Repositories;
namespace StellaOps.Scheduler.WebService.GraphJobs;
@@ -44,11 +45,37 @@ internal sealed class MongoGraphJobStore : IGraphJobStore
public async ValueTask<GraphOverlayJob?> GetOverlayJobAsync(string tenantId, string jobId, CancellationToken cancellationToken)
=> await _repository.GetOverlayJobAsync(tenantId, jobId, cancellationToken);
// Unconditional replace of the build-job document (no expected-status guard).
public async ValueTask<GraphBuildJob> UpdateAsync(GraphBuildJob job, CancellationToken cancellationToken)
=> await _repository.ReplaceAsync(job, cancellationToken);
// Unconditional replace of the overlay-job document (no expected-status guard).
public async ValueTask<GraphOverlayJob> UpdateAsync(GraphOverlayJob job, CancellationToken cancellationToken)
=> await _repository.ReplaceAsync(job, cancellationToken);
// Conditional replace: the repository applies the expected-status filter (optimistic
// concurrency). On a guard mismatch, re-reads the current document so the caller can retry;
// throws if the job no longer exists at all.
public async ValueTask<GraphJobUpdateResult<GraphBuildJob>> UpdateAsync(GraphBuildJob job, GraphJobStatus expectedStatus, CancellationToken cancellationToken)
{
if (await _repository.TryReplaceAsync(job, expectedStatus, cancellationToken).ConfigureAwait(false))
{
return GraphJobUpdateResult<GraphBuildJob>.UpdatedResult(job);
}
var existing = await _repository.GetBuildJobAsync(job.TenantId, job.Id, cancellationToken).ConfigureAwait(false);
if (existing is null)
{
throw new KeyNotFoundException($"Graph build job '{job.Id}' not found.");
}
return GraphJobUpdateResult<GraphBuildJob>.NotUpdated(existing);
}
// Overlay counterpart of the conditional build-job replace: guard-filtered write, then a
// re-read on mismatch so callers receive the current snapshot for retry.
public async ValueTask<GraphJobUpdateResult<GraphOverlayJob>> UpdateAsync(GraphOverlayJob job, GraphJobStatus expectedStatus, CancellationToken cancellationToken)
{
if (await _repository.TryReplaceOverlayAsync(job, expectedStatus, cancellationToken).ConfigureAwait(false))
{
return GraphJobUpdateResult<GraphOverlayJob>.UpdatedResult(job);
}
var existing = await _repository.GetOverlayJobAsync(job.TenantId, job.Id, cancellationToken).ConfigureAwait(false);
if (existing is null)
{
throw new KeyNotFoundException($"Graph overlay job '{job.Id}' not found.");
}
return GraphJobUpdateResult<GraphOverlayJob>.NotUpdated(existing);
}
// Lists all overlay jobs for a tenant (consumed by the overlay-lag metrics endpoint).
public async ValueTask<IReadOnlyCollection<GraphOverlayJob>> GetOverlayJobsAsync(string tenantId, CancellationToken cancellationToken)
=> await _repository.ListOverlayJobsAsync(tenantId, cancellationToken);

View File

@@ -16,8 +16,10 @@
| ID | Status | Owner(s) | Depends on | Description | Exit Criteria |
|----|--------|----------|------------|-------------|---------------|
| SCHED-WEB-21-004 | DOING (2025-10-26) | Scheduler WebService Guild, Scheduler Storage Guild | SCHED-WEB-21-001, SCHED-STORAGE-16-201 | Persist graph job lifecycle to Mongo storage and publish `scheduler.graph.job.completed@1` events + outbound webhook to Cartographer. | Storage repositories updated; events emitted; webhook payload documented; integration tests cover storage + event flow. **Note:** Events currently log JSON envelopes while the shared platform bus is provisioned. Cartographer webhook now posts JSON payloads when configured; replace inline logging with bus publisher once the shared event transport is online. |
| SCHED-WEB-21-004 | DONE (2025-11-04) | Scheduler WebService Guild, Scheduler Storage Guild | SCHED-WEB-21-001, SCHED-STORAGE-16-201 | Persist graph job lifecycle to Mongo storage and publish `scheduler.graph.job.completed@1` events + outbound webhook to Cartographer. | Storage repositories updated; events emitted; webhook payload documented; integration tests cover storage + event flow. **Note:** Events currently log JSON envelopes while the shared platform bus is provisioned. Cartographer webhook now posts JSON payloads when configured; replace inline logging with bus publisher once the shared event transport is online. |
> 2025-10-30: Implemented Redis-backed publisher (`Scheduler:Events:GraphJobs`) emitting `scheduler.graph.job.completed@1` to configured stream with optional logging fallback; docs/configs to be validated with DevOps before closing.
> 2025-11-04: Resumed SCHED-WEB-21-004 to finalize Mongo lifecycle persistence guards, graph completion events, and Cartographer webhook verification.
> 2025-11-04: SCHED-WEB-21-004 completed lifecycle stored in Mongo with optimistic concurrency, completion events/webhooks emitted once per transition, and result URI metadata refreshed idempotently with unit/integration coverage.
## StellaOps Console (Sprint 23)
| ID | Status | Owner(s) | Depends on | Description | Exit Criteria |

View File

@@ -97,7 +97,7 @@ Webhook invoked by Scheduler Worker once Cartographer finishes a build/overlay j
}
```
The endpoint advances the job through `running → terminal` transitions via `GraphJobStateMachine`, captures the latest correlation identifier, and stores the optional `resultUri` in metadata for downstream exports.
The endpoint advances the job through `running → terminal` transitions via `GraphJobStateMachine`, captures the latest correlation identifier, and stores the optional `resultUri` in metadata for downstream exports. Repeated notifications are idempotent: if the job already reached a terminal state, the response returns the stored snapshot without publishing another event. When a `resultUri` value changes, only the metadata is refreshed—events and webhooks are emitted once per successful status transition.
### `GET /graphs/overlays/lag`
Returns per-tenant overlay lag metrics (counts, min/max/average lag seconds, and last five completions with correlation IDs + result URIs). Requires `graph:read`.
@@ -131,7 +131,6 @@ Response example:
`StellaOps.Scheduler.WebService.Tests/GraphJobEndpointTests.cs` covers scope enforcement and the build-list happy path using the in-memory store. Future work should add overlay coverage once Cartographer adapters are available.
## Known gaps / TODO
- Persist jobs to Scheduler storage and publish `scheduler.graph.job.completed@1` events + outbound webhook to Cartographer (see new `SCHED-WEB-21-004`).
- Extend `GET /graphs/jobs` with pagination cursors shared with Cartographer/Console.
## Known gaps / TODO
- Extend `GET /graphs/jobs` with pagination cursors shared with Cartographer/Console.

View File

@@ -0,0 +1,70 @@
using System.Threading;
using System.Threading.Tasks;
using StellaOps.Scheduler.Models;
using StellaOps.Scheduler.Storage.Mongo.Repositories;
using StellaOps.Scheduler.WebService.GraphJobs;
using Xunit;
namespace StellaOps.Scheduler.Storage.Mongo.Tests.Integration;
/// <summary>
/// Integration tests for <c>MongoGraphJobStore</c>'s status-guarded update: the conditional
/// replace succeeds when the stored status matches the guard, and returns the stored snapshot
/// (without writing) when it does not.
/// </summary>
public sealed class GraphJobStoreTests
{
// Fixed timestamp so state-machine transitions are deterministic.
private static readonly DateTimeOffset OccurredAt = new(2025, 11, 4, 10, 30, 0, TimeSpan.Zero);
[Fact]
public async Task UpdateAsync_SucceedsWhenExpectedStatusMatches()
{
using var harness = new SchedulerMongoTestHarness();
var repository = new GraphJobRepository(harness.Context);
var store = new MongoGraphJobStore(repository);
var initial = CreateBuildJob();
await store.AddAsync(initial, CancellationToken.None);
// Drive the job Pending -> Running -> Completed via the state machine.
var running = GraphJobStateMachine.EnsureTransition(initial, GraphJobStatus.Running, OccurredAt, attempts: initial.Attempts);
var completed = GraphJobStateMachine.EnsureTransition(running, GraphJobStatus.Completed, OccurredAt, attempts: running.Attempts + 1);
// Guard matches the stored status (Pending), so the write must apply.
var updateResult = await store.UpdateAsync(completed, GraphJobStatus.Pending, CancellationToken.None);
Assert.True(updateResult.Updated);
var persisted = await store.GetBuildJobAsync(initial.TenantId, initial.Id, CancellationToken.None);
Assert.NotNull(persisted);
Assert.Equal(GraphJobStatus.Completed, persisted!.Status);
}
[Fact]
public async Task UpdateAsync_ReturnsExistingWhenExpectedStatusMismatch()
{
using var harness = new SchedulerMongoTestHarness();
var repository = new GraphJobRepository(harness.Context);
var store = new MongoGraphJobStore(repository);
var initial = CreateBuildJob();
await store.AddAsync(initial, CancellationToken.None);
var running = GraphJobStateMachine.EnsureTransition(initial, GraphJobStatus.Running, OccurredAt, attempts: initial.Attempts);
var completed = GraphJobStateMachine.EnsureTransition(running, GraphJobStatus.Completed, OccurredAt, attempts: running.Attempts + 1);
// First write moves the stored status to Completed...
await store.UpdateAsync(completed, GraphJobStatus.Pending, CancellationToken.None);
// ...so a second write still guarded on Pending must be rejected.
var result = await store.UpdateAsync(completed, GraphJobStatus.Pending, CancellationToken.None);
Assert.False(result.Updated);
Assert.Equal(GraphJobStatus.Completed, result.Job.Status);
}
// Minimal valid pending build job used by both tests.
private static GraphBuildJob CreateBuildJob()
{
var digest = "sha256:" + new string('b', 64);
return new GraphBuildJob(
id: "gbj_store_test",
tenantId: "tenant-store",
sbomId: "sbom-alpha",
sbomVersionId: "sbom-alpha-v1",
sbomDigest: digest,
status: GraphJobStatus.Pending,
trigger: GraphBuildJobTrigger.SbomVersion,
createdAt: OccurredAt,
metadata: null);
}
}

View File

@@ -0,0 +1,218 @@
using System.Collections.Generic;
using System.Threading;
using System.Threading.Tasks;
using StellaOps.Scheduler.Models;
using StellaOps.Scheduler.WebService.GraphJobs;
using Xunit;
namespace StellaOps.Scheduler.WebService.Tests;
/// <summary>
/// Unit tests for <c>GraphJobService</c> completion semantics: request fields are trimmed and
/// persisted, repeated completions are idempotent, and a changed result URI refreshes metadata
/// without re-emitting the completion event or webhook. Uses an in-memory store wrapped with
/// update counters plus recording fakes for the publisher/webhook.
/// </summary>
public sealed class GraphJobServiceTests
{
// Fixed clock value shared by all tests for deterministic transitions.
private static readonly DateTimeOffset FixedTime = new(2025, 11, 4, 12, 0, 0, TimeSpan.Zero);
[Fact]
public async Task CompleteBuildJob_PersistsMetadataAndPublishesOnce()
{
var store = new TrackingGraphJobStore();
var initial = CreateBuildJob();
await store.AddAsync(initial, CancellationToken.None);
var clock = new FixedClock(FixedTime);
var publisher = new RecordingPublisher();
var webhook = new RecordingWebhookClient();
var service = new GraphJobService(store, clock, publisher, webhook);
// Note: request values carry trailing whitespace to exercise trimming.
var request = new GraphJobCompletionRequest
{
JobId = initial.Id,
JobType = GraphJobQueryType.Build,
Status = GraphJobStatus.Completed,
OccurredAt = FixedTime,
GraphSnapshotId = "graph_snap_final ",
ResultUri = "oras://cartographer/bundle ",
CorrelationId = "corr-123 "
};
var response = await service.CompleteJobAsync(initial.TenantId, request, CancellationToken.None);
Assert.Equal(GraphJobStatus.Completed, response.Status);
// One store write, one event, one webhook call per completion.
Assert.Equal(1, store.BuildUpdateCount);
Assert.Single(publisher.Notifications);
Assert.Single(webhook.Notifications);
var stored = await store.GetBuildJobAsync(initial.TenantId, initial.Id, CancellationToken.None);
Assert.NotNull(stored);
// Trimmed values must be persisted.
Assert.Equal("graph_snap_final", stored!.GraphSnapshotId);
Assert.Equal("corr-123", stored.CorrelationId);
Assert.True(stored.Metadata.TryGetValue("resultUri", out var resultUri));
Assert.Equal("oras://cartographer/bundle", resultUri);
}
[Fact]
public async Task CompleteBuildJob_IsIdempotentWhenAlreadyCompleted()
{
var store = new TrackingGraphJobStore();
var initial = CreateBuildJob();
await store.AddAsync(initial, CancellationToken.None);
var clock = new FixedClock(FixedTime);
var publisher = new RecordingPublisher();
var webhook = new RecordingWebhookClient();
var service = new GraphJobService(store, clock, publisher, webhook);
var request = new GraphJobCompletionRequest
{
JobId = initial.Id,
JobType = GraphJobQueryType.Build,
Status = GraphJobStatus.Completed,
OccurredAt = FixedTime,
GraphSnapshotId = "graph_snap_final",
ResultUri = "oras://cartographer/bundle",
CorrelationId = "corr-123"
};
await service.CompleteJobAsync(initial.TenantId, request, CancellationToken.None);
var updateCountAfterFirst = store.BuildUpdateCount;
// Replaying the identical request must neither write nor publish again.
var secondResponse = await service.CompleteJobAsync(initial.TenantId, request, CancellationToken.None);
Assert.Equal(GraphJobStatus.Completed, secondResponse.Status);
Assert.Equal(updateCountAfterFirst, store.BuildUpdateCount);
Assert.Single(publisher.Notifications);
Assert.Single(webhook.Notifications);
}
[Fact]
public async Task CompleteBuildJob_UpdatesResultUriWithoutReemittingEvent()
{
var store = new TrackingGraphJobStore();
var initial = CreateBuildJob();
await store.AddAsync(initial, CancellationToken.None);
var clock = new FixedClock(FixedTime);
var publisher = new RecordingPublisher();
var webhook = new RecordingWebhookClient();
var service = new GraphJobService(store, clock, publisher, webhook);
var firstRequest = new GraphJobCompletionRequest
{
JobId = initial.Id,
JobType = GraphJobQueryType.Build,
Status = GraphJobStatus.Completed,
OccurredAt = FixedTime,
GraphSnapshotId = "graph_snap_final",
ResultUri = null,
CorrelationId = "corr-123"
};
await service.CompleteJobAsync(initial.TenantId, firstRequest, CancellationToken.None);
Assert.Equal(1, store.BuildUpdateCount);
Assert.Single(publisher.Notifications);
Assert.Single(webhook.Notifications);
// Same terminal status, but a new result URI: metadata-only refresh.
var secondRequest = firstRequest with
{
ResultUri = "oras://cartographer/bundle-v2",
OccurredAt = FixedTime.AddSeconds(30)
};
var response = await service.CompleteJobAsync(initial.TenantId, secondRequest, CancellationToken.None);
Assert.Equal(GraphJobStatus.Completed, response.Status);
// A second write happens, but no additional event or webhook.
Assert.Equal(2, store.BuildUpdateCount);
Assert.Single(publisher.Notifications);
Assert.Single(webhook.Notifications);
var stored = await store.GetBuildJobAsync(initial.TenantId, initial.Id, CancellationToken.None);
Assert.NotNull(stored);
Assert.True(stored!.Metadata.TryGetValue("resultUri", out var resultUri));
Assert.Equal("oras://cartographer/bundle-v2", resultUri);
}
// Minimal valid pending build job shared by the tests.
private static GraphBuildJob CreateBuildJob()
{
var digest = "sha256:" + new string('a', 64);
return new GraphBuildJob(
id: "gbj_test",
tenantId: "tenant-alpha",
sbomId: "sbom-alpha",
sbomVersionId: "sbom-alpha-v1",
sbomDigest: digest,
status: GraphJobStatus.Pending,
trigger: GraphBuildJobTrigger.SbomVersion,
createdAt: FixedTime,
metadata: null);
}
// Decorator over the in-memory store that counts conditional update calls.
private sealed class TrackingGraphJobStore : IGraphJobStore
{
private readonly InMemoryGraphJobStore _inner = new();
public int BuildUpdateCount { get; private set; }
public int OverlayUpdateCount { get; private set; }
public ValueTask<GraphBuildJob> AddAsync(GraphBuildJob job, CancellationToken cancellationToken)
=> _inner.AddAsync(job, cancellationToken);
public ValueTask<GraphOverlayJob> AddAsync(GraphOverlayJob job, CancellationToken cancellationToken)
=> _inner.AddAsync(job, cancellationToken);
public ValueTask<GraphJobCollection> GetJobsAsync(string tenantId, GraphJobQuery query, CancellationToken cancellationToken)
=> _inner.GetJobsAsync(tenantId, query, cancellationToken);
public ValueTask<GraphBuildJob?> GetBuildJobAsync(string tenantId, string jobId, CancellationToken cancellationToken)
=> _inner.GetBuildJobAsync(tenantId, jobId, cancellationToken);
public ValueTask<GraphOverlayJob?> GetOverlayJobAsync(string tenantId, string jobId, CancellationToken cancellationToken)
=> _inner.GetOverlayJobAsync(tenantId, jobId, cancellationToken);
public async ValueTask<GraphJobUpdateResult<GraphBuildJob>> UpdateAsync(GraphBuildJob job, GraphJobStatus expectedStatus, CancellationToken cancellationToken)
{
BuildUpdateCount++;
return await _inner.UpdateAsync(job, expectedStatus, cancellationToken);
}
public async ValueTask<GraphJobUpdateResult<GraphOverlayJob>> UpdateAsync(GraphOverlayJob job, GraphJobStatus expectedStatus, CancellationToken cancellationToken)
{
OverlayUpdateCount++;
return await _inner.UpdateAsync(job, expectedStatus, cancellationToken);
}
public ValueTask<IReadOnlyCollection<GraphOverlayJob>> GetOverlayJobsAsync(string tenantId, CancellationToken cancellationToken)
=> _inner.GetOverlayJobsAsync(tenantId, cancellationToken);
}
// Records published completion events for assertion.
private sealed class RecordingPublisher : IGraphJobCompletionPublisher
{
public List<GraphJobCompletionNotification> Notifications { get; } = new();
public Task PublishAsync(GraphJobCompletionNotification notification, CancellationToken cancellationToken)
{
Notifications.Add(notification);
return Task.CompletedTask;
}
}
// Records outbound Cartographer webhook calls for assertion.
private sealed class RecordingWebhookClient : ICartographerWebhookClient
{
public List<GraphJobCompletionNotification> Notifications { get; } = new();
public Task NotifyAsync(GraphJobCompletionNotification notification, CancellationToken cancellationToken)
{
Notifications.Add(notification);
return Task.CompletedTask;
}
}
// Deterministic clock fake.
private sealed class FixedClock : ISystemClock
{
public FixedClock(DateTimeOffset utcNow)
{
UtcNow = utcNow;
}
public DateTimeOffset UtcNow { get; set; }
}
}

View File

@@ -0,0 +1,13 @@
using StellaOps.TaskRunner.Core.Planning;
namespace StellaOps.TaskRunner.Core.Execution;
/// <summary>
/// Executes a single pack-run step with the plan's resolved parameter values.
/// </summary>
public interface IPackRunStepExecutor
{
Task<PackRunStepExecutionResult> ExecuteAsync(
PackRunExecutionStep step,
IReadOnlyDictionary<string, TaskPackPlanParameterValue> parameters,
CancellationToken cancellationToken);
}
/// <summary>Outcome of a step execution; <paramref name="Error"/> is null on success.</summary>
public sealed record PackRunStepExecutionResult(bool Succeeded, string? Error = null);

View File

@@ -0,0 +1,86 @@
using System.Collections.ObjectModel;
using StellaOps.TaskRunner.Core.Planning;
namespace StellaOps.TaskRunner.Core.Execution;
/// <summary>
/// Immutable execution graph for a pack run: the ordered step list plus the failure policy
/// applied when the plan does not supply one.
/// </summary>
public sealed class PackRunExecutionGraph
{
// Fallback policy used when the plan carries none. ContinueOnError: false means the run
// stops on the first failing step; the meaning of the two leading arguments (presumably
// max attempts = 1 and zero backoff) comes from TaskPackPlanFailurePolicy — TODO confirm.
public static readonly TaskPackPlanFailurePolicy DefaultFailurePolicy = new(1, 0, ContinueOnError: false);
public PackRunExecutionGraph(IReadOnlyList<PackRunExecutionStep> steps, TaskPackPlanFailurePolicy? failurePolicy)
{
Steps = steps ?? throw new ArgumentNullException(nameof(steps));
FailurePolicy = failurePolicy ?? DefaultFailurePolicy;
}
public IReadOnlyList<PackRunExecutionStep> Steps { get; }
public TaskPackPlanFailurePolicy FailurePolicy { get; }
}
/// <summary>
/// Kind of a pack-run execution step. Member semantics inferred from names — confirm against
/// the pack manifest schema: Run executes a task, GateApproval/GatePolicy pause for a gate,
/// Parallel fans out children, Map presumably iterates children over a collection.
/// </summary>
public enum PackRunStepKind
{
Unknown = 0,
Run,
GateApproval,
GatePolicy,
Parallel,
Map
}
/// <summary>
/// Immutable node of a pack-run execution graph. Composite kinds (Parallel/Map) carry nested
/// steps in <see cref="Children"/>; gate kinds carry <see cref="ApprovalId"/>/<see cref="GateMessage"/>.
/// </summary>
public sealed class PackRunExecutionStep
{
public PackRunExecutionStep(
string id,
string templateId,
PackRunStepKind kind,
bool enabled,
string? uses,
IReadOnlyDictionary<string, TaskPackPlanParameterValue> parameters,
string? approvalId,
string? gateMessage,
int? maxParallel,
bool continueOnError,
IReadOnlyList<PackRunExecutionStep> children)
{
// id and templateId are mandatory identifiers; everything else is optional/kind-specific.
Id = string.IsNullOrWhiteSpace(id) ? throw new ArgumentException("Value cannot be null or whitespace.", nameof(id)) : id;
TemplateId = string.IsNullOrWhiteSpace(templateId) ? throw new ArgumentException("Value cannot be null or whitespace.", nameof(templateId)) : templateId;
Kind = kind;
Enabled = enabled;
Uses = uses;
Parameters = parameters ?? throw new ArgumentNullException(nameof(parameters));
ApprovalId = approvalId;
GateMessage = gateMessage;
MaxParallel = maxParallel;
ContinueOnError = continueOnError;
Children = children ?? throw new ArgumentNullException(nameof(children));
}
public string Id { get; }
public string TemplateId { get; }
public PackRunStepKind Kind { get; }
// Disabled steps exist in the graph but are presumably skipped by the runner — confirm.
public bool Enabled { get; }
public string? Uses { get; }
public IReadOnlyDictionary<string, TaskPackPlanParameterValue> Parameters { get; }
public string? ApprovalId { get; }
public string? GateMessage { get; }
public int? MaxParallel { get; }
public bool ContinueOnError { get; }
public IReadOnlyList<PackRunExecutionStep> Children { get; }
// Shared empty singletons so leaf steps avoid per-instance allocations.
public static IReadOnlyDictionary<string, TaskPackPlanParameterValue> EmptyParameters { get; } =
new ReadOnlyDictionary<string, TaskPackPlanParameterValue>(new Dictionary<string, TaskPackPlanParameterValue>(StringComparer.Ordinal));
public static IReadOnlyList<PackRunExecutionStep> EmptyChildren { get; } =
Array.Empty<PackRunExecutionStep>();
}

View File

@@ -0,0 +1,77 @@
using System.Collections.ObjectModel;
using System.Text.Json.Nodes;
using StellaOps.TaskRunner.Core.Planning;
namespace StellaOps.TaskRunner.Core.Execution;
/// <summary>
/// Translates a computed <see cref="TaskPackPlan"/> into a <see cref="PackRunExecutionGraph"/>,
/// mapping plan step types to execution kinds and lifting parallelism hints
/// (<c>maxParallel</c>, <c>continueOnError</c>) out of the step parameters.
/// </summary>
public sealed class PackRunExecutionGraphBuilder
{
    // Known manifest step types mapped to execution kinds; anything else becomes Unknown.
    private static readonly Dictionary<string, PackRunStepKind> KindsByType = new(StringComparer.Ordinal)
    {
        ["run"] = PackRunStepKind.Run,
        ["gate.approval"] = PackRunStepKind.GateApproval,
        ["gate.policy"] = PackRunStepKind.GatePolicy,
        ["parallel"] = PackRunStepKind.Parallel,
        ["map"] = PackRunStepKind.Map
    };

    /// <summary>Builds the execution graph for <paramref name="plan"/>.</summary>
    public PackRunExecutionGraph Build(TaskPackPlan plan)
    {
        ArgumentNullException.ThrowIfNull(plan);

        var rootSteps = new List<PackRunExecutionStep>(plan.Steps.Count);
        foreach (var planStep in plan.Steps)
        {
            rootSteps.Add(ConvertStep(planStep));
        }

        return new PackRunExecutionGraph(rootSteps, plan.FailurePolicy);
    }

    // Recursively converts a plan step (and its children) into an execution-graph node.
    private static PackRunExecutionStep ConvertStep(TaskPackPlanStep planStep)
    {
        IReadOnlyDictionary<string, TaskPackPlanParameterValue> parameterMap;
        if (planStep.Parameters is null)
        {
            parameterMap = PackRunExecutionStep.EmptyParameters;
        }
        else
        {
            // Defensive ordinal-keyed copy so later plan mutation cannot leak into the graph.
            parameterMap = new ReadOnlyDictionary<string, TaskPackPlanParameterValue>(
                new Dictionary<string, TaskPackPlanParameterValue>(planStep.Parameters, StringComparer.Ordinal));
        }

        IReadOnlyList<PackRunExecutionStep> childSteps = planStep.Children is null
            ? PackRunExecutionStep.EmptyChildren
            : planStep.Children.Select(ConvertStep).ToList();

        return new PackRunExecutionStep(
            planStep.Id,
            planStep.TemplateId,
            DetermineKind(planStep.Type),
            planStep.Enabled,
            planStep.Uses,
            parameterMap,
            planStep.ApprovalId,
            planStep.GateMessage,
            ReadInt(parameterMap, "maxParallel"),
            ReadBool(parameterMap, "continueOnError"),
            childSteps);
    }

    private static PackRunStepKind DetermineKind(string? type)
        => type is not null && KindsByType.TryGetValue(type, out var kind)
            ? kind
            : PackRunStepKind.Unknown;

    // Returns the parameter as an int, or null when absent or not a plain JSON integer.
    private static int? ReadInt(IReadOnlyDictionary<string, TaskPackPlanParameterValue> parameters, string key)
        => parameters.TryGetValue(key, out var parameter) &&
           parameter.Value is JsonValue json &&
           json.TryGetValue<int>(out var parsed)
            ? (int?)parsed
            : null;

    // Returns the parameter as a bool; absent or non-boolean values count as false.
    private static bool ReadBool(IReadOnlyDictionary<string, TaskPackPlanParameterValue> parameters, string key)
        => parameters.TryGetValue(key, out var parameter) &&
           parameter.Value is JsonValue json &&
           json.TryGetValue<bool>(out var parsed) &&
           parsed;
}

View File

@@ -0,0 +1,159 @@
using System.Collections.ObjectModel;
using System.Linq;
namespace StellaOps.TaskRunner.Core.Execution;
/// <summary>
/// Reconciles persisted pack-run step state with the current approval decisions so
/// gate steps unblock (approved), fail (rejected/expired), or stay pending.
/// </summary>
public static class PackRunGateStateUpdater
{
    /// <summary>
    /// Applies gate outcomes from <paramref name="coordinator"/> to <paramref name="state"/>.
    /// Returns the (possibly unchanged) state and whether a rejected/expired approval blocks the run.
    /// </summary>
    public static PackRunGateStateUpdateResult Apply(
        PackRunState state,
        PackRunExecutionGraph graph,
        PackRunApprovalCoordinator coordinator,
        DateTimeOffset timestamp)
    {
        ArgumentNullException.ThrowIfNull(state);
        ArgumentNullException.ThrowIfNull(graph);
        ArgumentNullException.ThrowIfNull(coordinator);
        // Index approvals by step id. If a step is referenced by more than one approval,
        // the first one encountered wins.
        var approvals = coordinator.GetApprovals()
            .SelectMany(approval => approval.StepIds.Select(stepId => (stepId, approval)))
            .GroupBy(tuple => tuple.stepId, StringComparer.Ordinal)
            .ToDictionary(
                group => group.Key,
                group => group.First().approval,
                StringComparer.Ordinal);
        // Copy-on-write: mutate a dictionary copy and only build a new state when something changed.
        var mutable = new Dictionary<string, PackRunStepStateRecord>(state.Steps, StringComparer.Ordinal);
        var changed = false;
        var hasBlockingFailure = false;
        foreach (var step in EnumerateSteps(graph.Steps))
        {
            // Graph steps with no persisted record (e.g. not yet tracked) are ignored.
            if (!mutable.TryGetValue(step.Id, out var record))
            {
                continue;
            }
            switch (step.Kind)
            {
                case PackRunStepKind.GateApproval:
                    // No approval known for this gate yet — leave the record untouched.
                    if (!approvals.TryGetValue(step.Id, out var approvalState))
                    {
                        continue;
                    }
                    switch (approvalState.Status)
                    {
                        case PackRunApprovalStatus.Pending:
                            // Still waiting; nothing to update.
                            break;
                        case PackRunApprovalStatus.Approved:
                            // Idempotent: only rewrite the record when it is not already a clean success.
                            if (record.Status != PackRunStepExecutionStatus.Succeeded || record.StatusReason is not null)
                            {
                                mutable[step.Id] = record with
                                {
                                    Status = PackRunStepExecutionStatus.Succeeded,
                                    StatusReason = null,
                                    LastTransitionAt = timestamp,
                                    NextAttemptAt = null
                                };
                                changed = true;
                            }
                            break;
                        case PackRunApprovalStatus.Rejected:
                        case PackRunApprovalStatus.Expired:
                            var failureReason = BuildFailureReason(approvalState);
                            if (record.Status != PackRunStepExecutionStatus.Failed ||
                                !string.Equals(record.StatusReason, failureReason, StringComparison.Ordinal))
                            {
                                mutable[step.Id] = record with
                                {
                                    Status = PackRunStepExecutionStatus.Failed,
                                    StatusReason = failureReason,
                                    LastTransitionAt = timestamp,
                                    NextAttemptAt = null
                                };
                                changed = true;
                            }
                            // A rejected/expired approval blocks the run even if the record
                            // already carried the failure (no state change needed).
                            hasBlockingFailure = true;
                            break;
                    }
                    break;
                case PackRunStepKind.GatePolicy:
                    // "requires-policy" is presumably seeded when the run is created for policy
                    // gates — TODO confirm against the run bootstrap code. Such gates are
                    // auto-released here.
                    if (record.Status == PackRunStepExecutionStatus.Pending &&
                        string.Equals(record.StatusReason, "requires-policy", StringComparison.Ordinal))
                    {
                        mutable[step.Id] = record with
                        {
                            Status = PackRunStepExecutionStatus.Succeeded,
                            StatusReason = null,
                            LastTransitionAt = timestamp,
                            NextAttemptAt = null
                        };
                        changed = true;
                    }
                    break;
            }
        }
        if (!changed)
        {
            // Return the original state instance so callers can cheaply detect "no update".
            return new PackRunGateStateUpdateResult(state, hasBlockingFailure);
        }
        var updatedState = state with
        {
            UpdatedAt = timestamp,
            Steps = new ReadOnlyDictionary<string, PackRunStepStateRecord>(mutable)
        };
        return new PackRunGateStateUpdateResult(updatedState, hasBlockingFailure);
    }

    // Depth-first enumeration of the graph: each step before its children.
    private static IEnumerable<PackRunExecutionStep> EnumerateSteps(IReadOnlyList<PackRunExecutionStep> steps)
    {
        if (steps.Count == 0)
        {
            yield break;
        }
        foreach (var step in steps)
        {
            yield return step;
            if (step.Children.Count > 0)
            {
                foreach (var child in EnumerateSteps(step.Children))
                {
                    yield return child;
                }
            }
        }
    }

    // Builds a machine-readable reason such as "approval-rejected" or
    // "approval-expired:<summary>" for the failed gate record.
    private static string BuildFailureReason(PackRunApprovalState state)
    {
        var baseReason = state.Status switch
        {
            PackRunApprovalStatus.Rejected => "approval-rejected",
            PackRunApprovalStatus.Expired => "approval-expired",
            _ => "approval-invalid"
        };
        if (string.IsNullOrWhiteSpace(state.Summary))
        {
            return baseReason;
        }
        var summary = state.Summary.Trim();
        return $"{baseReason}:{summary}";
    }
}

/// <summary>Result of a gate reconciliation pass: the resulting state and whether the run is blocked.</summary>
public readonly record struct PackRunGateStateUpdateResult(PackRunState State, bool HasBlockingFailure);

View File

@@ -0,0 +1,50 @@
using System.Collections.ObjectModel;
using StellaOps.TaskRunner.Core.Planning;
namespace StellaOps.TaskRunner.Core.Execution;
/// <summary>
/// Persisted snapshot of a pack run: identity, the hash of the plan it executes,
/// the effective failure policy, and per-step state keyed by step id.
/// </summary>
public sealed record PackRunState(
    string RunId,
    string PlanHash,
    TaskPackPlanFailurePolicy FailurePolicy,
    DateTimeOffset CreatedAt,
    DateTimeOffset UpdatedAt,
    IReadOnlyDictionary<string, PackRunStepStateRecord> Steps)
{
    /// <summary>
    /// Creates a fresh run state; <paramref name="timestamp"/> seeds both CreatedAt and
    /// UpdatedAt, and <paramref name="steps"/> is defensively copied (ordinal keys).
    /// </summary>
    public static PackRunState Create(
        string runId,
        string planHash,
        TaskPackPlanFailurePolicy failurePolicy,
        IReadOnlyDictionary<string, PackRunStepStateRecord> steps,
        DateTimeOffset timestamp)
        => new(
            runId,
            planHash,
            failurePolicy,
            timestamp,
            timestamp,
            new ReadOnlyDictionary<string, PackRunStepStateRecord>(new Dictionary<string, PackRunStepStateRecord>(steps, StringComparer.Ordinal)));
}

/// <summary>
/// Persisted per-step record: static metadata copied from the execution graph
/// (kind, gating, parallelism) plus the dynamic execution status and retry bookkeeping.
/// </summary>
public sealed record PackRunStepStateRecord(
    string StepId,
    PackRunStepKind Kind,
    bool Enabled,
    bool ContinueOnError,
    int? MaxParallel,
    string? ApprovalId,
    string? GateMessage,
    PackRunStepExecutionStatus Status,
    int Attempts,
    DateTimeOffset? LastTransitionAt,
    DateTimeOffset? NextAttemptAt,
    string? StatusReason);

/// <summary>Persistence abstraction for pack-run state snapshots.</summary>
public interface IPackRunStateStore
{
    /// <summary>Loads the state for <paramref name="runId"/>, or null when unknown.</summary>
    Task<PackRunState?> GetAsync(string runId, CancellationToken cancellationToken);

    /// <summary>Persists <paramref name="state"/>, replacing any previous snapshot for the run.</summary>
    Task SaveAsync(PackRunState state, CancellationToken cancellationToken);

    /// <summary>Lists all persisted run states.</summary>
    Task<IReadOnlyList<PackRunState>> ListAsync(CancellationToken cancellationToken);
}

View File

@@ -0,0 +1,121 @@
using StellaOps.TaskRunner.Core.Planning;
namespace StellaOps.TaskRunner.Core.Execution;
/// <summary>
/// Pure transition functions for a single step's execution state:
/// Pending → Running → Succeeded/Failed (with retry back to Pending), plus Pending → Skipped.
/// Illegal transitions throw <see cref="InvalidOperationException"/>.
/// </summary>
public static class PackRunStepStateMachine
{
    /// <summary>Initial state: pending, zero attempts, no retry scheduled.</summary>
    public static PackRunStepState Create(DateTimeOffset? createdAt = null)
        => new(PackRunStepExecutionStatus.Pending, Attempts: 0, createdAt, NextAttemptAt: null);

    /// <summary>Moves a pending step into the running state.</summary>
    public static PackRunStepState Start(PackRunStepState state, DateTimeOffset startedAt)
    {
        ArgumentNullException.ThrowIfNull(state);
        RequireStatus(state, PackRunStepExecutionStatus.Pending, "start step");

        return state with
        {
            Status = PackRunStepExecutionStatus.Running,
            LastTransitionAt = startedAt,
            NextAttemptAt = null
        };
    }

    /// <summary>Completes a running step successfully, consuming one attempt.</summary>
    public static PackRunStepState CompleteSuccess(PackRunStepState state, DateTimeOffset completedAt)
    {
        ArgumentNullException.ThrowIfNull(state);
        RequireStatus(state, PackRunStepExecutionStatus.Running, "complete step");

        return state with
        {
            Status = PackRunStepExecutionStatus.Succeeded,
            Attempts = state.Attempts + 1,
            LastTransitionAt = completedAt,
            NextAttemptAt = null
        };
    }

    /// <summary>
    /// Records a failure of a running step. While attempts remain under the policy's
    /// MaxAttempts, the step returns to Pending with a constant backoff; otherwise it
    /// becomes terminally Failed.
    /// </summary>
    public static PackRunStepFailureResult RegisterFailure(
        PackRunStepState state,
        DateTimeOffset failedAt,
        TaskPackPlanFailurePolicy failurePolicy)
    {
        ArgumentNullException.ThrowIfNull(state);
        ArgumentNullException.ThrowIfNull(failurePolicy);
        RequireStatus(state, PackRunStepExecutionStatus.Running, "register failure");

        var attemptCount = state.Attempts + 1;
        if (attemptCount >= failurePolicy.MaxAttempts)
        {
            // Retry budget exhausted — terminal failure.
            var failedState = state with
            {
                Status = PackRunStepExecutionStatus.Failed,
                Attempts = attemptCount,
                LastTransitionAt = failedAt,
                NextAttemptAt = null
            };
            return new PackRunStepFailureResult(failedState, PackRunStepFailureOutcome.Abort);
        }

        // Negative backoff values are clamped to zero (retry immediately).
        var delay = TimeSpan.FromSeconds(Math.Max(0, failurePolicy.BackoffSeconds));
        var retryState = state with
        {
            Status = PackRunStepExecutionStatus.Pending,
            Attempts = attemptCount,
            LastTransitionAt = failedAt,
            NextAttemptAt = failedAt + delay
        };
        return new PackRunStepFailureResult(retryState, PackRunStepFailureOutcome.Retry);
    }

    /// <summary>Skips a step that has not started yet.</summary>
    public static PackRunStepState Skip(PackRunStepState state, DateTimeOffset skippedAt)
    {
        ArgumentNullException.ThrowIfNull(state);
        RequireStatus(state, PackRunStepExecutionStatus.Pending, "skip step");

        return state with
        {
            Status = PackRunStepExecutionStatus.Skipped,
            LastTransitionAt = skippedAt,
            NextAttemptAt = null
        };
    }

    // Shared guard for the transition precondition; the action phrase keeps the
    // original per-method exception messages intact.
    private static void RequireStatus(PackRunStepState state, PackRunStepExecutionStatus required, string action)
    {
        if (state.Status != required)
        {
            throw new InvalidOperationException($"Cannot {action} from status {state.Status}.");
        }
    }
}
/// <summary>
/// Dynamic execution state of one step: current status, attempts consumed, the
/// timestamp of the last transition, and when the next retry may run (if scheduled).
/// </summary>
public sealed record PackRunStepState(
    PackRunStepExecutionStatus Status,
    int Attempts,
    DateTimeOffset? LastTransitionAt,
    DateTimeOffset? NextAttemptAt);

/// <summary>Lifecycle states a step can be in; Succeeded, Failed and Skipped are terminal.</summary>
public enum PackRunStepExecutionStatus
{
    Pending = 0,
    Running,
    Succeeded,
    Failed,
    Skipped
}

/// <summary>Pairs the post-failure state with whether the runner should retry or abort.</summary>
public readonly record struct PackRunStepFailureResult(PackRunStepState State, PackRunStepFailureOutcome Outcome);

/// <summary>Decision produced by <c>RegisterFailure</c>.</summary>
public enum PackRunStepFailureOutcome
{
    Retry = 0,
    Abort
}

View File

@@ -0,0 +1,78 @@
using System.Collections.ObjectModel;
using StellaOps.TaskRunner.Core.Planning;
namespace StellaOps.TaskRunner.Core.Execution.Simulation;
/// <summary>
/// Produces a dry-run view of a task-pack plan: builds the execution graph and
/// annotates every node with the status it would start in (pending, skipped, or
/// waiting on an approval/policy gate) without executing anything.
/// </summary>
public sealed class PackRunSimulationEngine
{
    private readonly PackRunExecutionGraphBuilder graphBuilder = new();

    /// <summary>Simulates <paramref name="plan"/> and returns the annotated node tree and outputs.</summary>
    public PackRunSimulationResult Simulate(TaskPackPlan plan)
    {
        ArgumentNullException.ThrowIfNull(plan);

        var graph = graphBuilder.Build(plan);
        var nodes = new List<PackRunSimulationNode>(graph.Steps.Count);
        foreach (var step in graph.Steps)
        {
            nodes.Add(ConvertStep(step));
        }

        return new PackRunSimulationResult(nodes, BuildOutputs(plan.Outputs), graph.FailurePolicy);
    }

    // Recursively mirrors a graph step (and its children) as a simulation node.
    private static PackRunSimulationNode ConvertStep(PackRunExecutionStep step)
    {
        IReadOnlyList<PackRunSimulationNode> childNodes = step.Children.Count == 0
            ? PackRunSimulationNode.Empty
            : new ReadOnlyCollection<PackRunSimulationNode>(step.Children.Select(ConvertStep).ToList());

        return new PackRunSimulationNode(
            step.Id,
            step.TemplateId,
            step.Kind,
            step.Enabled,
            step.Uses,
            step.ApprovalId,
            step.GateMessage,
            step.Parameters,
            step.MaxParallel,
            step.ContinueOnError,
            DetermineStatus(step),
            childNodes);
    }

    // Disabled wins over gating: a disabled gate is reported as Skipped.
    private static PackRunSimulationStatus DetermineStatus(PackRunExecutionStep step)
        => step switch
        {
            { Enabled: false } => PackRunSimulationStatus.Skipped,
            { Kind: PackRunStepKind.GateApproval } => PackRunSimulationStatus.RequiresApproval,
            { Kind: PackRunStepKind.GatePolicy } => PackRunSimulationStatus.RequiresPolicy,
            _ => PackRunSimulationStatus.Pending
        };

    private static IReadOnlyList<PackRunSimulationOutput> BuildOutputs(IReadOnlyList<TaskPackPlanOutput> outputs)
        => outputs.Count == 0
            ? PackRunSimulationOutput.Empty
            : new ReadOnlyCollection<PackRunSimulationOutput>(
                outputs
                    .Select(output => new PackRunSimulationOutput(output.Name, output.Type, output.Path, output.Expression))
                    .ToList());
}

View File

@@ -0,0 +1,131 @@
using System.Collections.ObjectModel;
using StellaOps.TaskRunner.Core.Planning;
namespace StellaOps.TaskRunner.Core.Execution.Simulation;
/// <summary>
/// Result of a plan dry run: the annotated node tree, projected outputs, and the
/// failure policy that would govern execution.
/// </summary>
public sealed class PackRunSimulationResult
{
    public PackRunSimulationResult(
        IReadOnlyList<PackRunSimulationNode> steps,
        IReadOnlyList<PackRunSimulationOutput> outputs,
        TaskPackPlanFailurePolicy failurePolicy)
    {
        Steps = steps ?? throw new ArgumentNullException(nameof(steps));
        Outputs = outputs ?? throw new ArgumentNullException(nameof(outputs));
        FailurePolicy = failurePolicy ?? throw new ArgumentNullException(nameof(failurePolicy));
    }

    public IReadOnlyList<PackRunSimulationNode> Steps { get; }
    public IReadOnlyList<PackRunSimulationOutput> Outputs { get; }
    public TaskPackPlanFailurePolicy FailurePolicy { get; }

    // True when any node (at any depth) is blocked on an approval or policy gate.
    public bool HasPendingApprovals => Steps.Any(ContainsApprovalRequirement);

    // Recursive search over a node and its children for gate requirements.
    private static bool ContainsApprovalRequirement(PackRunSimulationNode node)
    {
        if (node.Status is PackRunSimulationStatus.RequiresApproval or PackRunSimulationStatus.RequiresPolicy)
        {
            return true;
        }
        return node.Children.Any(ContainsApprovalRequirement);
    }
}

/// <summary>
/// Read-only mirror of one execution-graph step with its simulated starting status.
/// </summary>
public sealed class PackRunSimulationNode
{
    /// <summary>
    /// Creates a node; <paramref name="id"/> and <paramref name="templateId"/> must be
    /// non-blank, <paramref name="parameters"/> and <paramref name="children"/> non-null.
    /// </summary>
    public PackRunSimulationNode(
        string id,
        string templateId,
        PackRunStepKind kind,
        bool enabled,
        string? uses,
        string? approvalId,
        string? gateMessage,
        IReadOnlyDictionary<string, TaskPackPlanParameterValue> parameters,
        int? maxParallel,
        bool continueOnError,
        PackRunSimulationStatus status,
        IReadOnlyList<PackRunSimulationNode> children)
    {
        Id = string.IsNullOrWhiteSpace(id) ? throw new ArgumentException("Value cannot be null or whitespace.", nameof(id)) : id;
        TemplateId = string.IsNullOrWhiteSpace(templateId) ? throw new ArgumentException("Value cannot be null or whitespace.", nameof(templateId)) : templateId;
        Kind = kind;
        Enabled = enabled;
        Uses = uses;
        ApprovalId = approvalId;
        GateMessage = gateMessage;
        Parameters = parameters ?? throw new ArgumentNullException(nameof(parameters));
        MaxParallel = maxParallel;
        ContinueOnError = continueOnError;
        Status = status;
        Children = children ?? throw new ArgumentNullException(nameof(children));
    }

    public string Id { get; }
    public string TemplateId { get; }
    public PackRunStepKind Kind { get; }
    public bool Enabled { get; }
    public string? Uses { get; }
    public string? ApprovalId { get; }
    public string? GateMessage { get; }
    public IReadOnlyDictionary<string, TaskPackPlanParameterValue> Parameters { get; }
    public int? MaxParallel { get; }
    public bool ContinueOnError { get; }
    // Status the step would start in: Pending, Skipped (disabled), or gated.
    public PackRunSimulationStatus Status { get; }
    public IReadOnlyList<PackRunSimulationNode> Children { get; }

    // Shared empty child list to avoid per-leaf allocations.
    public static IReadOnlyList<PackRunSimulationNode> Empty { get; } =
        new ReadOnlyCollection<PackRunSimulationNode>(Array.Empty<PackRunSimulationNode>());
}

/// <summary>Simulated starting status of a step.</summary>
public enum PackRunSimulationStatus
{
    Pending = 0,
    Skipped,
    RequiresApproval,
    RequiresPolicy
}

/// <summary>
/// Projected plan output; either a file path or an expression may still need a
/// runtime value (see <see cref="RequiresRuntimeValue"/>).
/// </summary>
public sealed class PackRunSimulationOutput
{
    public PackRunSimulationOutput(
        string name,
        string type,
        TaskPackPlanParameterValue? path,
        TaskPackPlanParameterValue? expression)
    {
        Name = string.IsNullOrWhiteSpace(name) ? throw new ArgumentException("Value cannot be null or whitespace.", nameof(name)) : name;
        Type = string.IsNullOrWhiteSpace(type) ? throw new ArgumentException("Value cannot be null or whitespace.", nameof(type)) : type;
        Path = path;
        Expression = expression;
    }

    public string Name { get; }
    public string Type { get; }
    public TaskPackPlanParameterValue? Path { get; }
    public TaskPackPlanParameterValue? Expression { get; }

    // True when either source still depends on a value only known at run time.
    public bool RequiresRuntimeValue =>
        (Path?.RequiresRuntimeValue ?? false) ||
        (Expression?.RequiresRuntimeValue ?? false);

    // Shared empty list for plans without outputs.
    public static IReadOnlyList<PackRunSimulationOutput> Empty { get; } =
        new ReadOnlyCollection<PackRunSimulationOutput>(Array.Empty<PackRunSimulationOutput>());
}

View File

@@ -12,36 +12,40 @@ public sealed class TaskPackPlan
IReadOnlyList<TaskPackPlanStep> steps,
string hash,
IReadOnlyList<TaskPackPlanApproval> approvals,
IReadOnlyList<TaskPackPlanSecret> secrets,
IReadOnlyList<TaskPackPlanOutput> outputs)
{
Metadata = metadata;
Inputs = inputs;
Steps = steps;
Hash = hash;
Approvals = approvals;
Secrets = secrets;
Outputs = outputs;
}
public TaskPackPlanMetadata Metadata { get; }
public IReadOnlyDictionary<string, JsonNode?> Inputs { get; }
IReadOnlyList<TaskPackPlanSecret> secrets,
IReadOnlyList<TaskPackPlanOutput> outputs,
TaskPackPlanFailurePolicy? failurePolicy)
{
Metadata = metadata;
Inputs = inputs;
Steps = steps;
Hash = hash;
Approvals = approvals;
Secrets = secrets;
Outputs = outputs;
FailurePolicy = failurePolicy;
}
public TaskPackPlanMetadata Metadata { get; }
public IReadOnlyDictionary<string, JsonNode?> Inputs { get; }
public IReadOnlyList<TaskPackPlanStep> Steps { get; }
public string Hash { get; }
public IReadOnlyList<TaskPackPlanApproval> Approvals { get; }
public IReadOnlyList<TaskPackPlanSecret> Secrets { get; }
public IReadOnlyList<TaskPackPlanOutput> Outputs { get; }
}
public sealed record TaskPackPlanMetadata(string Name, string Version, string? Description, IReadOnlyList<string> Tags);
public sealed record TaskPackPlanStep(
public IReadOnlyList<TaskPackPlanSecret> Secrets { get; }
public IReadOnlyList<TaskPackPlanOutput> Outputs { get; }
public TaskPackPlanFailurePolicy? FailurePolicy { get; }
}
public sealed record TaskPackPlanMetadata(string Name, string Version, string? Description, IReadOnlyList<string> Tags);
public sealed record TaskPackPlanStep(
string Id,
string TemplateId,
string? Name,
@@ -71,11 +75,16 @@ public sealed record TaskPackPlanApproval(
public sealed record TaskPackPlanSecret(string Name, string Scope, string? Description);
public sealed record TaskPackPlanOutput(
string Name,
string Type,
TaskPackPlanParameterValue? Path,
TaskPackPlanParameterValue? Expression);
public sealed record TaskPackPlanOutput(
string Name,
string Type,
TaskPackPlanParameterValue? Path,
TaskPackPlanParameterValue? Expression);
public sealed record TaskPackPlanFailurePolicy(
int MaxAttempts,
int BackoffSeconds,
bool ContinueOnError);
public sealed class TaskPackPlanResult
{

View File

@@ -13,13 +13,14 @@ internal static class TaskPackPlanHasher
IReadOnlyDictionary<string, JsonNode?> inputs,
IReadOnlyList<TaskPackPlanStep> steps,
IReadOnlyList<TaskPackPlanApproval> approvals,
IReadOnlyList<TaskPackPlanSecret> secrets,
IReadOnlyList<TaskPackPlanOutput> outputs)
{
var canonical = new CanonicalPlan(
new CanonicalMetadata(metadata.Name, metadata.Version, metadata.Description, metadata.Tags),
inputs.ToDictionary(kvp => kvp.Key, kvp => kvp.Value, StringComparer.Ordinal),
steps.Select(ToCanonicalStep).ToList(),
IReadOnlyList<TaskPackPlanSecret> secrets,
IReadOnlyList<TaskPackPlanOutput> outputs,
TaskPackPlanFailurePolicy? failurePolicy)
{
var canonical = new CanonicalPlan(
new CanonicalMetadata(metadata.Name, metadata.Version, metadata.Description, metadata.Tags),
inputs.ToDictionary(kvp => kvp.Key, kvp => kvp.Value, StringComparer.Ordinal),
steps.Select(ToCanonicalStep).ToList(),
approvals
.OrderBy(a => a.Id, StringComparer.Ordinal)
.Select(a => new CanonicalApproval(a.Id, a.Grants.OrderBy(g => g, StringComparer.Ordinal).ToList(), a.ExpiresAfter, a.ReasonTemplate))
@@ -27,16 +28,19 @@ internal static class TaskPackPlanHasher
secrets
.OrderBy(s => s.Name, StringComparer.Ordinal)
.Select(s => new CanonicalSecret(s.Name, s.Scope, s.Description))
.ToList(),
outputs
.OrderBy(o => o.Name, StringComparer.Ordinal)
.Select(ToCanonicalOutput)
.ToList());
var json = CanonicalJson.Serialize(canonical);
using var sha256 = SHA256.Create();
var hashBytes = sha256.ComputeHash(Encoding.UTF8.GetBytes(json));
return ConvertToHex(hashBytes);
.ToList(),
outputs
.OrderBy(o => o.Name, StringComparer.Ordinal)
.Select(ToCanonicalOutput)
.ToList(),
failurePolicy is null
? null
: new CanonicalFailurePolicy(failurePolicy.MaxAttempts, failurePolicy.BackoffSeconds, failurePolicy.ContinueOnError));
var json = CanonicalJson.Serialize(canonical);
using var sha256 = SHA256.Create();
var hashBytes = sha256.ComputeHash(Encoding.UTF8.GetBytes(json));
return ConvertToHex(hashBytes);
}
private static string ConvertToHex(byte[] hashBytes)
@@ -66,13 +70,14 @@ internal static class TaskPackPlanHasher
step.GateMessage,
step.Children?.Select(ToCanonicalStep).ToList());
private sealed record CanonicalPlan(
CanonicalMetadata Metadata,
IDictionary<string, JsonNode?> Inputs,
IReadOnlyList<CanonicalPlanStep> Steps,
IReadOnlyList<CanonicalApproval> Approvals,
IReadOnlyList<CanonicalSecret> Secrets,
IReadOnlyList<CanonicalOutput> Outputs);
private sealed record CanonicalPlan(
CanonicalMetadata Metadata,
IDictionary<string, JsonNode?> Inputs,
IReadOnlyList<CanonicalPlanStep> Steps,
IReadOnlyList<CanonicalApproval> Approvals,
IReadOnlyList<CanonicalSecret> Secrets,
IReadOnlyList<CanonicalOutput> Outputs,
CanonicalFailurePolicy? FailurePolicy);
private sealed record CanonicalMetadata(string Name, string Version, string? Description, IReadOnlyList<string> Tags);
@@ -92,13 +97,15 @@ internal static class TaskPackPlanHasher
private sealed record CanonicalSecret(string Name, string Scope, string? Description);
private sealed record CanonicalParameter(JsonNode? Value, string? Expression, string? Error, bool RequiresRuntimeValue);
private sealed record CanonicalOutput(
string Name,
string Type,
CanonicalParameter? Path,
CanonicalParameter? Expression);
private sealed record CanonicalParameter(JsonNode? Value, string? Expression, string? Error, bool RequiresRuntimeValue);
private sealed record CanonicalOutput(
string Name,
string Type,
CanonicalParameter? Path,
CanonicalParameter? Expression);
private sealed record CanonicalFailurePolicy(int MaxAttempts, int BackoffSeconds, bool ContinueOnError);
private static CanonicalOutput ToCanonicalOutput(TaskPackPlanOutput output)
=> new(

View File

@@ -98,14 +98,16 @@ public sealed class TaskPackPlanner
return new TaskPackPlanResult(null, errors.ToImmutable());
}
var hash = TaskPackPlanHasher.ComputeHash(metadata, effectiveInputs, planSteps, planApprovals, planSecrets, planOutputs);
var plan = new TaskPackPlan(metadata, effectiveInputs, planSteps, hash, planApprovals, planSecrets, planOutputs);
return new TaskPackPlanResult(plan, ImmutableArray<TaskPackPlanError>.Empty);
}
private Dictionary<string, JsonNode?> MaterializeInputs(
IReadOnlyList<TaskPackInput>? definitions,
var failurePolicy = MaterializeFailurePolicy(manifest.Spec.Failure);
var hash = TaskPackPlanHasher.ComputeHash(metadata, effectiveInputs, planSteps, planApprovals, planSecrets, planOutputs, failurePolicy);
var plan = new TaskPackPlan(metadata, effectiveInputs, planSteps, hash, planApprovals, planSecrets, planOutputs, failurePolicy);
return new TaskPackPlanResult(plan, ImmutableArray<TaskPackPlanError>.Empty);
}
private Dictionary<string, JsonNode?> MaterializeInputs(
IReadOnlyList<TaskPackInput>? definitions,
IDictionary<string, JsonNode?>? providedInputs,
ImmutableArray<TaskPackPlanError>.Builder errors)
{
@@ -141,9 +143,22 @@ public sealed class TaskPackPlanner
}
}
return effective;
}
return effective;
}
// Derives the plan-level failure policy from the manifest's failure section.
// Returns null when no retry policy is declared (callers supply their own default).
private static TaskPackPlanFailurePolicy? MaterializeFailurePolicy(TaskPackFailure? failure)
{
    if (failure?.Retries is not TaskPackRetryPolicy retries)
    {
        return null;
    }
    // Clamp manifest values to a sane range: at least one attempt, non-negative backoff.
    var maxAttempts = retries.MaxAttempts <= 0 ? 1 : retries.MaxAttempts;
    var backoffSeconds = retries.BackoffSeconds < 0 ? 0 : retries.BackoffSeconds;
    // NOTE(review): ContinueOnError is hard-coded to false here rather than read from the
    // manifest — confirm that is intentional.
    return new TaskPackPlanFailurePolicy(maxAttempts, backoffSeconds, ContinueOnError: false);
}
private TaskPackPlanStep BuildStep(
string packName,
string packVersion,

View File

@@ -0,0 +1,191 @@
using System.Text.Json;
using StellaOps.TaskRunner.Core.Execution;
using StellaOps.TaskRunner.Core.Planning;
namespace StellaOps.TaskRunner.Infrastructure.Execution;
/// <summary>
/// File-system backed implementation of <see cref="IPackRunStateStore"/> intended for development and air-gapped smoke tests.
/// </summary>
/// <summary>
/// File-system backed implementation of <see cref="IPackRunStateStore"/> intended for development and air-gapped smoke tests.
/// Each run is persisted as one JSON document at <c>{root}/{sanitized-run-id}.json</c>.
/// </summary>
public sealed class FilePackRunStateStore : IPackRunStateStore
{
    private static readonly JsonSerializerOptions SerializerOptions = new(JsonSerializerDefaults.Web)
    {
        WriteIndented = true
    };

    private readonly string rootPath;

    // Serializes all file access. SaveAsync writes with FileShare.None, so reads racing an
    // in-flight write would otherwise fail intermittently with IOException; taking the same
    // semaphore on the read paths removes that race.
    private readonly SemaphoreSlim mutex = new(1, 1);

    /// <summary>Creates the store rooted at <paramref name="rootPath"/>, creating the directory if needed.</summary>
    public FilePackRunStateStore(string rootPath)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(rootPath);
        this.rootPath = Path.GetFullPath(rootPath);
        Directory.CreateDirectory(this.rootPath);
    }

    /// <summary>Loads the persisted state for <paramref name="runId"/>, or null when none exists.</summary>
    public async Task<PackRunState?> GetAsync(string runId, CancellationToken cancellationToken)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(runId);
        var path = GetPath(runId);
        await mutex.WaitAsync(cancellationToken).ConfigureAwait(false);
        try
        {
            if (!File.Exists(path))
            {
                return null;
            }

            var document = await ReadDocumentAsync(path, cancellationToken).ConfigureAwait(false);
            return document?.ToDomain();
        }
        finally
        {
            mutex.Release();
        }
    }

    /// <summary>Persists <paramref name="state"/>, overwriting any previous document for the run.</summary>
    public async Task SaveAsync(PackRunState state, CancellationToken cancellationToken)
    {
        ArgumentNullException.ThrowIfNull(state);
        var path = GetPath(state.RunId);
        var document = StateDocument.FromDomain(state);
        Directory.CreateDirectory(rootPath);
        await mutex.WaitAsync(cancellationToken).ConfigureAwait(false);
        try
        {
            await using var stream = File.Open(path, FileMode.Create, FileAccess.Write, FileShare.None);
            await JsonSerializer.SerializeAsync(stream, document, SerializerOptions, cancellationToken)
                .ConfigureAwait(false);
        }
        finally
        {
            mutex.Release();
        }
    }

    /// <summary>Lists all persisted run states, ordered deterministically by file name (ordinal).</summary>
    public async Task<IReadOnlyList<PackRunState>> ListAsync(CancellationToken cancellationToken)
    {
        if (!Directory.Exists(rootPath))
        {
            return Array.Empty<PackRunState>();
        }

        await mutex.WaitAsync(cancellationToken).ConfigureAwait(false);
        try
        {
            var states = new List<PackRunState>();
            var files = Directory.EnumerateFiles(rootPath, "*.json", SearchOption.TopDirectoryOnly)
                .OrderBy(file => file, StringComparer.Ordinal);
            foreach (var file in files)
            {
                cancellationToken.ThrowIfCancellationRequested();
                var document = await ReadDocumentAsync(file, cancellationToken).ConfigureAwait(false);
                if (document is not null)
                {
                    states.Add(document.ToDomain());
                }
            }

            return states;
        }
        finally
        {
            mutex.Release();
        }
    }

    // Shared read path used by GetAsync and ListAsync; callers hold the mutex.
    private static async Task<StateDocument?> ReadDocumentAsync(string path, CancellationToken cancellationToken)
    {
        await using var stream = File.Open(path, FileMode.Open, FileAccess.Read, FileShare.Read);
        return await JsonSerializer.DeserializeAsync<StateDocument>(stream, SerializerOptions, cancellationToken)
            .ConfigureAwait(false);
    }

    private string GetPath(string runId)
    {
        var safeName = SanitizeFileName(runId);
        return Path.Combine(rootPath, $"{safeName}.json");
    }

    // Replaces characters invalid in file names with '_'. NOTE(review): distinct run ids can
    // still collide after sanitization (and the invalid-char set is OS-dependent); acceptable
    // for a dev-only store, but confirm the run-id alphabet upstream.
    private static string SanitizeFileName(string value)
    {
        var result = value.Trim();
        foreach (var invalid in Path.GetInvalidFileNameChars())
        {
            result = result.Replace(invalid, '_');
        }
        return result;
    }

    // JSON-friendly shape of PackRunState: steps are flattened into a sorted list so the
    // serialized document is deterministic and diff-able.
    private sealed record StateDocument(
        string RunId,
        string PlanHash,
        TaskPackPlanFailurePolicy FailurePolicy,
        DateTimeOffset CreatedAt,
        DateTimeOffset UpdatedAt,
        IReadOnlyList<StepDocument> Steps)
    {
        public static StateDocument FromDomain(PackRunState state)
        {
            var steps = state.Steps.Values
                .OrderBy(step => step.StepId, StringComparer.Ordinal)
                .Select(step => new StepDocument(
                    step.StepId,
                    step.Kind,
                    step.Enabled,
                    step.ContinueOnError,
                    step.MaxParallel,
                    step.ApprovalId,
                    step.GateMessage,
                    step.Status,
                    step.Attempts,
                    step.LastTransitionAt,
                    step.NextAttemptAt,
                    step.StatusReason))
                .ToList();
            return new StateDocument(
                state.RunId,
                state.PlanHash,
                state.FailurePolicy,
                state.CreatedAt,
                state.UpdatedAt,
                steps);
        }

        public PackRunState ToDomain()
        {
            var steps = Steps.ToDictionary(
                step => step.StepId,
                step => new PackRunStepStateRecord(
                    step.StepId,
                    step.Kind,
                    step.Enabled,
                    step.ContinueOnError,
                    step.MaxParallel,
                    step.ApprovalId,
                    step.GateMessage,
                    step.Status,
                    step.Attempts,
                    step.LastTransitionAt,
                    step.NextAttemptAt,
                    step.StatusReason),
                StringComparer.Ordinal);
            return new PackRunState(
                RunId,
                PlanHash,
                FailurePolicy,
                CreatedAt,
                UpdatedAt,
                steps);
        }
    }

    private sealed record StepDocument(
        string StepId,
        PackRunStepKind Kind,
        bool Enabled,
        bool ContinueOnError,
        int? MaxParallel,
        string? ApprovalId,
        string? GateMessage,
        PackRunStepExecutionStatus Status,
        int Attempts,
        DateTimeOffset? LastTransitionAt,
        DateTimeOffset? NextAttemptAt,
        string? StatusReason);
}

View File

@@ -0,0 +1,24 @@
using System.Text.Json.Nodes;
using StellaOps.TaskRunner.Core.Execution;
using StellaOps.TaskRunner.Core.Planning;
namespace StellaOps.TaskRunner.Infrastructure.Execution;
/// <summary>
/// Step executor that performs no real work: every step succeeds unless its parameters
/// carry a boolean <c>simulateFailure</c> set to <c>true</c>, in which case a failure
/// result is returned. Useful for dry runs and worker-loop tests.
/// </summary>
public sealed class NoopPackRunStepExecutor : IPackRunStepExecutor
{
    public Task<PackRunStepExecutionResult> ExecuteAsync(
        PackRunExecutionStep step,
        IReadOnlyDictionary<string, TaskPackPlanParameterValue> parameters,
        CancellationToken cancellationToken)
    {
        var simulateFailure =
            parameters.TryGetValue("simulateFailure", out var parameter) &&
            parameter.Value is JsonValue json &&
            json.TryGetValue<bool>(out var flag) &&
            flag;

        var result = simulateFailure
            ? new PackRunStepExecutionResult(false, "Simulated failure requested.")
            : new PackRunStepExecutionResult(true);
        return Task.FromResult(result);
    }
}

View File

@@ -0,0 +1,105 @@
using StellaOps.TaskRunner.Core.Execution;
using StellaOps.TaskRunner.Core.Planning;
using StellaOps.TaskRunner.Infrastructure.Execution;
namespace StellaOps.TaskRunner.Tests;
/// <summary>
/// Integration-style tests for <see cref="FilePackRunStateStore"/> using real temp directories.
/// </summary>
public sealed class FilePackRunStateStoreTests
{
    [Fact]
    public async Task SaveAndGetAsync_RoundTripsState()
    {
        var directory = CreateTempDirectory();
        try
        {
            var store = new FilePackRunStateStore(directory);
            // Run id contains ':' to exercise file-name sanitization on save and load.
            var original = CreateState("run:primary");

            await store.SaveAsync(original, CancellationToken.None);
            var reloaded = await store.GetAsync("run:primary", CancellationToken.None);

            Assert.NotNull(reloaded);
            Assert.Equal(original.RunId, reloaded!.RunId);
            Assert.Equal(original.PlanHash, reloaded.PlanHash);
            Assert.Equal(original.FailurePolicy, reloaded.FailurePolicy);
            Assert.Equal(original.Steps.Count, reloaded.Steps.Count);
            var step = Assert.Single(reloaded.Steps);
            Assert.Equal("step-a", step.Key);
            // Record value equality verifies every step field survived serialization.
            Assert.Equal(original.Steps["step-a"], step.Value);
        }
        finally
        {
            TryDelete(directory);
        }
    }

    [Fact]
    public async Task ListAsync_ReturnsStatesInDeterministicOrder()
    {
        var directory = CreateTempDirectory();
        try
        {
            var store = new FilePackRunStateStore(directory);
            // Saved out of order on purpose; ListAsync must sort by file name (ordinal).
            var stateB = CreateState("run-b");
            var stateA = CreateState("run-a");
            await store.SaveAsync(stateB, CancellationToken.None);
            await store.SaveAsync(stateA, CancellationToken.None);

            var states = await store.ListAsync(CancellationToken.None);

            Assert.Collection(states,
                first => Assert.Equal("run-a", first.RunId),
                second => Assert.Equal("run-b", second.RunId));
        }
        finally
        {
            TryDelete(directory);
        }
    }

    // Builds a minimal single-step run state used by both tests.
    private static PackRunState CreateState(string runId)
    {
        var failurePolicy = new TaskPackPlanFailurePolicy(MaxAttempts: 3, BackoffSeconds: 30, ContinueOnError: false);
        var steps = new Dictionary<string, PackRunStepStateRecord>(StringComparer.Ordinal)
        {
            ["step-a"] = new PackRunStepStateRecord(
                StepId: "step-a",
                Kind: PackRunStepKind.Run,
                Enabled: true,
                ContinueOnError: false,
                MaxParallel: null,
                ApprovalId: null,
                GateMessage: null,
                Status: PackRunStepExecutionStatus.Pending,
                Attempts: 1,
                LastTransitionAt: DateTimeOffset.UtcNow,
                NextAttemptAt: null,
                StatusReason: null)
        };
        return PackRunState.Create(runId, "hash-123", failurePolicy, steps, DateTimeOffset.UtcNow);
    }

    // Unique per-test directory so parallel test runs cannot interfere.
    private static string CreateTempDirectory()
    {
        var path = Path.Combine(Path.GetTempPath(), "stellaops-taskrunner-tests", Guid.NewGuid().ToString("N"));
        Directory.CreateDirectory(path);
        return path;
    }

    private static void TryDelete(string directory)
    {
        try
        {
            if (Directory.Exists(directory))
            {
                Directory.Delete(directory, recursive: true);
            }
        }
        catch
        {
            // Swallow cleanup errors to avoid masking test assertions.
        }
    }
}

View File

@@ -0,0 +1,68 @@
using System.Text.Json.Nodes;
using StellaOps.TaskRunner.Core.Execution;
using StellaOps.TaskRunner.Core.Planning;
namespace StellaOps.TaskRunner.Tests;
/// <summary>
/// Verifies that <see cref="PackRunExecutionGraphBuilder"/> carries plan metadata
/// (parallelism, map fan-out, disabled conditionals) into the execution graph.
/// </summary>
public sealed class PackRunExecutionGraphBuilderTests
{
    [Fact]
    public void Build_GeneratesParallelMetadata()
    {
        var manifest = TestManifests.Load(TestManifests.Parallel);
        var planResult = new TaskPackPlanner().Plan(manifest);
        Assert.True(planResult.Success);

        var graph = new PackRunExecutionGraphBuilder().Build(planResult.Plan!);

        // Failure policy from the manifest (2 attempts / 10s backoff) must survive.
        Assert.Equal(2, graph.FailurePolicy.MaxAttempts);
        Assert.Equal(10, graph.FailurePolicy.BackoffSeconds);

        var parallelStep = Assert.Single(graph.Steps);
        Assert.Equal(PackRunStepKind.Parallel, parallelStep.Kind);
        Assert.True(parallelStep.Enabled);
        Assert.Equal(2, parallelStep.MaxParallel);
        Assert.True(parallelStep.ContinueOnError);
        Assert.Equal(2, parallelStep.Children.Count);
        Assert.All(parallelStep.Children, child => Assert.Equal(PackRunStepKind.Run, child.Kind));
    }

    [Fact]
    public void Build_PreservesMapIterationsAndDisabledSteps()
    {
        var planner = new TaskPackPlanner();
        var builder = new PackRunExecutionGraphBuilder();

        // Map manifest: three targets fan out into three run children.
        var mapInputs = new Dictionary<string, JsonNode?>
        {
            ["targets"] = new JsonArray("alpha", "beta", "gamma")
        };
        var mapGraph = builder.Build(planner.Plan(TestManifests.Load(TestManifests.Map), mapInputs).Plan!);
        var mapStep = Assert.Single(mapGraph.Steps);
        Assert.Equal(PackRunStepKind.Map, mapStep.Kind);
        Assert.Equal(3, mapStep.Children.Count);
        Assert.All(mapStep.Children, child => Assert.Equal(PackRunStepKind.Run, child.Kind));

        // Sample manifest with dryRun=true disables the conditional apply step.
        var conditionalInputs = new Dictionary<string, JsonNode?>
        {
            ["dryRun"] = JsonValue.Create(true)
        };
        var conditionalGraph = builder.Build(planner.Plan(TestManifests.Load(TestManifests.Sample), conditionalInputs).Plan!);
        var applyStep = conditionalGraph.Steps.Single(step => step.Id == "apply-step");
        Assert.False(applyStep.Enabled);
    }
}

View File

@@ -0,0 +1,150 @@
using System;
using System.Collections.Generic;
using StellaOps.TaskRunner.Core.Execution;
using StellaOps.TaskRunner.Core.Planning;
namespace StellaOps.TaskRunner.Tests;
/// <summary>
/// Tests for PackRunGateStateUpdater: approval/policy gates transition to
/// Succeeded or Failed based on the coordinator's decisions, while non-gate
/// steps are left untouched.
/// </summary>
public sealed class PackRunGateStateUpdaterTests
{
    // Fixed timestamps keep assertions on LastTransitionAt/UpdatedAt deterministic.
    private static readonly DateTimeOffset RequestedAt = DateTimeOffset.UnixEpoch;
    private static readonly DateTimeOffset UpdateTimestamp = DateTimeOffset.UnixEpoch.AddMinutes(5);

    [Fact]
    public void Apply_ApprovedGate_ClearsReasonAndSucceeds()
    {
        var plan = BuildApprovalPlan();
        var graph = new PackRunExecutionGraphBuilder().Build(plan);
        var state = CreateInitialState(plan, graph);
        var coordinator = PackRunApprovalCoordinator.Create(plan, RequestedAt);
        // Approving the gate should flip its record to Succeeded and clear the pending reason.
        coordinator.Approve("security-review", "approver-1", UpdateTimestamp);
        var result = PackRunGateStateUpdater.Apply(state, graph, coordinator, UpdateTimestamp);
        Assert.False(result.HasBlockingFailure);
        Assert.Equal(UpdateTimestamp, result.State.UpdatedAt);
        var gate = result.State.Steps["approval"];
        Assert.Equal(PackRunStepExecutionStatus.Succeeded, gate.Status);
        Assert.Null(gate.StatusReason);
        Assert.Equal(UpdateTimestamp, gate.LastTransitionAt);
    }

    [Fact]
    public void Apply_RejectedGate_FlagsFailure()
    {
        var plan = BuildApprovalPlan();
        var graph = new PackRunExecutionGraphBuilder().Build(plan);
        var state = CreateInitialState(plan, graph);
        var coordinator = PackRunApprovalCoordinator.Create(plan, RequestedAt);
        coordinator.Reject("security-review", "approver-1", UpdateTimestamp, "not-safe");
        var result = PackRunGateStateUpdater.Apply(state, graph, coordinator, UpdateTimestamp);
        // A rejected approval is a blocking failure: the gate record fails with an
        // "approval-rejected..." reason prefix.
        Assert.True(result.HasBlockingFailure);
        Assert.Equal(UpdateTimestamp, result.State.UpdatedAt);
        var gate = result.State.Steps["approval"];
        Assert.Equal(PackRunStepExecutionStatus.Failed, gate.Status);
        Assert.StartsWith("approval-rejected", gate.StatusReason, StringComparison.Ordinal);
        Assert.Equal(UpdateTimestamp, gate.LastTransitionAt);
    }

    [Fact]
    public void Apply_PolicyGate_ClearsPendingReason()
    {
        var plan = BuildPolicyPlan();
        var graph = new PackRunExecutionGraphBuilder().Build(plan);
        var state = CreateInitialState(plan, graph);
        var coordinator = PackRunApprovalCoordinator.Create(plan, RequestedAt);
        var result = PackRunGateStateUpdater.Apply(state, graph, coordinator, UpdateTimestamp);
        Assert.False(result.HasBlockingFailure);
        var gate = result.State.Steps["policy-check"];
        Assert.Equal(PackRunStepExecutionStatus.Succeeded, gate.Status);
        Assert.Null(gate.StatusReason);
        Assert.Equal(UpdateTimestamp, gate.LastTransitionAt);
        // Non-gate steps must be left untouched by the updater.
        var prepare = result.State.Steps["prepare"];
        Assert.Equal(PackRunStepExecutionStatus.Pending, prepare.Status);
        Assert.Null(prepare.StatusReason);
    }

    // Plans the sample manifest with dryRun=false so its approval gate stays enabled.
    private static TaskPackPlan BuildApprovalPlan()
    {
        var manifest = TestManifests.Load(TestManifests.Sample);
        var planner = new TaskPackPlanner();
        var inputs = new Dictionary<string, System.Text.Json.Nodes.JsonNode?>
        {
            ["dryRun"] = System.Text.Json.Nodes.JsonValue.Create(false)
        };
        return planner.Plan(manifest, inputs).Plan!;
    }

    // Plans the policy-gate manifest with default inputs.
    private static TaskPackPlan BuildPolicyPlan()
    {
        var manifest = TestManifests.Load(TestManifests.PolicyGate);
        var planner = new TaskPackPlanner();
        return planner.Plan(manifest).Plan!;
    }

    // Seeds a Pending record for every step in the graph; gates get their waiting
    // reason ("requires-approval"/"requires-policy") and disabled steps start Skipped.
    private static PackRunState CreateInitialState(TaskPackPlan plan, PackRunExecutionGraph graph)
    {
        var steps = new Dictionary<string, PackRunStepStateRecord>(StringComparer.Ordinal);
        foreach (var step in EnumerateSteps(graph.Steps))
        {
            var status = PackRunStepExecutionStatus.Pending;
            string? reason = null;
            if (!step.Enabled)
            {
                status = PackRunStepExecutionStatus.Skipped;
                reason = "disabled";
            }
            else if (step.Kind == PackRunStepKind.GateApproval)
            {
                reason = "requires-approval";
            }
            else if (step.Kind == PackRunStepKind.GatePolicy)
            {
                reason = "requires-policy";
            }
            steps[step.Id] = new PackRunStepStateRecord(
                step.Id,
                step.Kind,
                step.Enabled,
                step.ContinueOnError,
                step.MaxParallel,
                step.ApprovalId,
                step.GateMessage,
                status,
                Attempts: 0,
                LastTransitionAt: null,
                NextAttemptAt: null,
                StatusReason: reason);
        }
        return PackRunState.Create("run-1", plan.Hash, graph.FailurePolicy, steps, RequestedAt);
    }

    // Depth-first walk of the graph yielding each parent before its children.
    private static IEnumerable<PackRunExecutionStep> EnumerateSteps(IReadOnlyList<PackRunExecutionStep> steps)
    {
        foreach (var step in steps)
        {
            yield return step;
            if (step.Children.Count > 0)
            {
                foreach (var child in EnumerateSteps(step.Children))
                {
                    yield return child;
                }
            }
        }
    }
}

View File

@@ -0,0 +1,75 @@
using System.Text.Json.Nodes;
using StellaOps.TaskRunner.Core.Execution;
using StellaOps.TaskRunner.Core.Execution.Simulation;
using StellaOps.TaskRunner.Core.Planning;
namespace StellaOps.TaskRunner.Tests;
/// <summary>
/// Verifies that <see cref="PackRunSimulationEngine"/> reports gate statuses,
/// condition-based skips, default failure policies, and output projections.
/// </summary>
public sealed class PackRunSimulationEngineTests
{
    [Fact]
    public void Simulate_IdentifiesGateStatuses()
    {
        var plan = new TaskPackPlanner().Plan(TestManifests.Load(TestManifests.PolicyGate)).Plan!;

        var result = new PackRunSimulationEngine().Simulate(plan);

        var gateNode = result.Steps.Single(step => step.Kind == PackRunStepKind.GatePolicy);
        Assert.Equal(PackRunSimulationStatus.RequiresPolicy, gateNode.Status);
        var runNode = result.Steps.Single(step => step.Kind == PackRunStepKind.Run);
        Assert.Equal(PackRunSimulationStatus.Pending, runNode.Status);
    }

    [Fact]
    public void Simulate_MarksDisabledStepsAndOutputs()
    {
        // dryRun=true disables the conditional apply step in the sample manifest.
        var planInputs = new Dictionary<string, JsonNode?>
        {
            ["dryRun"] = JsonValue.Create(true)
        };
        var plan = new TaskPackPlanner().Plan(TestManifests.Load(TestManifests.Sample), planInputs).Plan!;

        var result = new PackRunSimulationEngine().Simulate(plan);

        var applyNode = result.Steps.Single(step => step.Id == "apply-step");
        Assert.Equal(PackRunSimulationStatus.Skipped, applyNode.Status);
        Assert.Empty(result.Outputs);
        // No explicit failure policy in the manifest, so defaults apply.
        Assert.Equal(PackRunExecutionGraph.DefaultFailurePolicy.MaxAttempts, result.FailurePolicy.MaxAttempts);
        Assert.Equal(PackRunExecutionGraph.DefaultFailurePolicy.BackoffSeconds, result.FailurePolicy.BackoffSeconds);
    }

    [Fact]
    public void Simulate_ProjectsOutputsAndRuntimeFlags()
    {
        var plan = new TaskPackPlanner().Plan(TestManifests.Load(TestManifests.Output)).Plan!;

        var result = new PackRunSimulationEngine().Simulate(plan);

        var onlyStep = Assert.Single(result.Steps);
        Assert.Equal(PackRunStepKind.Run, onlyStep.Kind);
        Assert.Collection(result.Outputs,
            bundle =>
            {
                Assert.Equal("bundlePath", bundle.Name);
                Assert.False(bundle.RequiresRuntimeValue);
            },
            evidence =>
            {
                Assert.Equal("evidenceModel", evidence.Name);
                Assert.True(evidence.RequiresRuntimeValue);
            });
    }
}

View File

@@ -0,0 +1,66 @@
using StellaOps.TaskRunner.Core.Execution;
using StellaOps.TaskRunner.Core.Planning;
namespace StellaOps.TaskRunner.Tests;
/// <summary>
/// State-transition tests for PackRunStepStateMachine: start, success,
/// failure-with-retry-backoff, and skip.
/// </summary>
public sealed class PackRunStepStateMachineTests
{
    // Policy permitting up to 3 attempts with a 5-second retry backoff.
    private static readonly TaskPackPlanFailurePolicy RetryTwicePolicy = new(MaxAttempts: 3, BackoffSeconds: 5, ContinueOnError: false);

    [Fact]
    public void Start_FromPending_SetsRunning()
    {
        var state = PackRunStepStateMachine.Create();
        var started = PackRunStepStateMachine.Start(state, DateTimeOffset.UnixEpoch);
        Assert.Equal(PackRunStepExecutionStatus.Running, started.Status);
        // Attempts increment on completion/failure, not on start.
        Assert.Equal(0, started.Attempts);
    }

    [Fact]
    public void CompleteSuccess_IncrementsAttempts()
    {
        var state = PackRunStepStateMachine.Create();
        var running = PackRunStepStateMachine.Start(state, DateTimeOffset.UnixEpoch);
        var completed = PackRunStepStateMachine.CompleteSuccess(running, DateTimeOffset.UnixEpoch.AddSeconds(1));
        Assert.Equal(PackRunStepExecutionStatus.Succeeded, completed.Status);
        Assert.Equal(1, completed.Attempts);
        Assert.Null(completed.NextAttemptAt);
    }

    [Fact]
    public void RegisterFailure_SchedulesRetryUntilMaxAttempts()
    {
        var state = PackRunStepStateMachine.Create();
        var running = PackRunStepStateMachine.Start(state, DateTimeOffset.UnixEpoch);
        // Attempt 1 fails at t=2s -> retry scheduled at t = 2 + backoff(5) = 7s.
        var firstFailure = PackRunStepStateMachine.RegisterFailure(running, DateTimeOffset.UnixEpoch.AddSeconds(2), RetryTwicePolicy);
        Assert.Equal(PackRunStepFailureOutcome.Retry, firstFailure.Outcome);
        Assert.Equal(PackRunStepExecutionStatus.Pending, firstFailure.State.Status);
        Assert.Equal(1, firstFailure.State.Attempts);
        Assert.Equal(DateTimeOffset.UnixEpoch.AddSeconds(7), firstFailure.State.NextAttemptAt);
        // Attempt 2 fails -> still retryable because MaxAttempts is 3.
        var restarted = PackRunStepStateMachine.Start(firstFailure.State, DateTimeOffset.UnixEpoch.AddSeconds(7));
        var secondFailure = PackRunStepStateMachine.RegisterFailure(restarted, DateTimeOffset.UnixEpoch.AddSeconds(9), RetryTwicePolicy);
        Assert.Equal(PackRunStepFailureOutcome.Retry, secondFailure.Outcome);
        Assert.Equal(2, secondFailure.State.Attempts);
        // Attempt 3 fails -> attempts exhausted: terminal Failed, no retry scheduled.
        var finalStart = PackRunStepStateMachine.Start(secondFailure.State, DateTimeOffset.UnixEpoch.AddSeconds(9 + RetryTwicePolicy.BackoffSeconds));
        var terminalFailure = PackRunStepStateMachine.RegisterFailure(finalStart, DateTimeOffset.UnixEpoch.AddSeconds(20), RetryTwicePolicy);
        Assert.Equal(PackRunStepFailureOutcome.Abort, terminalFailure.Outcome);
        Assert.Equal(PackRunStepExecutionStatus.Failed, terminalFailure.State.Status);
        Assert.Equal(3, terminalFailure.State.Attempts);
        Assert.Null(terminalFailure.State.NextAttemptAt);
    }

    [Fact]
    public void Skip_FromPending_SetsSkipped()
    {
        var state = PackRunStepStateMachine.Create();
        var skipped = PackRunStepStateMachine.Skip(state, DateTimeOffset.UnixEpoch.AddHours(1));
        Assert.Equal(PackRunStepExecutionStatus.Skipped, skipped.Status);
        Assert.Equal(0, skipped.Attempts);
    }
}

View File

@@ -126,11 +126,11 @@ public sealed class TaskPackPlannerTests
}
[Fact]
public void Plan_WithOutputs_ProjectsResolvedValues()
{
var manifest = TestManifests.Load(TestManifests.Output);
var planner = new TaskPackPlanner();
public void Plan_WithOutputs_ProjectsResolvedValues()
{
var manifest = TestManifests.Load(TestManifests.Output);
var planner = new TaskPackPlanner();
var result = planner.Plan(manifest);
Assert.True(result.Success);
var plan = result.Plan!;
@@ -141,11 +141,26 @@ public sealed class TaskPackPlannerTests
Assert.False(bundle.Path!.RequiresRuntimeValue);
Assert.Equal("artifacts/report.txt", bundle.Path.Value!.GetValue<string>());
var evidence = plan.Outputs.First(o => o.Name == "evidenceModel");
Assert.NotNull(evidence.Expression);
Assert.True(evidence.Expression!.RequiresRuntimeValue);
Assert.Equal("steps.generate.outputs.evidence", evidence.Expression.Expression);
}
var evidence = plan.Outputs.First(o => o.Name == "evidenceModel");
Assert.NotNull(evidence.Expression);
Assert.True(evidence.Expression!.RequiresRuntimeValue);
Assert.Equal("steps.generate.outputs.evidence", evidence.Expression.Expression);
}
[Fact]
public void Plan_WithFailurePolicy_PopulatesPlanFailure()
{
    // The failure-policy manifest declares maxAttempts=4, backoffSeconds=30.
    var planner = new TaskPackPlanner();
    var manifest = TestManifests.Load(TestManifests.FailurePolicy);

    var result = planner.Plan(manifest);

    Assert.True(result.Success);
    var plan = result.Plan!;
    Assert.NotNull(plan.FailurePolicy);
    var policy = plan.FailurePolicy!;
    Assert.Equal(4, policy.MaxAttempts);
    Assert.Equal(30, policy.BackoffSeconds);
    Assert.False(policy.ContinueOnError);
}
[Fact]
public void PolicyGateHints_IncludeRuntimeMetadata()

View File

@@ -138,11 +138,55 @@ spec:
- name: bundlePath
type: file
path: artifacts/report.txt
- name: evidenceModel
type: object
expression: "{{ steps.generate.outputs.evidence }}"
""";
- name: evidenceModel
type: object
expression: "{{ steps.generate.outputs.evidence }}"
""";
// Manifest whose single step declares a retry policy (4 attempts, 30s backoff);
// exercised by the TaskPackPlanner failure-policy tests.
public const string FailurePolicy = """
apiVersion: stellaops.io/pack.v1
kind: TaskPack
metadata:
name: failure-policy-pack
version: 1.0.0
spec:
steps:
- id: build
run:
uses: builtin:build
failure:
retries:
maxAttempts: 4
backoffSeconds: 30
message: "Build failed."
""";
// Manifest with one parallel step (maxParallel=2, continueOnError) fanning out
// into two run children, plus its own retry policy (2 attempts, 10s backoff);
// exercised by the execution-graph builder tests.
public const string Parallel = """
apiVersion: stellaops.io/pack.v1
kind: TaskPack
metadata:
name: parallel-pack
version: 1.1.0
spec:
steps:
- id: fanout
parallel:
maxParallel: 2
continueOnError: true
steps:
- id: lint
run:
uses: builtin:lint
- id: test
run:
uses: builtin:test
failure:
retries:
maxAttempts: 2
backoffSeconds: 10
message: "Parallel execution failed."
""";
public const string PolicyGate = """
apiVersion: stellaops.io/pack.v1
kind: TaskPack

View File

@@ -1,41 +1,242 @@
var builder = WebApplication.CreateBuilder(args);
// Add services to the container.
// Learn more about configuring OpenAPI at https://aka.ms/aspnet/openapi
builder.Services.AddOpenApi();
var app = builder.Build();
// Configure the HTTP request pipeline.
if (app.Environment.IsDevelopment())
{
app.MapOpenApi();
}
app.UseHttpsRedirection();
var summaries = new[]
{
"Freezing", "Bracing", "Chilly", "Cool", "Mild", "Warm", "Balmy", "Hot", "Sweltering", "Scorching"
};
app.MapGet("/weatherforecast", () =>
{
var forecast = Enumerable.Range(1, 5).Select(index =>
new WeatherForecast
(
DateOnly.FromDateTime(DateTime.Now.AddDays(index)),
Random.Shared.Next(-20, 55),
summaries[Random.Shared.Next(summaries.Length)]
))
.ToArray();
return forecast;
})
.WithName("GetWeatherForecast");
app.Run();
record WeatherForecast(DateOnly Date, int TemperatureC, string? Summary)
{
public int TemperatureF => 32 + (int)(TemperatureC / 0.5556);
}
using System.Text.Json.Nodes;
using Microsoft.AspNetCore.Mvc;
using Microsoft.Extensions.Options;
using StellaOps.TaskRunner.Core.Execution;
using StellaOps.TaskRunner.Core.Execution.Simulation;
using StellaOps.TaskRunner.Core.Planning;
using StellaOps.TaskRunner.Core.TaskPacks;
using StellaOps.TaskRunner.Infrastructure.Execution;
using StellaOps.TaskRunner.WebService;
var builder = WebApplication.CreateBuilder(args);

// Bind service options from the "TaskRunner" configuration section.
builder.Services.Configure<TaskRunnerServiceOptions>(builder.Configuration.GetSection("TaskRunner"));
builder.Services.AddSingleton<TaskPackManifestLoader>();
builder.Services.AddSingleton<TaskPackPlanner>();
builder.Services.AddSingleton<PackRunSimulationEngine>();
builder.Services.AddSingleton<PackRunExecutionGraphBuilder>();
builder.Services.AddSingleton<IPackRunStateStore>(sp =>
{
    var options = sp.GetRequiredService<IOptions<TaskRunnerServiceOptions>>().Value;
    return new FilePackRunStateStore(options.RunStatePath);
});
builder.Services.AddOpenApi();

var app = builder.Build();

if (app.Environment.IsDevelopment())
{
    app.MapOpenApi();
}

// Plans and simulates a task-pack manifest without executing it.
// The handler is fully synchronous, so it is not marked `async`: the original
// `async` lambda contained no `await` (CS1998) and its CancellationToken was unused.
app.MapPost("/v1/task-runner/simulations", (
    [FromBody] SimulationRequest request,
    TaskPackManifestLoader loader,
    TaskPackPlanner planner,
    PackRunSimulationEngine simulationEngine) =>
{
    if (string.IsNullOrWhiteSpace(request.Manifest))
    {
        return Results.BadRequest(new { error = "Manifest is required." });
    }

    TaskPackManifest manifest;
    try
    {
        manifest = loader.Deserialize(request.Manifest);
    }
    catch (Exception ex)
    {
        return Results.BadRequest(new { error = "Invalid manifest", detail = ex.Message });
    }

    var inputs = ConvertInputs(request.Inputs);
    var planResult = planner.Plan(manifest, inputs);
    if (!planResult.Success || planResult.Plan is null)
    {
        return Results.BadRequest(new
        {
            errors = planResult.Errors.Select(error => new { error.Path, error.Message })
        });
    }

    var plan = planResult.Plan;
    var simulation = simulationEngine.Simulate(plan);
    var response = SimulationMapper.ToResponse(plan, simulation);
    return Results.Ok(response);
}).WithName("SimulateTaskPack");

// Returns persisted execution state for a single run; 404 when unknown.
app.MapGet("/v1/task-runner/runs/{runId}", async (
    string runId,
    IPackRunStateStore stateStore,
    CancellationToken cancellationToken) =>
{
    if (string.IsNullOrWhiteSpace(runId))
    {
        return Results.BadRequest(new { error = "runId is required." });
    }

    var state = await stateStore.GetAsync(runId, cancellationToken).ConfigureAwait(false);
    if (state is null)
    {
        return Results.NotFound();
    }

    return Results.Ok(RunStateMapper.ToResponse(state));
}).WithName("GetRunState");

app.MapGet("/", () => Results.Redirect("/openapi"));

app.Run();
// Copies the request's input object into a detached dictionary (deep-cloning
// each value) so downstream planning cannot observe mutations of the payload.
// Returns null when no inputs were supplied.
static IDictionary<string, JsonNode?>? ConvertInputs(JsonObject? node)
{
    if (node is null)
    {
        return null;
    }

    var inputs = new Dictionary<string, JsonNode?>(StringComparer.Ordinal);
    foreach (var (key, value) in node)
    {
        inputs[key] = value?.DeepClone();
    }

    return inputs;
}
/// <summary>Request body for POST /v1/task-runner/simulations: the raw manifest text plus optional input overrides.</summary>
internal sealed record SimulationRequest(string Manifest, JsonObject? Inputs);

/// <summary>Simulation result payload: plan hash, effective failure policy, step tree, projected outputs, and pending-approval flag.</summary>
internal sealed record SimulationResponse(
    string PlanHash,
    FailurePolicyResponse FailurePolicy,
    IReadOnlyList<SimulationStepResponse> Steps,
    IReadOnlyList<SimulationOutputResponse> Outputs,
    bool HasPendingApprovals);

/// <summary>One simulated step; Kind/Status are enum names rendered as strings, Children nests parallel/map sub-steps.</summary>
internal sealed record SimulationStepResponse(
    string Id,
    string TemplateId,
    string Kind,
    bool Enabled,
    string Status,
    string? StatusReason,
    string? Uses,
    string? ApprovalId,
    string? GateMessage,
    int? MaxParallel,
    bool ContinueOnError,
    IReadOnlyList<SimulationStepResponse> Children);

/// <summary>Projected plan output; RequiresRuntimeValue marks outputs that cannot be resolved until execution.</summary>
internal sealed record SimulationOutputResponse(
    string Name,
    string Type,
    bool RequiresRuntimeValue,
    string? PathExpression,
    string? ValueExpression);

/// <summary>Retry settings echoed back to clients.</summary>
internal sealed record FailurePolicyResponse(int MaxAttempts, int BackoffSeconds, bool ContinueOnError);

/// <summary>Response body for GET /v1/task-runner/runs/{runId}: persisted run state.</summary>
internal sealed record RunStateResponse(
    string RunId,
    string PlanHash,
    FailurePolicyResponse FailurePolicy,
    DateTimeOffset CreatedAt,
    DateTimeOffset UpdatedAt,
    IReadOnlyList<RunStateStepResponse> Steps);

/// <summary>Per-step execution state within a run-state response.</summary>
internal sealed record RunStateStepResponse(
    string StepId,
    string Kind,
    bool Enabled,
    bool ContinueOnError,
    int? MaxParallel,
    string? ApprovalId,
    string? GateMessage,
    string Status,
    int Attempts,
    DateTimeOffset? LastTransitionAt,
    DateTimeOffset? NextAttemptAt,
    string? StatusReason);
/// <summary>
/// Projects a <see cref="PackRunSimulationResult"/> (plus its plan) into the
/// wire-level DTOs returned by the simulations endpoint.
/// </summary>
internal static class SimulationMapper
{
    public static SimulationResponse ToResponse(TaskPackPlan plan, PackRunSimulationResult result)
    {
        // Fall back to the engine-wide default when the plan declared no policy.
        var failurePolicy = result.FailurePolicy ?? PackRunExecutionGraph.DefaultFailurePolicy;
        var steps = result.Steps.Select(MapStep).ToList();
        var outputs = result.Outputs.Select(MapOutput).ToList();
        return new SimulationResponse(
            plan.Hash,
            new FailurePolicyResponse(failurePolicy.MaxAttempts, failurePolicy.BackoffSeconds, failurePolicy.ContinueOnError),
            steps,
            outputs,
            result.HasPendingApprovals);
    }

    // Recursively maps a simulation node and its children to the response shape.
    private static SimulationStepResponse MapStep(PackRunSimulationNode node)
    {
        var children = node.Children.Select(MapStep).ToList();
        return new SimulationStepResponse(
            node.Id,
            node.TemplateId,
            node.Kind.ToString(),
            node.Enabled,
            node.Status.ToString(),
            MapStatusReason(node.Status),
            node.Uses,
            node.ApprovalId,
            node.GateMessage,
            node.MaxParallel,
            node.ContinueOnError,
            children);
    }

    // Gate/skip statuses carry a machine-readable reason; all others carry none.
    // Switching on the enum directly replaces the original ToString()+nameof
    // string match, avoiding the redundant enum->string conversion per step.
    private static string? MapStatusReason(PackRunSimulationStatus status) => status switch
    {
        PackRunSimulationStatus.RequiresApproval => "requires-approval",
        PackRunSimulationStatus.RequiresPolicy => "requires-policy",
        PackRunSimulationStatus.Skipped => "condition-false",
        _ => null,
    };

    private static SimulationOutputResponse MapOutput(PackRunSimulationOutput output)
        => new(
            output.Name,
            output.Type,
            output.RequiresRuntimeValue,
            output.Path?.Expression,
            output.Expression?.Expression);
}
/// <summary>Shapes persisted run state into the run-state response DTOs.</summary>
internal static class RunStateMapper
{
    public static RunStateResponse ToResponse(PackRunState state)
    {
        var policy = state.FailurePolicy ?? PackRunExecutionGraph.DefaultFailurePolicy;

        // Emit steps sorted by id so the payload ordering is deterministic.
        var steps = new List<RunStateStepResponse>();
        foreach (var record in state.Steps.Values.OrderBy(s => s.StepId, StringComparer.Ordinal))
        {
            steps.Add(new RunStateStepResponse(
                record.StepId,
                record.Kind.ToString(),
                record.Enabled,
                record.ContinueOnError,
                record.MaxParallel,
                record.ApprovalId,
                record.GateMessage,
                record.Status.ToString(),
                record.Attempts,
                record.LastTransitionAt,
                record.NextAttemptAt,
                record.StatusReason));
        }

        return new RunStateResponse(
            state.RunId,
            state.PlanHash,
            new FailurePolicyResponse(policy.MaxAttempts, policy.BackoffSeconds, policy.ContinueOnError),
            state.CreatedAt,
            state.UpdatedAt,
            steps);
    }
}

View File

@@ -0,0 +1,6 @@
namespace StellaOps.TaskRunner.WebService;
/// <summary>
/// Web-service configuration, bound from the "TaskRunner" configuration section.
/// </summary>
public sealed class TaskRunnerServiceOptions
{
    /// <summary>
    /// Directory where pack-run state files are persisted.
    /// Defaults to "state/runs" under the application base directory.
    /// </summary>
    public string RunStatePath { get; set; } = Path.Combine(AppContext.BaseDirectory, "state", "runs");
}

View File

@@ -1,9 +1,12 @@
{
"Logging": {
"LogLevel": {
"Default": "Information",
"Microsoft.AspNetCore": "Warning"
}
},
"AllowedHosts": "*"
}
"Logging": {
"LogLevel": {
"Default": "Information",
"Microsoft.AspNetCore": "Warning"
}
},
"AllowedHosts": "*",
"TaskRunner": {
"RunStatePath": "state/runs"
}
}

View File

@@ -3,6 +3,7 @@ using StellaOps.AirGap.Policy;
using StellaOps.TaskRunner.Core.Execution;
using StellaOps.TaskRunner.Infrastructure.Execution;
using StellaOps.TaskRunner.Worker.Services;
using StellaOps.TaskRunner.Core.Execution.Simulation;
var builder = Host.CreateApplicationBuilder(args);
@@ -23,10 +24,10 @@ builder.Services.AddSingleton<IPackRunJobDispatcher>(sp =>
var egressPolicy = sp.GetRequiredService<IEgressPolicy>();
return new FilesystemPackRunDispatcher(options.Value.QueuePath, options.Value.ArchivePath, egressPolicy);
});
builder.Services.AddSingleton<IPackRunNotificationPublisher>(sp =>
{
var options = sp.GetRequiredService<IOptions<NotificationOptions>>().Value;
builder.Services.AddSingleton<IPackRunNotificationPublisher>(sp =>
{
var options = sp.GetRequiredService<IOptions<NotificationOptions>>().Value;
if (options.ApprovalEndpoint is not null || options.PolicyEndpoint is not null)
{
return new HttpPackRunNotificationPublisher(
@@ -34,12 +35,21 @@ builder.Services.AddSingleton<IPackRunNotificationPublisher>(sp =>
sp.GetRequiredService<IOptions<NotificationOptions>>(),
sp.GetRequiredService<ILogger<HttpPackRunNotificationPublisher>>());
}
return new LoggingPackRunNotificationPublisher(sp.GetRequiredService<ILogger<LoggingPackRunNotificationPublisher>>());
});
builder.Services.AddSingleton<PackRunProcessor>();
builder.Services.AddHostedService<PackRunWorkerService>();
var host = builder.Build();
host.Run();
return new LoggingPackRunNotificationPublisher(sp.GetRequiredService<ILogger<LoggingPackRunNotificationPublisher>>());
});
builder.Services.AddSingleton<IPackRunStateStore>(sp =>
{
var options = sp.GetRequiredService<IOptions<PackRunWorkerOptions>>();
return new FilePackRunStateStore(options.Value.RunStatePath);
});
builder.Services.AddSingleton<IPackRunStepExecutor, NoopPackRunStepExecutor>();
builder.Services.AddSingleton<PackRunExecutionGraphBuilder>();
builder.Services.AddSingleton<PackRunSimulationEngine>();
builder.Services.AddSingleton<PackRunProcessor>();
builder.Services.AddHostedService<PackRunWorkerService>();
var host = builder.Build();
host.Run();

View File

@@ -8,5 +8,7 @@ public sealed class PackRunWorkerOptions
public string ArchivePath { get; set; } = Path.Combine(AppContext.BaseDirectory, "queue", "archive");
public string ApprovalStorePath { get; set; } = Path.Combine(AppContext.BaseDirectory, "approvals");
}
public string ApprovalStorePath { get; set; } = Path.Combine(AppContext.BaseDirectory, "approvals");
public string RunStatePath { get; set; } = Path.Combine(AppContext.BaseDirectory, "state", "runs");
}

View File

@@ -1,49 +1,540 @@
using StellaOps.TaskRunner.Core.Execution;
using Microsoft.Extensions.Options;
namespace StellaOps.TaskRunner.Worker.Services;
/// <summary>
/// Background loop that drains pack-run jobs from the dispatcher queue and
/// hands each one to the <see cref="PackRunProcessor"/>, idling between polls.
/// </summary>
public sealed class PackRunWorkerService : BackgroundService
{
    private readonly IPackRunJobDispatcher dispatcher;
    private readonly PackRunProcessor processor;
    private readonly PackRunWorkerOptions options;
    private readonly ILogger<PackRunWorkerService> logger;

    /// <summary>Validates and captures all collaborators; throws <see cref="ArgumentNullException"/> on any null dependency.</summary>
    public PackRunWorkerService(
        IPackRunJobDispatcher dispatcher,
        PackRunProcessor processor,
        IOptions<PackRunWorkerOptions> options,
        ILogger<PackRunWorkerService> logger)
    {
        this.dispatcher = dispatcher ?? throw new ArgumentNullException(nameof(dispatcher));
        this.processor = processor ?? throw new ArgumentNullException(nameof(processor));
        this.options = options?.Value ?? throw new ArgumentNullException(nameof(options));
        this.logger = logger ?? throw new ArgumentNullException(nameof(logger));
    }

    protected override async Task ExecuteAsync(CancellationToken stoppingToken)
    {
        while (!stoppingToken.IsCancellationRequested)
        {
            var context = await dispatcher.TryDequeueAsync(stoppingToken).ConfigureAwait(false);
            if (context is null)
            {
                // Queue empty: back off for the configured idle delay before polling again.
                await Task.Delay(options.IdleDelay, stoppingToken).ConfigureAwait(false);
                continue;
            }
            logger.LogInformation("Processing pack run {RunId}.", context.RunId);
            var result = await processor.ProcessNewRunAsync(context, stoppingToken).ConfigureAwait(false);
            // The processor's result only drives logging here; resumption is handled elsewhere.
            if (result.ShouldResumeImmediately)
            {
                logger.LogInformation("Run {RunId} is ready to resume immediately.", context.RunId);
            }
            else
            {
                logger.LogInformation("Run {RunId} is awaiting approvals.", context.RunId);
            }
        }
    }
}
using System.Collections.Concurrent;
using System.Collections.ObjectModel;
using System.Text.Json.Nodes;
using Microsoft.Extensions.Options;
using StellaOps.TaskRunner.Core.Execution;
using StellaOps.TaskRunner.Core.Execution.Simulation;
using StellaOps.TaskRunner.Core.Planning;
namespace StellaOps.TaskRunner.Worker.Services;
public sealed class PackRunWorkerService : BackgroundService
{
private const string ChildFailureReason = "child-failure";
private const string AwaitingRetryReason = "awaiting-retry";
private readonly IPackRunJobDispatcher dispatcher;
private readonly PackRunProcessor processor;
private readonly PackRunWorkerOptions options;
private readonly IPackRunStateStore stateStore;
private readonly PackRunExecutionGraphBuilder graphBuilder;
private readonly PackRunSimulationEngine simulationEngine;
private readonly IPackRunStepExecutor executor;
private readonly ILogger<PackRunWorkerService> logger;
/// <summary>Validates and captures all collaborators for the worker loop.</summary>
/// <exception cref="ArgumentNullException">Any dependency is null.</exception>
public PackRunWorkerService(
    IPackRunJobDispatcher dispatcher,
    PackRunProcessor processor,
    IPackRunStateStore stateStore,
    PackRunExecutionGraphBuilder graphBuilder,
    PackRunSimulationEngine simulationEngine,
    IPackRunStepExecutor executor,
    IOptions<PackRunWorkerOptions> options,
    ILogger<PackRunWorkerService> logger)
{
    ArgumentNullException.ThrowIfNull(dispatcher);
    ArgumentNullException.ThrowIfNull(processor);
    ArgumentNullException.ThrowIfNull(stateStore);
    ArgumentNullException.ThrowIfNull(graphBuilder);
    ArgumentNullException.ThrowIfNull(simulationEngine);
    ArgumentNullException.ThrowIfNull(executor);
    ArgumentNullException.ThrowIfNull(logger);

    this.dispatcher = dispatcher;
    this.processor = processor;
    this.stateStore = stateStore;
    this.graphBuilder = graphBuilder;
    this.simulationEngine = simulationEngine;
    this.executor = executor;
    // Unwrap the options snapshot once; a null wrapper or value is a config error.
    this.options = options?.Value ?? throw new ArgumentNullException(nameof(options));
    this.logger = logger;
}
/// <summary>
/// Main worker loop: polls the dispatcher until shutdown, processing each
/// dequeued run and idle-waiting when the queue is empty. Per-run exceptions
/// are logged and swallowed so one bad run cannot stop the worker.
/// </summary>
protected override async Task ExecuteAsync(CancellationToken stoppingToken)
{
    while (!stoppingToken.IsCancellationRequested)
    {
        var job = await dispatcher.TryDequeueAsync(stoppingToken).ConfigureAwait(false);
        if (job is not null)
        {
            try
            {
                await ProcessRunAsync(job, stoppingToken).ConfigureAwait(false);
            }
            catch (OperationCanceledException) when (stoppingToken.IsCancellationRequested)
            {
                // Shutdown requested mid-run: leave the loop cleanly.
                break;
            }
            catch (Exception ex)
            {
                logger.LogError(ex, "Unhandled exception while processing run {RunId}.", job.RunId);
            }

            continue;
        }

        // Nothing queued: back off for the configured idle delay.
        await Task.Delay(options.IdleDelay, stoppingToken).ConfigureAwait(false);
    }
}
/// <summary>
/// Orchestrates one dequeued run: notifies the processor, loads or seeds
/// persisted state, applies gate decisions, then executes the graph and
/// persists the updated state.
/// </summary>
private async Task ProcessRunAsync(PackRunExecutionContext context, CancellationToken cancellationToken)
{
    logger.LogInformation("Processing pack run {RunId}.", context.RunId);
    // The processor runs first; its result carries the approval coordinator and
    // tells us whether the run may proceed now or must wait on external gates.
    var processorResult = await processor.ProcessNewRunAsync(context, cancellationToken).ConfigureAwait(false);
    var graph = graphBuilder.Build(context.Plan);
    // Reuse persisted state only when it matches the current plan hash;
    // otherwise (first sighting, or the plan changed) seed fresh state.
    var state = await stateStore.GetAsync(context.RunId, cancellationToken).ConfigureAwait(false);
    if (state is null || !string.Equals(state.PlanHash, context.Plan.Hash, StringComparison.Ordinal))
    {
        state = await CreateInitialStateAsync(context, graph, cancellationToken).ConfigureAwait(false);
    }
    if (!processorResult.ShouldResumeImmediately)
    {
        // Gates still open; the seeded state was already saved by CreateInitialStateAsync.
        logger.LogInformation("Run {RunId} awaiting approvals or policy gates.", context.RunId);
        return;
    }
    // Fold the coordinator's approval/policy decisions into the step records.
    var gateUpdate = PackRunGateStateUpdater.Apply(state, graph, processorResult.ApprovalCoordinator, DateTimeOffset.UtcNow);
    state = gateUpdate.State;
    if (gateUpdate.HasBlockingFailure)
    {
        // Persist the failed gate state so the outcome is visible, then stop.
        await stateStore.SaveAsync(state, cancellationToken).ConfigureAwait(false);
        logger.LogWarning("Run {RunId} halted because a gate failed.", context.RunId);
        return;
    }
    var updatedState = await ExecuteGraphAsync(context, graph, state, cancellationToken).ConfigureAwait(false);
    await stateStore.SaveAsync(updatedState, cancellationToken).ConfigureAwait(false);
    // A run is complete only when every step ended Succeeded or Skipped.
    if (updatedState.Steps.Values.All(step => step.Status is PackRunStepExecutionStatus.Succeeded or PackRunStepExecutionStatus.Skipped))
    {
        logger.LogInformation("Run {RunId} finished successfully.", context.RunId);
    }
    else
    {
        logger.LogInformation("Run {RunId} paused with pending work.", context.RunId);
    }
}
/// <summary>
/// Seeds and persists the initial per-step state for a run, using a dry
/// simulation of the plan to pre-mark gate waits and condition-false skips.
/// </summary>
private async Task<PackRunState> CreateInitialStateAsync(
    PackRunExecutionContext context,
    PackRunExecutionGraph graph,
    CancellationToken cancellationToken)
{
    var now = DateTimeOffset.UtcNow;
    var simulation = simulationEngine.Simulate(context.Plan);
    var simulationByStepId = IndexSimulation(simulation.Steps);

    var records = new Dictionary<string, PackRunStepStateRecord>(StringComparer.Ordinal);
    foreach (var step in EnumerateSteps(graph.Steps))
    {
        // Steps absent from the simulation index default to Pending.
        var simulatedStatus = simulationByStepId.TryGetValue(step.Id, out var node)
            ? node.Status
            : PackRunSimulationStatus.Pending;

        // Disabled steps are skipped outright; otherwise the simulated status
        // determines the initial status/reason pair.
        var (status, statusReason) = !step.Enabled
            ? (PackRunStepExecutionStatus.Skipped, "disabled")
            : simulatedStatus switch
            {
                PackRunSimulationStatus.RequiresApproval => (PackRunStepExecutionStatus.Pending, "requires-approval"),
                PackRunSimulationStatus.RequiresPolicy => (PackRunStepExecutionStatus.Pending, "requires-policy"),
                PackRunSimulationStatus.Skipped => (PackRunStepExecutionStatus.Skipped, "condition-false"),
                _ => (PackRunStepExecutionStatus.Pending, (string?)null),
            };

        records[step.Id] = new PackRunStepStateRecord(
            step.Id,
            step.Kind,
            step.Enabled,
            step.ContinueOnError,
            step.MaxParallel,
            step.ApprovalId,
            step.GateMessage,
            status,
            Attempts: 0,
            LastTransitionAt: null,
            NextAttemptAt: null,
            StatusReason: statusReason);
    }

    var failurePolicy = graph.FailurePolicy ?? PackRunExecutionGraph.DefaultFailurePolicy;
    var state = PackRunState.Create(context.RunId, context.Plan.Hash, failurePolicy, records, now);
    await stateStore.SaveAsync(state, cancellationToken).ConfigureAwait(false);
    return state;
}
private async Task<PackRunState> ExecuteGraphAsync(
    PackRunExecutionContext context,
    PackRunExecutionGraph graph,
    PackRunState state,
    CancellationToken cancellationToken)
{
    // Copy step records into a thread-safe map so parallel children can update
    // their own entries concurrently during the sweep.
    var liveSteps = new ConcurrentDictionary<string, PackRunStepStateRecord>(state.Steps, StringComparer.Ordinal);
    var policy = graph.FailurePolicy ?? PackRunExecutionGraph.DefaultFailurePolicy;
    var runContext = new ExecutionContext(context.RunId, policy, liveSteps, cancellationToken);
    foreach (var rootStep in graph.Steps)
    {
        var outcome = await ExecuteStepAsync(rootStep, runContext).ConfigureAwait(false);
        if (outcome is StepExecutionOutcome.AbortRun or StepExecutionOutcome.Defer)
        {
            // A deferral or abort halts the top-level sweep; remaining steps stay as-is.
            break;
        }
    }
    return state with
    {
        UpdatedAt = DateTimeOffset.UtcNow,
        Steps = new ReadOnlyDictionary<string, PackRunStepStateRecord>(liveSteps)
    };
}
private async Task<StepExecutionOutcome> ExecuteStepAsync(
    PackRunExecutionStep step,
    ExecutionContext executionContext)
{
    // Dispatches one step to the handler for its kind, after filtering out work
    // that is untracked, disabled, already terminal, or inside a retry window.
    executionContext.CancellationToken.ThrowIfCancellationRequested();
    if (!executionContext.Steps.TryGetValue(step.Id, out var record)
        || !record.Enabled
        || record.Status is PackRunStepExecutionStatus.Succeeded or PackRunStepExecutionStatus.Skipped)
    {
        return StepExecutionOutcome.Continue;
    }
    // Respect the retry backoff persisted by a previous failed attempt.
    if (record.NextAttemptAt is { } scheduled && scheduled > DateTimeOffset.UtcNow)
    {
        logger.LogInformation(
            "Run {RunId} step {StepId} waiting until {NextAttempt} for retry.",
            executionContext.RunId,
            record.StepId,
            scheduled);
        return StepExecutionOutcome.Defer;
    }
    if (step.Kind is PackRunStepKind.GateApproval or PackRunStepKind.GatePolicy)
    {
        // A gate step that reaches execution has been cleared upstream; record the pass.
        executionContext.Steps[step.Id] = record with
        {
            Status = PackRunStepExecutionStatus.Succeeded,
            StatusReason = null,
            LastTransitionAt = DateTimeOffset.UtcNow,
            NextAttemptAt = null
        };
        return StepExecutionOutcome.Continue;
    }
    if (step.Kind == PackRunStepKind.Parallel)
    {
        return await ExecuteParallelStepAsync(step, executionContext).ConfigureAwait(false);
    }
    if (step.Kind == PackRunStepKind.Map)
    {
        return await ExecuteMapStepAsync(step, executionContext).ConfigureAwait(false);
    }
    if (step.Kind == PackRunStepKind.Run)
    {
        return await ExecuteRunStepAsync(step, executionContext).ConfigureAwait(false);
    }
    // Unknown kinds are skipped rather than failing the run.
    logger.LogWarning("Run {RunId} encountered unsupported step kind '{Kind}' for step {StepId}. Marking as skipped.",
        executionContext.RunId,
        step.Kind,
        step.Id);
    executionContext.Steps[step.Id] = record with
    {
        Status = PackRunStepExecutionStatus.Skipped,
        StatusReason = "unsupported-kind",
        LastTransitionAt = DateTimeOffset.UtcNow
    };
    return StepExecutionOutcome.Continue;
}
private async Task<StepExecutionOutcome> ExecuteRunStepAsync(
    PackRunExecutionStep step,
    ExecutionContext executionContext)
{
    // Drives a single run step through the state machine: start (if pending),
    // invoke the executor, then record success or consult the failure policy.
    var record = executionContext.Steps[step.Id];
    var machineState = new PackRunStepState(record.Status, record.Attempts, record.LastTransitionAt, record.NextAttemptAt);
    if (machineState.Status == PackRunStepExecutionStatus.Pending)
    {
        // Transition Pending -> started before invoking the executor so a crash
        // mid-execution is observable from persisted state.
        machineState = PackRunStepStateMachine.Start(machineState, DateTimeOffset.UtcNow);
        record = record with
        {
            Status = machineState.Status,
            LastTransitionAt = machineState.LastTransitionAt,
            NextAttemptAt = machineState.NextAttemptAt,
            StatusReason = null
        };
        executionContext.Steps[step.Id] = record;
    }
    var parameters = step.Parameters ?? PackRunExecutionStep.EmptyParameters;
    var result = await executor.ExecuteAsync(step, parameters, executionContext.CancellationToken).ConfigureAwait(false);
    if (result.Succeeded)
    {
        machineState = PackRunStepStateMachine.CompleteSuccess(machineState, DateTimeOffset.UtcNow);
        executionContext.Steps[step.Id] = record with
        {
            Status = machineState.Status,
            Attempts = machineState.Attempts,
            LastTransitionAt = machineState.LastTransitionAt,
            NextAttemptAt = machineState.NextAttemptAt,
            StatusReason = null
        };
        return StepExecutionOutcome.Continue;
    }
    logger.LogWarning(
        "Run {RunId} step {StepId} failed: {Error}",
        executionContext.RunId,
        step.Id,
        result.Error ?? "unknown error");
    // The state machine decides between retry (with backoff metadata) and abort.
    var failure = PackRunStepStateMachine.RegisterFailure(machineState, DateTimeOffset.UtcNow, executionContext.FailurePolicy);
    executionContext.Steps[step.Id] = record with
    {
        Status = failure.State.Status,
        Attempts = failure.State.Attempts,
        LastTransitionAt = failure.State.LastTransitionAt,
        NextAttemptAt = failure.State.NextAttemptAt,
        StatusReason = result.Error
    };
    return failure.Outcome switch
    {
        PackRunStepFailureOutcome.Retry => StepExecutionOutcome.Defer,
        // Abort only tolerated when the step opted into continue-on-error.
        PackRunStepFailureOutcome.Abort => step.ContinueOnError ? StepExecutionOutcome.Continue : StepExecutionOutcome.AbortRun,
        _ => StepExecutionOutcome.AbortRun
    };
}
private async Task<StepExecutionOutcome> ExecuteParallelStepAsync(
    PackRunExecutionStep step,
    ExecutionContext executionContext)
{
    // Runs child steps concurrently, bounded by MaxParallel (defaults to the
    // child count when unset).
    //
    // Fix: the previous implementation cleared the in-flight task list when a
    // terminal child outcome arrived, abandoning running children — their
    // exceptions became unobserved and they kept mutating the shared step-state
    // dictionary after this method had returned. We now stop scheduling new
    // children but always drain every task that was started.
    var children = step.Children;
    if (children.Count == 0)
    {
        MarkContainerSucceeded(step, executionContext);
        return StepExecutionOutcome.Continue;
    }
    var maxParallel = step.MaxParallel is > 0 ? step.MaxParallel.Value : children.Count;
    var pending = new Queue<PackRunExecutionStep>(children);
    var inFlight = new List<Task<StepExecutionOutcome>>(maxParallel);
    var outcome = StepExecutionOutcome.Continue;
    var childFailureDetected = false;
    var stopScheduling = false;
    while ((!stopScheduling && pending.Count > 0) || inFlight.Count > 0)
    {
        while (!stopScheduling && pending.Count > 0 && inFlight.Count < maxParallel)
        {
            inFlight.Add(ExecuteStepAsync(pending.Dequeue(), executionContext));
        }
        var completed = await Task.WhenAny(inFlight).ConfigureAwait(false);
        inFlight.Remove(completed);
        var childOutcome = await completed.ConfigureAwait(false);
        switch (childOutcome)
        {
            case StepExecutionOutcome.AbortRun when step.ContinueOnError:
                // Tolerated failure: remember it so the container is marked
                // failed at the end, but keep executing remaining children.
                childFailureDetected = true;
                break;
            case StepExecutionOutcome.AbortRun:
            case StepExecutionOutcome.Defer:
                // First terminal outcome wins; stop launching new children and
                // drain the ones already in flight.
                if (outcome == StepExecutionOutcome.Continue)
                {
                    outcome = childOutcome;
                }
                stopScheduling = true;
                break;
        }
    }
    if (outcome == StepExecutionOutcome.AbortRun)
    {
        MarkContainerFailure(step, executionContext, ChildFailureReason);
    }
    else if (outcome == StepExecutionOutcome.Defer)
    {
        MarkContainerPending(step, executionContext, AwaitingRetryReason);
    }
    else if (childFailureDetected)
    {
        MarkContainerFailure(step, executionContext, ChildFailureReason);
    }
    else
    {
        MarkContainerSucceeded(step, executionContext);
    }
    return outcome;
}
private async Task<StepExecutionOutcome> ExecuteMapStepAsync(
    PackRunExecutionStep step,
    ExecutionContext executionContext)
{
    // Executes map children sequentially. A deferral pauses the container; a
    // child failure aborts unless the step opted into continue-on-error.
    //
    // Fix: previously a continue-on-error child failure was marked on the
    // container inside the loop, but the unconditional MarkContainerSucceeded
    // after the loop overwrote that Failed status. Track the failure and apply
    // the final status once, mirroring ExecuteParallelStepAsync.
    var childFailureDetected = false;
    foreach (var child in step.Children)
    {
        var outcome = await ExecuteStepAsync(child, executionContext).ConfigureAwait(false);
        if (outcome == StepExecutionOutcome.Continue)
        {
            continue;
        }
        if (outcome == StepExecutionOutcome.Defer)
        {
            MarkContainerPending(step, executionContext, AwaitingRetryReason);
            return outcome;
        }
        // Child aborted.
        if (!step.ContinueOnError)
        {
            MarkContainerFailure(step, executionContext, ChildFailureReason);
            return outcome;
        }
        childFailureDetected = true;
    }
    if (childFailureDetected)
    {
        MarkContainerFailure(step, executionContext, ChildFailureReason);
    }
    else
    {
        MarkContainerSucceeded(step, executionContext);
    }
    return StepExecutionOutcome.Continue;
}
private void MarkContainerSucceeded(PackRunExecutionStep step, ExecutionContext executionContext)
{
    // Promote the container step to Succeeded (clearing reason and retry
    // metadata) unless it is untracked or already succeeded.
    if (!executionContext.Steps.TryGetValue(step.Id, out var current)
        || current.Status == PackRunStepExecutionStatus.Succeeded)
    {
        return;
    }
    var stamped = current with
    {
        Status = PackRunStepExecutionStatus.Succeeded,
        StatusReason = null,
        LastTransitionAt = DateTimeOffset.UtcNow,
        NextAttemptAt = null
    };
    executionContext.Steps[step.Id] = stamped;
}
private void MarkContainerFailure(PackRunExecutionStep step, ExecutionContext executionContext, string reason)
{
    // Record a container-level failure with the given reason; retry metadata
    // (NextAttemptAt) is intentionally left untouched.
    if (!executionContext.Steps.TryGetValue(step.Id, out var current))
    {
        return;
    }
    var stamped = current with
    {
        Status = PackRunStepExecutionStatus.Failed,
        StatusReason = reason,
        LastTransitionAt = DateTimeOffset.UtcNow
    };
    executionContext.Steps[step.Id] = stamped;
}
private void MarkContainerPending(PackRunExecutionStep step, ExecutionContext executionContext, string reason)
{
    // Park the container step back in Pending with an explanatory reason
    // (e.g. a child is awaiting a retry window).
    if (!executionContext.Steps.TryGetValue(step.Id, out var current))
    {
        return;
    }
    var stamped = current with
    {
        Status = PackRunStepExecutionStatus.Pending,
        StatusReason = reason,
        LastTransitionAt = DateTimeOffset.UtcNow
    };
    executionContext.Steps[step.Id] = stamped;
}
private static Dictionary<string, PackRunSimulationNode> IndexSimulation(IReadOnlyList<PackRunSimulationNode> steps)
{
    // Flattens the simulation tree into an id -> node lookup. Iterative
    // pre-order traversal; on duplicate ids the later-visited node wins,
    // matching the recursive merge order.
    var index = new Dictionary<string, PackRunSimulationNode>(StringComparer.Ordinal);
    var work = new Stack<PackRunSimulationNode>();
    for (var i = steps.Count - 1; i >= 0; i--)
    {
        work.Push(steps[i]);
    }
    while (work.Count > 0)
    {
        var node = work.Pop();
        index[node.Id] = node;
        for (var i = node.Children.Count - 1; i >= 0; i--)
        {
            work.Push(node.Children[i]);
        }
    }
    return index;
}
private static IEnumerable<PackRunExecutionStep> EnumerateSteps(IReadOnlyList<PackRunExecutionStep> steps)
{
    // Yields every step in pre-order (parent before its children), flattening
    // nested container steps. Iterative to avoid nested-iterator allocations.
    var work = new Stack<PackRunExecutionStep>();
    for (var i = steps.Count - 1; i >= 0; i--)
    {
        work.Push(steps[i]);
    }
    while (work.Count > 0)
    {
        var current = work.Pop();
        yield return current;
        for (var i = current.Children.Count - 1; i >= 0; i--)
        {
            work.Push(current.Children[i]);
        }
    }
}
/// <summary>
/// Shared context threaded through step execution: the run identifier, the
/// effective failure policy, the live per-step state map (concurrent because
/// parallel children update it from multiple tasks), and the cancellation token.
/// </summary>
private sealed record ExecutionContext(
    string RunId,
    TaskPackPlanFailurePolicy FailurePolicy,
    ConcurrentDictionary<string, PackRunStepStateRecord> Steps,
    CancellationToken CancellationToken);
/// <summary>Result of executing one step, steering the outer graph sweep.</summary>
private enum StepExecutionOutcome
{
    Continue, // Step finished (or tolerated a failure); proceed to the next step.
    Defer,    // Step is waiting (e.g. retry window); pause the sweep and resume later.
    AbortRun  // Unrecoverable failure; halt the run.
}
}

View File

@@ -5,12 +5,13 @@
"Microsoft.Hosting.Lifetime": "Information"
}
},
"Worker": {
"IdleDelay": "00:00:01",
"QueuePath": "queue",
"ArchivePath": "queue/archive",
"ApprovalStorePath": "state/approvals"
},
"Worker": {
"IdleDelay": "00:00:01",
"QueuePath": "queue",
"ArchivePath": "queue/archive",
"ApprovalStorePath": "state/approvals",
"RunStatePath": "state/runs"
},
"Notifications": {
"ApprovalEndpoint": null,
"PolicyEndpoint": null

View File

@@ -3,14 +3,18 @@
## Sprint 41 Foundations
| ID | Status | Owner(s) | Depends on | Description | Exit Criteria |
|----|--------|----------|------------|-------------|---------------|
| TASKRUN-41-001 | DOING (2025-11-01) | Task Runner Guild | ORCH-SVC-41-101, AUTH-PACKS-41-001 | Bootstrap service, define migrations for `pack_runs`, `pack_run_logs`, `pack_artifacts`, implement run API (create/get/log stream), local executor, approvals pause, artifact capture, and provenance manifest generation. | Service builds/tests; migrations scripted; run API functional with sample pack; logs/artefacts stored; manifest signed; compliance checklist recorded. |
| TASKRUN-41-001 | DOING (2025-11-01) | Task Runner Guild | ORCH-SVC-41-101, AUTH-PACKS-41-001 | Bootstrap service, define migrations for `pack_runs`, `pack_run_logs`, `pack_artifacts`, implement run API (create/get/log stream), local executor, approvals pause, artifact capture, and provenance manifest generation. | Service builds/tests; migrations scripted; run API functional with sample pack; logs/artefacts stored; manifest signed; compliance checklist recorded. |
## Sprint 42 Advanced Execution
| ID | Status | Owner(s) | Depends on | Description | Exit Criteria |
|----|--------|----------|------------|-------------|---------------|
| TASKRUN-42-001 | DOING (2025-10-29) | Task Runner Guild | TASKRUN-41-001 | Add loops, conditionals, `maxParallel`, outputs, simulation mode, policy gate integration, and failure recovery (retry/abort) with deterministic state. | Executor handles control flow; simulation returns plan; policy gates pause for approvals; tests cover restart/resume. |
| TASKRUN-42-001 | DONE (2025-11-04) | Task Runner Guild | TASKRUN-41-001 | Add loops, conditionals, `maxParallel`, outputs, simulation mode, policy gate integration, and failure recovery (retry/abort) with deterministic state. | Executor handles control flow; simulation returns plan; policy gates pause for approvals; tests cover restart/resume. |
> 2025-10-29: Initiated manifest parsing + deterministic planning core to unblock approvals pipeline; building expression engine + plan hashing to support CLI parity.
> 2025-10-29: Landed manifest loader, planner, deterministic hash, outputs + approval/policy insights with unit tests; awaiting upstream APIs for execution-side wiring.
> 2025-11-04: Worker now builds execution graph, enforces parallelism/continue-on-error, persists retry windows, and WebService exposes simulation/run-state APIs.
> 2025-11-04: Resuming execution-engine enhancements (loops, conditionals, maxParallel) and simulation mode wiring; mapping failure recovery + policy gate enforcement plan.
> 2025-11-04: Continuing wiring — fixing file-backed state store, validating retry metadata, and preparing CLI surface for the simulation preview.
> 2025-11-04: Gate outcomes now reflect approval states; CLI `task-runner simulate` surfaces the new simulation API.
## Sprint 43 Approvals, Notifications, Hardening
| ID | Status | Owner(s) | Depends on | Description | Exit Criteria |
@@ -37,7 +41,7 @@
## Air-Gapped Mode (Epic 16)
| ID | Status | Owner(s) | Depends on | Description | Exit Criteria |
|----|--------|----------|------------|-------------|---------------|
| TASKRUN-AIRGAP-56-001 | DOING (2025-11-03) | Task Runner Guild, AirGap Policy Guild | AIRGAP-POL-56-001, TASKRUN-OBS-50-001 | Enforce plan-time validation rejecting steps with non-allowlisted network calls in sealed mode and surface remediation errors. | Planner blocks disallowed steps; error contains remediation; tests cover sealed/unsealed behavior. |
| TASKRUN-AIRGAP-56-001 | DOING (2025-11-03) | Task Runner Guild, AirGap Policy Guild | AIRGAP-POL-56-001, TASKRUN-OBS-50-001 | Enforce plan-time validation rejecting steps with non-allowlisted network calls in sealed mode and surface remediation errors. | Planner blocks disallowed steps; error contains remediation; tests cover sealed/unsealed behavior. |
| TASKRUN-AIRGAP-56-002 | TODO | Task Runner Guild, AirGap Importer Guild | TASKRUN-AIRGAP-56-001, AIRGAP-IMP-57-002 | Add helper steps for bundle ingestion (checksum verification, staging to object store) with deterministic outputs. | Helper steps succeed deterministically; integration tests import sample bundle. |
| TASKRUN-AIRGAP-57-001 | TODO | Task Runner Guild, AirGap Controller Guild | TASKRUN-AIRGAP-56-001, AIRGAP-CTL-56-002 | Refuse to execute plans when environment sealed=false but declared sealed install; emit advisory timeline events. | Mismatch detection works; timeline + telemetry record violation; docs updated. |
| TASKRUN-AIRGAP-58-001 | TODO | Task Runner Guild, Evidence Locker Guild | TASKRUN-OBS-53-001, EVID-OBS-55-001 | Capture bundle import job transcripts, hashed inputs, and outputs into portable evidence bundles. | Evidence recorded; manifests deterministic; timeline references created. |