feat: Implement Policy Engine Evaluation Service and Cache with unit tests
Some checks failed: Docs CI / lint-and-preview (push) has been cancelled.
Temp commit to debug
@@ -1,28 +1,28 @@
using System.Diagnostics.Metrics;

namespace StellaOps.AdvisoryAI.Hosting;

public sealed class AdvisoryAiMetrics
{
    private static readonly Meter Meter = new("StellaOps.AdvisoryAI", "1.0.0");

    private readonly Counter<long> _requests;
    private readonly Counter<long> _queuePublished;
    private readonly Counter<long> _queueProcessed;

    public AdvisoryAiMetrics()
    {
        _requests = Meter.CreateCounter<long>("advisory_ai_pipeline_requests_total");
        _queuePublished = Meter.CreateCounter<long>("advisory_ai_pipeline_messages_enqueued_total");
        _queueProcessed = Meter.CreateCounter<long>("advisory_ai_pipeline_messages_processed_total");
    }

    public void RecordRequest(string taskType)
        => _requests.Add(1, KeyValuePair.Create<string, object?>("task_type", taskType));

    public void RecordEnqueued(string taskType)
        => _queuePublished.Add(1, KeyValuePair.Create<string, object?>("task_type", taskType));

    public void RecordProcessed(string taskType)
        => _queueProcessed.Add(1, KeyValuePair.Create<string, object?>("task_type", taskType));
}
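A minimal usage sketch for the counters above ("summary" is an assumed task type; how the class is registered, for example as a DI singleton, is not part of this diff):

// Illustrative caller only; registration of AdvisoryAiMetrics is not shown in this change.
var metrics = new AdvisoryAiMetrics();
metrics.RecordRequest("summary");    // advisory_ai_pipeline_requests_total{task_type="summary"} += 1
metrics.RecordEnqueued("summary");   // advisory_ai_pipeline_messages_enqueued_total{task_type="summary"} += 1
metrics.RecordProcessed("summary");  // advisory_ai_pipeline_messages_processed_total{task_type="summary"} += 1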
@@ -1,33 +1,33 @@
using System;
using Microsoft.Extensions.Configuration;
using Microsoft.Extensions.DependencyInjection;
using Microsoft.Extensions.DependencyInjection.Extensions;
using Microsoft.Extensions.Options;
using StellaOps.AdvisoryAI.DependencyInjection;
using StellaOps.AdvisoryAI.Providers;
using StellaOps.AdvisoryAI.Queue;

namespace StellaOps.AdvisoryAI.Hosting;

public static class ServiceCollectionExtensions
{
    public static IServiceCollection AddAdvisoryAiCore(
        this IServiceCollection services,
        IConfiguration configuration,
        Action<AdvisoryAiServiceOptions>? configure = null)
    {
        ArgumentNullException.ThrowIfNull(services);
        ArgumentNullException.ThrowIfNull(configuration);

        services.AddOptions<AdvisoryAiServiceOptions>()
            .Bind(configuration.GetSection("AdvisoryAI"))
            .PostConfigure(options =>
            {
                configure?.Invoke(options);
                AdvisoryAiServiceOptionsValidator.Validate(options);
            })
            .ValidateOnStart();

        services.AddOptions<SbomContextClientOptions>()
            .Configure<IOptions<AdvisoryAiServiceOptions>>((target, source) =>
            {
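For reference, a sketch of how a host could wire this up (the Program.cs shape is an assumption; only the AddAdvisoryAiCore signature and the "AdvisoryAI" section name come from the change above):

// Hypothetical minimal-hosting Program.cs; assumes the StellaOps.AdvisoryAI.Hosting namespace is imported.
var builder = WebApplication.CreateBuilder(args);

builder.Services.AddAdvisoryAiCore(
    builder.Configuration,   // binds the "AdvisoryAI" configuration section shown in the appsettings files below
    options =>
    {
        // Optional overrides; they run inside PostConfigure before AdvisoryAiServiceOptionsValidator.Validate.
    });

var app = builder.Build();
app.Run();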
@@ -1,12 +1,12 @@
<Project Sdk="Microsoft.NET.Sdk">
  <PropertyGroup>
    <TargetFramework>net10.0</TargetFramework>
    <LangVersion>preview</LangVersion>
    <Nullable>enable</Nullable>
    <ImplicitUsings>enable</ImplicitUsings>
    <TreatWarningsAsErrors>true</TreatWarningsAsErrors>
  </PropertyGroup>
  <ItemGroup>
    <ProjectReference Include="..\StellaOps.AdvisoryAI\StellaOps.AdvisoryAI.csproj" />
  </ItemGroup>
</Project>
@@ -1,13 +1,13 @@
<Project Sdk="Microsoft.NET.Sdk.Web">
  <PropertyGroup>
    <TargetFramework>net10.0</TargetFramework>
    <LangVersion>preview</LangVersion>
    <Nullable>enable</Nullable>
    <ImplicitUsings>enable</ImplicitUsings>
    <TreatWarningsAsErrors>true</TreatWarningsAsErrors>
  </PropertyGroup>
  <ItemGroup>
    <ProjectReference Include="..\StellaOps.AdvisoryAI\StellaOps.AdvisoryAI.csproj" />
    <ProjectReference Include="..\StellaOps.AdvisoryAI.Hosting\StellaOps.AdvisoryAI.Hosting.csproj" />
  </ItemGroup>
</Project>
@@ -1,8 +1,8 @@
{
  "Logging": {
    "LogLevel": {
      "Default": "Debug",
      "Microsoft.AspNetCore": "Warning"
    }
  }
}
@@ -1,14 +1,14 @@
{
  "AdvisoryAI": {
    "SbomBaseAddress": "http://localhost:5210/",
    "Queue": {
      "DirectoryPath": "../var/advisory-ai-queue"
    }
  },
  "Logging": {
    "LogLevel": {
      "Default": "Information",
      "Microsoft.AspNetCore": "Warning"
    }
  }
}
@@ -1,13 +1,13 @@
<Project Sdk="Microsoft.NET.Sdk.Worker">
  <PropertyGroup>
    <TargetFramework>net10.0</TargetFramework>
    <LangVersion>preview</LangVersion>
    <Nullable>enable</Nullable>
    <ImplicitUsings>enable</ImplicitUsings>
    <TreatWarningsAsErrors>true</TreatWarningsAsErrors>
  </PropertyGroup>
  <ItemGroup>
    <ProjectReference Include="..\StellaOps.AdvisoryAI\StellaOps.AdvisoryAI.csproj" />
    <ProjectReference Include="..\StellaOps.AdvisoryAI.Hosting\StellaOps.AdvisoryAI.Hosting.csproj" />
  </ItemGroup>
</Project>
@@ -1,7 +1,7 @@
{
  "Logging": {
    "LogLevel": {
      "Default": "Debug"
    }
  }
}
@@ -1,13 +1,13 @@
{
  "AdvisoryAI": {
    "SbomBaseAddress": "http://localhost:5210/",
    "Queue": {
      "DirectoryPath": "../var/advisory-ai-queue"
    }
  },
  "Logging": {
    "LogLevel": {
      "Default": "Information"
    }
  }
}
@@ -1,235 +1,235 @@

Microsoft Visual Studio Solution File, Format Version 12.00
# Visual Studio Version 17
VisualStudioVersion = 17.0.31903.59
MinimumVisualStudioVersion = 10.0.40219.1
Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.AdvisoryAI", "StellaOps.AdvisoryAI\StellaOps.AdvisoryAI.csproj", "{E41E2FDA-3827-4B18-8596-B25BDE882D5F}"
EndProject
Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "__Tests", "__Tests", "{56BCE1BF-7CBA-7CE8-203D-A88051F1D642}"
EndProject
Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.AdvisoryAI.Tests", "__Tests\StellaOps.AdvisoryAI.Tests\StellaOps.AdvisoryAI.Tests.csproj", "{F6860DE5-0C7C-4848-8356-7555E3C391A3}"
EndProject
Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Concelier.Testing", "..\Concelier\__Libraries\StellaOps.Concelier.Testing\StellaOps.Concelier.Testing.csproj", "{B53E4FED-8988-4354-8D1A-D3C618DBFD78}"
EndProject
Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Concelier.Connector.Common", "..\Concelier\__Libraries\StellaOps.Concelier.Connector.Common\StellaOps.Concelier.Connector.Common.csproj", "{E98A7C01-1619-41A0-A586-84EF9952F75D}"
EndProject
Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Concelier.Storage.Mongo", "..\Concelier\__Libraries\StellaOps.Concelier.Storage.Mongo\StellaOps.Concelier.Storage.Mongo.csproj", "{973DD52D-AD3C-4526-92CB-F35FDD9AEA10}"
EndProject
Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Concelier.Core", "..\Concelier\__Libraries\StellaOps.Concelier.Core\StellaOps.Concelier.Core.csproj", "{F7FB8ABD-31D7-4B4D-8B2A-F4D2B696ACAF}"
EndProject
Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Concelier.Models", "..\Concelier\__Libraries\StellaOps.Concelier.Models\StellaOps.Concelier.Models.csproj", "{BBB5CD3C-866A-4298-ACE1-598413631CF5}"
EndProject
Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Concelier.RawModels", "..\Concelier\__Libraries\StellaOps.Concelier.RawModels\StellaOps.Concelier.RawModels.csproj", "{7E3D9A33-BD0E-424A-88E6-F4440E386A3C}"
EndProject
Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Concelier.Normalization", "..\Concelier\__Libraries\StellaOps.Concelier.Normalization\StellaOps.Concelier.Normalization.csproj", "{1313202A-E8A8-41E3-80BC-472096074681}"
EndProject
Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Plugin", "..\__Libraries\StellaOps.Plugin\StellaOps.Plugin.csproj", "{1CC5F6F8-DF9A-4BCC-8C69-79E2DF604F6D}"
EndProject
Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.DependencyInjection", "..\__Libraries\StellaOps.DependencyInjection\StellaOps.DependencyInjection.csproj", "{F567F20C-552F-4761-941A-0552CEF68160}"
EndProject
Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Aoc", "..\Aoc\__Libraries\StellaOps.Aoc\StellaOps.Aoc.csproj", "{C8CE71D3-952A-43F7-9346-20113E37F672}"
EndProject
Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.AdvisoryAI.Hosting", "StellaOps.AdvisoryAI.Hosting\StellaOps.AdvisoryAI.Hosting.csproj", "{F3E0EA9E-E4F0-428A-804B-A599870B971D}"
EndProject
Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.AdvisoryAI.WebService", "StellaOps.AdvisoryAI.WebService\StellaOps.AdvisoryAI.WebService.csproj", "{AD5CEACE-7BF5-4D48-B473-D60188844A0A}"
EndProject
Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.AdvisoryAI.Worker", "StellaOps.AdvisoryAI.Worker\StellaOps.AdvisoryAI.Worker.csproj", "{BC68381E-B6EF-4481-8487-00267624D18C}"
EndProject
Global
    GlobalSection(SolutionConfigurationPlatforms) = preSolution
        Debug|Any CPU = Debug|Any CPU
        Debug|x64 = Debug|x64
        Debug|x86 = Debug|x86
        Release|Any CPU = Release|Any CPU
        Release|x64 = Release|x64
        Release|x86 = Release|x86
    EndGlobalSection
    GlobalSection(ProjectConfigurationPlatforms) = postSolution
        {E41E2FDA-3827-4B18-8596-B25BDE882D5F}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
        {E41E2FDA-3827-4B18-8596-B25BDE882D5F}.Debug|Any CPU.Build.0 = Debug|Any CPU
        {E41E2FDA-3827-4B18-8596-B25BDE882D5F}.Debug|x64.ActiveCfg = Debug|Any CPU
        {E41E2FDA-3827-4B18-8596-B25BDE882D5F}.Debug|x64.Build.0 = Debug|Any CPU
        {E41E2FDA-3827-4B18-8596-B25BDE882D5F}.Debug|x86.ActiveCfg = Debug|Any CPU
        {E41E2FDA-3827-4B18-8596-B25BDE882D5F}.Debug|x86.Build.0 = Debug|Any CPU
        {E41E2FDA-3827-4B18-8596-B25BDE882D5F}.Release|Any CPU.ActiveCfg = Release|Any CPU
        {E41E2FDA-3827-4B18-8596-B25BDE882D5F}.Release|Any CPU.Build.0 = Release|Any CPU
        {E41E2FDA-3827-4B18-8596-B25BDE882D5F}.Release|x64.ActiveCfg = Release|Any CPU
        {E41E2FDA-3827-4B18-8596-B25BDE882D5F}.Release|x64.Build.0 = Release|Any CPU
        {E41E2FDA-3827-4B18-8596-B25BDE882D5F}.Release|x86.ActiveCfg = Release|Any CPU
        {E41E2FDA-3827-4B18-8596-B25BDE882D5F}.Release|x86.Build.0 = Release|Any CPU
        {F6860DE5-0C7C-4848-8356-7555E3C391A3}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
        {F6860DE5-0C7C-4848-8356-7555E3C391A3}.Debug|Any CPU.Build.0 = Debug|Any CPU
        {F6860DE5-0C7C-4848-8356-7555E3C391A3}.Debug|x64.ActiveCfg = Debug|Any CPU
        {F6860DE5-0C7C-4848-8356-7555E3C391A3}.Debug|x64.Build.0 = Debug|Any CPU
        {F6860DE5-0C7C-4848-8356-7555E3C391A3}.Debug|x86.ActiveCfg = Debug|Any CPU
        {F6860DE5-0C7C-4848-8356-7555E3C391A3}.Debug|x86.Build.0 = Debug|Any CPU
        {F6860DE5-0C7C-4848-8356-7555E3C391A3}.Release|Any CPU.ActiveCfg = Release|Any CPU
        {F6860DE5-0C7C-4848-8356-7555E3C391A3}.Release|Any CPU.Build.0 = Release|Any CPU
        {F6860DE5-0C7C-4848-8356-7555E3C391A3}.Release|x64.ActiveCfg = Release|Any CPU
        {F6860DE5-0C7C-4848-8356-7555E3C391A3}.Release|x64.Build.0 = Release|Any CPU
        {F6860DE5-0C7C-4848-8356-7555E3C391A3}.Release|x86.ActiveCfg = Release|Any CPU
        {F6860DE5-0C7C-4848-8356-7555E3C391A3}.Release|x86.Build.0 = Release|Any CPU
        {B53E4FED-8988-4354-8D1A-D3C618DBFD78}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
        {B53E4FED-8988-4354-8D1A-D3C618DBFD78}.Debug|Any CPU.Build.0 = Debug|Any CPU
        {B53E4FED-8988-4354-8D1A-D3C618DBFD78}.Debug|x64.ActiveCfg = Debug|Any CPU
        {B53E4FED-8988-4354-8D1A-D3C618DBFD78}.Debug|x64.Build.0 = Debug|Any CPU
        {B53E4FED-8988-4354-8D1A-D3C618DBFD78}.Debug|x86.ActiveCfg = Debug|Any CPU
        {B53E4FED-8988-4354-8D1A-D3C618DBFD78}.Debug|x86.Build.0 = Debug|Any CPU
        {B53E4FED-8988-4354-8D1A-D3C618DBFD78}.Release|Any CPU.ActiveCfg = Release|Any CPU
        {B53E4FED-8988-4354-8D1A-D3C618DBFD78}.Release|Any CPU.Build.0 = Release|Any CPU
        {B53E4FED-8988-4354-8D1A-D3C618DBFD78}.Release|x64.ActiveCfg = Release|Any CPU
        {B53E4FED-8988-4354-8D1A-D3C618DBFD78}.Release|x64.Build.0 = Release|Any CPU
        {B53E4FED-8988-4354-8D1A-D3C618DBFD78}.Release|x86.ActiveCfg = Release|Any CPU
        {B53E4FED-8988-4354-8D1A-D3C618DBFD78}.Release|x86.Build.0 = Release|Any CPU
        {E98A7C01-1619-41A0-A586-84EF9952F75D}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
        {E98A7C01-1619-41A0-A586-84EF9952F75D}.Debug|Any CPU.Build.0 = Debug|Any CPU
        {E98A7C01-1619-41A0-A586-84EF9952F75D}.Debug|x64.ActiveCfg = Debug|Any CPU
        {E98A7C01-1619-41A0-A586-84EF9952F75D}.Debug|x64.Build.0 = Debug|Any CPU
        {E98A7C01-1619-41A0-A586-84EF9952F75D}.Debug|x86.ActiveCfg = Debug|Any CPU
        {E98A7C01-1619-41A0-A586-84EF9952F75D}.Debug|x86.Build.0 = Debug|Any CPU
        {E98A7C01-1619-41A0-A586-84EF9952F75D}.Release|Any CPU.ActiveCfg = Release|Any CPU
        {E98A7C01-1619-41A0-A586-84EF9952F75D}.Release|Any CPU.Build.0 = Release|Any CPU
        {E98A7C01-1619-41A0-A586-84EF9952F75D}.Release|x64.ActiveCfg = Release|Any CPU
        {E98A7C01-1619-41A0-A586-84EF9952F75D}.Release|x64.Build.0 = Release|Any CPU
        {E98A7C01-1619-41A0-A586-84EF9952F75D}.Release|x86.ActiveCfg = Release|Any CPU
        {E98A7C01-1619-41A0-A586-84EF9952F75D}.Release|x86.Build.0 = Release|Any CPU
        {973DD52D-AD3C-4526-92CB-F35FDD9AEA10}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
        {973DD52D-AD3C-4526-92CB-F35FDD9AEA10}.Debug|Any CPU.Build.0 = Debug|Any CPU
        {973DD52D-AD3C-4526-92CB-F35FDD9AEA10}.Debug|x64.ActiveCfg = Debug|Any CPU
        {973DD52D-AD3C-4526-92CB-F35FDD9AEA10}.Debug|x64.Build.0 = Debug|Any CPU
        {973DD52D-AD3C-4526-92CB-F35FDD9AEA10}.Debug|x86.ActiveCfg = Debug|Any CPU
        {973DD52D-AD3C-4526-92CB-F35FDD9AEA10}.Debug|x86.Build.0 = Debug|Any CPU
        {973DD52D-AD3C-4526-92CB-F35FDD9AEA10}.Release|Any CPU.ActiveCfg = Release|Any CPU
        {973DD52D-AD3C-4526-92CB-F35FDD9AEA10}.Release|Any CPU.Build.0 = Release|Any CPU
        {973DD52D-AD3C-4526-92CB-F35FDD9AEA10}.Release|x64.ActiveCfg = Release|Any CPU
        {973DD52D-AD3C-4526-92CB-F35FDD9AEA10}.Release|x64.Build.0 = Release|Any CPU
        {973DD52D-AD3C-4526-92CB-F35FDD9AEA10}.Release|x86.ActiveCfg = Release|Any CPU
        {973DD52D-AD3C-4526-92CB-F35FDD9AEA10}.Release|x86.Build.0 = Release|Any CPU
        {F7FB8ABD-31D7-4B4D-8B2A-F4D2B696ACAF}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
        {F7FB8ABD-31D7-4B4D-8B2A-F4D2B696ACAF}.Debug|Any CPU.Build.0 = Debug|Any CPU
        {F7FB8ABD-31D7-4B4D-8B2A-F4D2B696ACAF}.Debug|x64.ActiveCfg = Debug|Any CPU
        {F7FB8ABD-31D7-4B4D-8B2A-F4D2B696ACAF}.Debug|x64.Build.0 = Debug|Any CPU
        {F7FB8ABD-31D7-4B4D-8B2A-F4D2B696ACAF}.Debug|x86.ActiveCfg = Debug|Any CPU
        {F7FB8ABD-31D7-4B4D-8B2A-F4D2B696ACAF}.Debug|x86.Build.0 = Debug|Any CPU
        {F7FB8ABD-31D7-4B4D-8B2A-F4D2B696ACAF}.Release|Any CPU.ActiveCfg = Release|Any CPU
        {F7FB8ABD-31D7-4B4D-8B2A-F4D2B696ACAF}.Release|Any CPU.Build.0 = Release|Any CPU
        {F7FB8ABD-31D7-4B4D-8B2A-F4D2B696ACAF}.Release|x64.ActiveCfg = Release|Any CPU
        {F7FB8ABD-31D7-4B4D-8B2A-F4D2B696ACAF}.Release|x64.Build.0 = Release|Any CPU
        {F7FB8ABD-31D7-4B4D-8B2A-F4D2B696ACAF}.Release|x86.ActiveCfg = Release|Any CPU
        {F7FB8ABD-31D7-4B4D-8B2A-F4D2B696ACAF}.Release|x86.Build.0 = Release|Any CPU
        {BBB5CD3C-866A-4298-ACE1-598413631CF5}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
        {BBB5CD3C-866A-4298-ACE1-598413631CF5}.Debug|Any CPU.Build.0 = Debug|Any CPU
        {BBB5CD3C-866A-4298-ACE1-598413631CF5}.Debug|x64.ActiveCfg = Debug|Any CPU
        {BBB5CD3C-866A-4298-ACE1-598413631CF5}.Debug|x64.Build.0 = Debug|Any CPU
        {BBB5CD3C-866A-4298-ACE1-598413631CF5}.Debug|x86.ActiveCfg = Debug|Any CPU
        {BBB5CD3C-866A-4298-ACE1-598413631CF5}.Debug|x86.Build.0 = Debug|Any CPU
        {BBB5CD3C-866A-4298-ACE1-598413631CF5}.Release|Any CPU.ActiveCfg = Release|Any CPU
        {BBB5CD3C-866A-4298-ACE1-598413631CF5}.Release|Any CPU.Build.0 = Release|Any CPU
        {BBB5CD3C-866A-4298-ACE1-598413631CF5}.Release|x64.ActiveCfg = Release|Any CPU
        {BBB5CD3C-866A-4298-ACE1-598413631CF5}.Release|x64.Build.0 = Release|Any CPU
        {BBB5CD3C-866A-4298-ACE1-598413631CF5}.Release|x86.ActiveCfg = Release|Any CPU
        {BBB5CD3C-866A-4298-ACE1-598413631CF5}.Release|x86.Build.0 = Release|Any CPU
        {7E3D9A33-BD0E-424A-88E6-F4440E386A3C}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
        {7E3D9A33-BD0E-424A-88E6-F4440E386A3C}.Debug|Any CPU.Build.0 = Debug|Any CPU
        {7E3D9A33-BD0E-424A-88E6-F4440E386A3C}.Debug|x64.ActiveCfg = Debug|Any CPU
        {7E3D9A33-BD0E-424A-88E6-F4440E386A3C}.Debug|x64.Build.0 = Debug|Any CPU
        {7E3D9A33-BD0E-424A-88E6-F4440E386A3C}.Debug|x86.ActiveCfg = Debug|Any CPU
        {7E3D9A33-BD0E-424A-88E6-F4440E386A3C}.Debug|x86.Build.0 = Debug|Any CPU
        {7E3D9A33-BD0E-424A-88E6-F4440E386A3C}.Release|Any CPU.ActiveCfg = Release|Any CPU
        {7E3D9A33-BD0E-424A-88E6-F4440E386A3C}.Release|Any CPU.Build.0 = Release|Any CPU
        {7E3D9A33-BD0E-424A-88E6-F4440E386A3C}.Release|x64.ActiveCfg = Release|Any CPU
        {7E3D9A33-BD0E-424A-88E6-F4440E386A3C}.Release|x64.Build.0 = Release|Any CPU
        {7E3D9A33-BD0E-424A-88E6-F4440E386A3C}.Release|x86.ActiveCfg = Release|Any CPU
        {7E3D9A33-BD0E-424A-88E6-F4440E386A3C}.Release|x86.Build.0 = Release|Any CPU
        {1313202A-E8A8-41E3-80BC-472096074681}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
        {1313202A-E8A8-41E3-80BC-472096074681}.Debug|Any CPU.Build.0 = Debug|Any CPU
        {1313202A-E8A8-41E3-80BC-472096074681}.Debug|x64.ActiveCfg = Debug|Any CPU
        {1313202A-E8A8-41E3-80BC-472096074681}.Debug|x64.Build.0 = Debug|Any CPU
        {1313202A-E8A8-41E3-80BC-472096074681}.Debug|x86.ActiveCfg = Debug|Any CPU
        {1313202A-E8A8-41E3-80BC-472096074681}.Debug|x86.Build.0 = Debug|Any CPU
        {1313202A-E8A8-41E3-80BC-472096074681}.Release|Any CPU.ActiveCfg = Release|Any CPU
        {1313202A-E8A8-41E3-80BC-472096074681}.Release|Any CPU.Build.0 = Release|Any CPU
        {1313202A-E8A8-41E3-80BC-472096074681}.Release|x64.ActiveCfg = Release|Any CPU
        {1313202A-E8A8-41E3-80BC-472096074681}.Release|x64.Build.0 = Release|Any CPU
        {1313202A-E8A8-41E3-80BC-472096074681}.Release|x86.ActiveCfg = Release|Any CPU
        {1313202A-E8A8-41E3-80BC-472096074681}.Release|x86.Build.0 = Release|Any CPU
        {1CC5F6F8-DF9A-4BCC-8C69-79E2DF604F6D}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
        {1CC5F6F8-DF9A-4BCC-8C69-79E2DF604F6D}.Debug|Any CPU.Build.0 = Debug|Any CPU
        {1CC5F6F8-DF9A-4BCC-8C69-79E2DF604F6D}.Debug|x64.ActiveCfg = Debug|Any CPU
        {1CC5F6F8-DF9A-4BCC-8C69-79E2DF604F6D}.Debug|x64.Build.0 = Debug|Any CPU
        {1CC5F6F8-DF9A-4BCC-8C69-79E2DF604F6D}.Debug|x86.ActiveCfg = Debug|Any CPU
        {1CC5F6F8-DF9A-4BCC-8C69-79E2DF604F6D}.Debug|x86.Build.0 = Debug|Any CPU
        {1CC5F6F8-DF9A-4BCC-8C69-79E2DF604F6D}.Release|Any CPU.ActiveCfg = Release|Any CPU
        {1CC5F6F8-DF9A-4BCC-8C69-79E2DF604F6D}.Release|Any CPU.Build.0 = Release|Any CPU
        {1CC5F6F8-DF9A-4BCC-8C69-79E2DF604F6D}.Release|x64.ActiveCfg = Release|Any CPU
        {1CC5F6F8-DF9A-4BCC-8C69-79E2DF604F6D}.Release|x64.Build.0 = Release|Any CPU
        {1CC5F6F8-DF9A-4BCC-8C69-79E2DF604F6D}.Release|x86.ActiveCfg = Release|Any CPU
        {1CC5F6F8-DF9A-4BCC-8C69-79E2DF604F6D}.Release|x86.Build.0 = Release|Any CPU
        {F567F20C-552F-4761-941A-0552CEF68160}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
        {F567F20C-552F-4761-941A-0552CEF68160}.Debug|Any CPU.Build.0 = Debug|Any CPU
        {F567F20C-552F-4761-941A-0552CEF68160}.Debug|x64.ActiveCfg = Debug|Any CPU
        {F567F20C-552F-4761-941A-0552CEF68160}.Debug|x64.Build.0 = Debug|Any CPU
        {F567F20C-552F-4761-941A-0552CEF68160}.Debug|x86.ActiveCfg = Debug|Any CPU
        {F567F20C-552F-4761-941A-0552CEF68160}.Debug|x86.Build.0 = Debug|Any CPU
        {F567F20C-552F-4761-941A-0552CEF68160}.Release|Any CPU.ActiveCfg = Release|Any CPU
        {F567F20C-552F-4761-941A-0552CEF68160}.Release|Any CPU.Build.0 = Release|Any CPU
        {F567F20C-552F-4761-941A-0552CEF68160}.Release|x64.ActiveCfg = Release|Any CPU
        {F567F20C-552F-4761-941A-0552CEF68160}.Release|x64.Build.0 = Release|Any CPU
        {F567F20C-552F-4761-941A-0552CEF68160}.Release|x86.ActiveCfg = Release|Any CPU
        {F567F20C-552F-4761-941A-0552CEF68160}.Release|x86.Build.0 = Release|Any CPU
        {C8CE71D3-952A-43F7-9346-20113E37F672}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
        {C8CE71D3-952A-43F7-9346-20113E37F672}.Debug|Any CPU.Build.0 = Debug|Any CPU
        {C8CE71D3-952A-43F7-9346-20113E37F672}.Debug|x64.ActiveCfg = Debug|Any CPU
        {C8CE71D3-952A-43F7-9346-20113E37F672}.Debug|x64.Build.0 = Debug|Any CPU
        {C8CE71D3-952A-43F7-9346-20113E37F672}.Debug|x86.ActiveCfg = Debug|Any CPU
        {C8CE71D3-952A-43F7-9346-20113E37F672}.Debug|x86.Build.0 = Debug|Any CPU
        {C8CE71D3-952A-43F7-9346-20113E37F672}.Release|Any CPU.ActiveCfg = Release|Any CPU
        {C8CE71D3-952A-43F7-9346-20113E37F672}.Release|Any CPU.Build.0 = Release|Any CPU
        {C8CE71D3-952A-43F7-9346-20113E37F672}.Release|x64.ActiveCfg = Release|Any CPU
        {C8CE71D3-952A-43F7-9346-20113E37F672}.Release|x64.Build.0 = Release|Any CPU
        {C8CE71D3-952A-43F7-9346-20113E37F672}.Release|x86.ActiveCfg = Release|Any CPU
        {C8CE71D3-952A-43F7-9346-20113E37F672}.Release|x86.Build.0 = Release|Any CPU
        {F3E0EA9E-E4F0-428A-804B-A599870B971D}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
        {F3E0EA9E-E4F0-428A-804B-A599870B971D}.Debug|Any CPU.Build.0 = Debug|Any CPU
        {F3E0EA9E-E4F0-428A-804B-A599870B971D}.Debug|x64.ActiveCfg = Debug|Any CPU
        {F3E0EA9E-E4F0-428A-804B-A599870B971D}.Debug|x64.Build.0 = Debug|Any CPU
        {F3E0EA9E-E4F0-428A-804B-A599870B971D}.Debug|x86.ActiveCfg = Debug|Any CPU
        {F3E0EA9E-E4F0-428A-804B-A599870B971D}.Debug|x86.Build.0 = Debug|Any CPU
        {F3E0EA9E-E4F0-428A-804B-A599870B971D}.Release|Any CPU.ActiveCfg = Release|Any CPU
        {F3E0EA9E-E4F0-428A-804B-A599870B971D}.Release|Any CPU.Build.0 = Release|Any CPU
        {F3E0EA9E-E4F0-428A-804B-A599870B971D}.Release|x64.ActiveCfg = Release|Any CPU
        {F3E0EA9E-E4F0-428A-804B-A599870B971D}.Release|x64.Build.0 = Release|Any CPU
        {F3E0EA9E-E4F0-428A-804B-A599870B971D}.Release|x86.ActiveCfg = Release|Any CPU
        {F3E0EA9E-E4F0-428A-804B-A599870B971D}.Release|x86.Build.0 = Release|Any CPU
        {AD5CEACE-7BF5-4D48-B473-D60188844A0A}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
        {AD5CEACE-7BF5-4D48-B473-D60188844A0A}.Debug|Any CPU.Build.0 = Debug|Any CPU
        {AD5CEACE-7BF5-4D48-B473-D60188844A0A}.Debug|x64.ActiveCfg = Debug|Any CPU
        {AD5CEACE-7BF5-4D48-B473-D60188844A0A}.Debug|x64.Build.0 = Debug|Any CPU
        {AD5CEACE-7BF5-4D48-B473-D60188844A0A}.Debug|x86.ActiveCfg = Debug|Any CPU
        {AD5CEACE-7BF5-4D48-B473-D60188844A0A}.Debug|x86.Build.0 = Debug|Any CPU
        {AD5CEACE-7BF5-4D48-B473-D60188844A0A}.Release|Any CPU.ActiveCfg = Release|Any CPU
        {AD5CEACE-7BF5-4D48-B473-D60188844A0A}.Release|Any CPU.Build.0 = Release|Any CPU
        {AD5CEACE-7BF5-4D48-B473-D60188844A0A}.Release|x64.ActiveCfg = Release|Any CPU
        {AD5CEACE-7BF5-4D48-B473-D60188844A0A}.Release|x64.Build.0 = Release|Any CPU
        {AD5CEACE-7BF5-4D48-B473-D60188844A0A}.Release|x86.ActiveCfg = Release|Any CPU
        {AD5CEACE-7BF5-4D48-B473-D60188844A0A}.Release|x86.Build.0 = Release|Any CPU
        {BC68381E-B6EF-4481-8487-00267624D18C}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
        {BC68381E-B6EF-4481-8487-00267624D18C}.Debug|Any CPU.Build.0 = Debug|Any CPU
        {BC68381E-B6EF-4481-8487-00267624D18C}.Debug|x64.ActiveCfg = Debug|Any CPU
        {BC68381E-B6EF-4481-8487-00267624D18C}.Debug|x64.Build.0 = Debug|Any CPU
        {BC68381E-B6EF-4481-8487-00267624D18C}.Debug|x86.ActiveCfg = Debug|Any CPU
        {BC68381E-B6EF-4481-8487-00267624D18C}.Debug|x86.Build.0 = Debug|Any CPU
        {BC68381E-B6EF-4481-8487-00267624D18C}.Release|Any CPU.ActiveCfg = Release|Any CPU
        {BC68381E-B6EF-4481-8487-00267624D18C}.Release|Any CPU.Build.0 = Release|Any CPU
        {BC68381E-B6EF-4481-8487-00267624D18C}.Release|x64.ActiveCfg = Release|Any CPU
        {BC68381E-B6EF-4481-8487-00267624D18C}.Release|x64.Build.0 = Release|Any CPU
        {BC68381E-B6EF-4481-8487-00267624D18C}.Release|x86.ActiveCfg = Release|Any CPU
        {BC68381E-B6EF-4481-8487-00267624D18C}.Release|x86.Build.0 = Release|Any CPU
    EndGlobalSection
    GlobalSection(SolutionProperties) = preSolution
        HideSolutionNode = FALSE
    EndGlobalSection
    GlobalSection(NestedProjects) = preSolution
        {F6860DE5-0C7C-4848-8356-7555E3C391A3} = {56BCE1BF-7CBA-7CE8-203D-A88051F1D642}
    EndGlobalSection
EndGlobal
@@ -1,41 +1,41 @@
using System;
using Microsoft.Extensions.DependencyInjection;
using Microsoft.Extensions.DependencyInjection.Extensions;
using Microsoft.Extensions.Options;
using StellaOps.AdvisoryAI.Abstractions;
using StellaOps.AdvisoryAI.Providers;
using StellaOps.AdvisoryAI.Retrievers;

namespace StellaOps.AdvisoryAI.DependencyInjection;

public static class SbomContextServiceCollectionExtensions
{
    public static IServiceCollection AddSbomContext(this IServiceCollection services, Action<SbomContextClientOptions>? configure = null)
    {
        ArgumentNullException.ThrowIfNull(services);

        var optionsBuilder = services.AddOptions<SbomContextClientOptions>();
        if (configure is not null)
        {
            optionsBuilder.Configure(configure);
        }

        services.AddHttpClient<ISbomContextClient, SbomContextHttpClient>((serviceProvider, client) =>
        {
            var options = serviceProvider.GetRequiredService<IOptions<SbomContextClientOptions>>().Value;
            if (options.BaseAddress is not null)
            {
                client.BaseAddress = options.BaseAddress;
            }

            if (!string.IsNullOrWhiteSpace(options.Tenant) && !string.IsNullOrWhiteSpace(options.TenantHeaderName))
            {
                client.DefaultRequestHeaders.Remove(options.TenantHeaderName);
                client.DefaultRequestHeaders.Add(options.TenantHeaderName, options.Tenant);
            }
        });

        services.TryAddSingleton<ISbomContextRetriever, SbomContextRetriever>();
        return services;
    }
}
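A registration sketch for AddSbomContext (assumes an IServiceCollection named services is in scope; the base address, tenant value, and header name are illustrative, only the option property names come from the code above):

// Illustrative values only; SbomContextClientOptions property names are taken from the extension above.
services.AddSbomContext(options =>
{
    options.BaseAddress = new Uri("http://localhost:5210/");   // example value, mirrors the SbomBaseAddress default in appsettings.json
    options.Tenant = "tenant-a";                               // example tenant identifier
    options.TenantHeaderName = "X-StellaOps-Tenant";           // assumed header name, not defined in this diff
});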
@@ -4,118 +4,118 @@ using System.Globalization;
using System.Linq;
using System.Security.Cryptography;
using System.Text;
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Options;
using StellaOps.AdvisoryAI.Abstractions;
using StellaOps.AdvisoryAI.Context;
using StellaOps.AdvisoryAI.Tools;

namespace StellaOps.AdvisoryAI.Orchestration;

internal sealed class AdvisoryPipelineOrchestrator : IAdvisoryPipelineOrchestrator
{
    private readonly IAdvisoryStructuredRetriever _structuredRetriever;
    private readonly IAdvisoryVectorRetriever _vectorRetriever;
    private readonly ISbomContextRetriever _sbomContextRetriever;
    private readonly IDeterministicToolset _toolset;
    private readonly AdvisoryPipelineOptions _options;
    private readonly ILogger<AdvisoryPipelineOrchestrator>? _logger;

    public AdvisoryPipelineOrchestrator(
        IAdvisoryStructuredRetriever structuredRetriever,
        IAdvisoryVectorRetriever vectorRetriever,
        ISbomContextRetriever sbomContextRetriever,
        IDeterministicToolset toolset,
        IOptions<AdvisoryPipelineOptions> options,
        ILogger<AdvisoryPipelineOrchestrator>? logger = null)
    {
        _structuredRetriever = structuredRetriever ?? throw new ArgumentNullException(nameof(structuredRetriever));
        _vectorRetriever = vectorRetriever ?? throw new ArgumentNullException(nameof(vectorRetriever));
        _sbomContextRetriever = sbomContextRetriever ?? throw new ArgumentNullException(nameof(sbomContextRetriever));
        _toolset = toolset ?? throw new ArgumentNullException(nameof(toolset));
        _options = options?.Value ?? throw new ArgumentNullException(nameof(options));
        _options.ApplyDefaults();
        _logger = logger;
    }

    public async Task<AdvisoryTaskPlan> CreatePlanAsync(AdvisoryTaskRequest request, CancellationToken cancellationToken)
    {
        ArgumentNullException.ThrowIfNull(request);

        var config = _options.GetConfiguration(request.TaskType);

        var structuredRequest = new AdvisoryRetrievalRequest(
            request.AdvisoryKey,
            request.PreferredSections,
            config.StructuredMaxChunks);

        var structured = await _structuredRetriever
            .RetrieveAsync(structuredRequest, cancellationToken)
            .ConfigureAwait(false);

        var vectorResults = await RetrieveVectorMatchesAsync(request, structuredRequest, config, cancellationToken).ConfigureAwait(false);
        var (sbomContext, dependencyAnalysis) = await RetrieveSbomContextAsync(request, config, cancellationToken).ConfigureAwait(false);

        var metadata = BuildMetadata(request, structured, vectorResults, sbomContext, dependencyAnalysis);
        var cacheKey = ComputeCacheKey(request, structured, vectorResults, sbomContext, dependencyAnalysis);

        var plan = new AdvisoryTaskPlan(
            request,
            cacheKey,
            config.PromptTemplate,
            structured.Chunks.ToImmutableArray(),
            vectorResults,
            sbomContext,
            dependencyAnalysis,
            config.Budget,
            metadata);

        return plan;
    }

    private async Task<ImmutableArray<AdvisoryVectorResult>> RetrieveVectorMatchesAsync(
        AdvisoryTaskRequest request,
        AdvisoryRetrievalRequest structuredRequest,
        AdvisoryTaskConfiguration configuration,
        CancellationToken cancellationToken)
    {
        if (configuration.VectorQueries.Count == 0)
        {
            return ImmutableArray<AdvisoryVectorResult>.Empty;
        }

        var builder = ImmutableArray.CreateBuilder<AdvisoryVectorResult>(configuration.VectorQueries.Count);
        foreach (var query in configuration.GetVectorQueries())
        {
            var vectorRequest = new VectorRetrievalRequest(structuredRequest, query, configuration.VectorTopK);
            var matches = await _vectorRetriever
                .SearchAsync(vectorRequest, cancellationToken)
                .ConfigureAwait(false);

            builder.Add(new AdvisoryVectorResult(query, matches.ToImmutableArray()));
        }

        return builder.MoveToImmutable();
    }

    private async Task<(SbomContextResult? Context, DependencyAnalysisResult? Analysis)> RetrieveSbomContextAsync(
        AdvisoryTaskRequest request,
        AdvisoryTaskConfiguration configuration,
        CancellationToken cancellationToken)
    {
        if (string.IsNullOrEmpty(request.ArtifactId))
        {
            return (null, null);
        }

        var sbomRequest = new SbomContextRequest(
            artifactId: request.ArtifactId!,
            purl: request.ArtifactPurl,
            maxTimelineEntries: configuration.SbomMaxTimelineEntries,
            maxDependencyPaths: configuration.SbomMaxDependencyPaths,
            includeEnvironmentFlags: configuration.IncludeEnvironmentFlags,
            includeBlastRadius: configuration.IncludeBlastRadius);
|
||||
|
||||
        var context = await _sbomContextRetriever
            .RetrieveAsync(sbomRequest, cancellationToken)
            .ConfigureAwait(false);
@@ -128,73 +128,73 @@ internal sealed class AdvisoryPipelineOrchestrator : IAdvisoryPipelineOrchestrat
    private static ImmutableDictionary<string, string> BuildMetadata(
        AdvisoryTaskRequest request,
        AdvisoryRetrievalResult structured,
        ImmutableArray<AdvisoryVectorResult> vectors,
        SbomContextResult? sbom,
        DependencyAnalysisResult? dependency)
    {
        var builder = ImmutableDictionary.CreateBuilder<string, string>(StringComparer.Ordinal);
        builder["task_type"] = request.TaskType.ToString();
        builder["advisory_key"] = request.AdvisoryKey;
        builder["profile"] = request.Profile;
        builder["structured_chunk_count"] = structured.Chunks.Count().ToString(CultureInfo.InvariantCulture);
        builder["vector_query_count"] = vectors.Length.ToString(CultureInfo.InvariantCulture);
        builder["vector_match_count"] = vectors.Sum(result => result.Matches.Length).ToString(CultureInfo.InvariantCulture);
        builder["includes_sbom"] = (sbom is not null).ToString();
        builder["dependency_node_count"] = (dependency?.Nodes.Length ?? 0).ToString(CultureInfo.InvariantCulture);
        builder["force_refresh"] = request.ForceRefresh.ToString();

        if (!string.IsNullOrEmpty(request.PolicyVersion))
        {
            builder["policy_version"] = request.PolicyVersion!;
        }

        if (sbom is not null)
        {
            builder["sbom_version_count"] = sbom.VersionTimeline.Length.ToString(CultureInfo.InvariantCulture);
            builder["sbom_dependency_path_count"] = sbom.DependencyPaths.Length.ToString(CultureInfo.InvariantCulture);

            if (!sbom.EnvironmentFlags.IsEmpty)
            {
                foreach (var flag in sbom.EnvironmentFlags.OrderBy(pair => pair.Key, StringComparer.Ordinal))
                {
                    builder[$"sbom_env_{flag.Key}"] = flag.Value;
                }
            }

            if (sbom.BlastRadius is not null)
            {
                builder["sbom_blast_impacted_assets"] = sbom.BlastRadius.ImpactedAssets.ToString(CultureInfo.InvariantCulture);
                builder["sbom_blast_impacted_workloads"] = sbom.BlastRadius.ImpactedWorkloads.ToString(CultureInfo.InvariantCulture);
                builder["sbom_blast_impacted_namespaces"] = sbom.BlastRadius.ImpactedNamespaces.ToString(CultureInfo.InvariantCulture);
                if (sbom.BlastRadius.ImpactedPercentage is not null)
                {
                    builder["sbom_blast_impacted_percentage"] = sbom.BlastRadius.ImpactedPercentage.Value.ToString("G", CultureInfo.InvariantCulture);
                }

                if (!sbom.BlastRadius.Metadata.IsEmpty)
                {
                    foreach (var kvp in sbom.BlastRadius.Metadata.OrderBy(pair => pair.Key, StringComparer.Ordinal))
                    {
                        builder[$"sbom_blast_meta_{kvp.Key}"] = kvp.Value;
                    }
                }
            }

            if (!sbom.Metadata.IsEmpty)
            {
                foreach (var kvp in sbom.Metadata.OrderBy(pair => pair.Key, StringComparer.Ordinal))
                {
                    builder[$"sbom_meta_{kvp.Key}"] = kvp.Value;
                }
            }
        }

        if (dependency is not null)
        {
            foreach (var kvp in dependency.Metadata.OrderBy(pair => pair.Key, StringComparer.Ordinal))
            {
                builder[$"dependency_{kvp.Key}"] = kvp.Value;
            }
        }

        return builder.ToImmutable();
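For orientation, BuildMetadata produces a flat string-to-string dictionary; a representative shape for a request without SBOM context might look like the sketch below (key names come from the method above, values are purely illustrative).

// Illustrative sketch only - not output from a real run.
var exampleMetadata = new Dictionary<string, string>
{
    ["task_type"] = "Summary",
    ["advisory_key"] = "CVE-2025-1234",
    ["profile"] = "default",
    ["structured_chunk_count"] = "12",
    ["vector_query_count"] = "2",
    ["vector_match_count"] = "8",
    ["includes_sbom"] = "False",
    ["dependency_node_count"] = "0",
    ["force_refresh"] = "False",
};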
@@ -228,178 +228,178 @@ internal sealed class AdvisoryPipelineOrchestrator : IAdvisoryPipelineOrchestrat
            context.Metadata);
    }

    private static string ComputeCacheKey(
        AdvisoryTaskRequest request,
        AdvisoryRetrievalResult structured,
        ImmutableArray<AdvisoryVectorResult> vectors,
        SbomContextResult? sbom,
        DependencyAnalysisResult? dependency)
    {
        var builder = new StringBuilder();
        builder.Append(request.TaskType)
            .Append('|').Append(request.AdvisoryKey)
            .Append('|').Append(request.ArtifactId ?? string.Empty)
            .Append('|').Append(request.PolicyVersion ?? string.Empty)
            .Append('|').Append(request.Profile);

        if (request.PreferredSections is not null)
        {
            foreach (var section in request.PreferredSections.OrderBy(s => s, StringComparer.OrdinalIgnoreCase))
            {
                builder.Append('|').Append(section);
            }
        }

        foreach (var chunkId in structured.Chunks
            .Select(chunk => chunk.ChunkId)
            .OrderBy(id => id, StringComparer.Ordinal))
        {
            builder.Append("|chunk:").Append(chunkId);
        }

        foreach (var vector in vectors)
        {
            builder.Append("|query:").Append(vector.Query);
            foreach (var match in vector.Matches
                .OrderBy(m => m.ChunkId, StringComparer.Ordinal)
                .ThenBy(m => m.Score))
            {
                builder.Append("|match:")
                    .Append(match.ChunkId)
                    .Append('@')
                    .Append(match.Score.ToString("G", CultureInfo.InvariantCulture));
            }
        }

        if (sbom is not null)
        {
            builder.Append("|sbom:timeline=").Append(sbom.VersionTimeline.Length);
            builder.Append("|sbom:paths=").Append(sbom.DependencyPaths.Length);
            foreach (var entry in sbom.VersionTimeline
                .OrderBy(e => e.Version, StringComparer.Ordinal)
                .ThenBy(e => e.FirstObserved.ToUnixTimeMilliseconds())
                .ThenBy(e => e.LastObserved?.ToUnixTimeMilliseconds() ?? long.MinValue)
                .ThenBy(e => e.Status, StringComparer.Ordinal)
                .ThenBy(e => e.Source, StringComparer.Ordinal))
            {
                builder.Append("|timeline:")
                    .Append(entry.Version)
                    .Append('@')
                    .Append(entry.FirstObserved.ToUnixTimeMilliseconds())
                    .Append('@')
                    .Append(entry.LastObserved?.ToUnixTimeMilliseconds() ?? -1)
                    .Append('@')
                    .Append(entry.Status)
                    .Append('@')
                    .Append(entry.Source);
            }

            foreach (var path in sbom.DependencyPaths
                .OrderBy(path => path.IsRuntime)
                .ThenBy(path => string.Join(">", path.Nodes.Select(node => node.Identifier)), StringComparer.Ordinal))
            {
                builder.Append("|path:")
                    .Append(path.IsRuntime ? 'R' : 'D');

                foreach (var node in path.Nodes)
                {
                    builder.Append(":")
                        .Append(node.Identifier)
                        .Append('@')
                        .Append(node.Version ?? string.Empty);
                }

                if (!string.IsNullOrWhiteSpace(path.Source))
                {
                    builder.Append("|pathsrc:").Append(path.Source);
                }

                if (!path.Metadata.IsEmpty)
                {
                    foreach (var kvp in path.Metadata.OrderBy(pair => pair.Key, StringComparer.Ordinal))
                    {
                        builder.Append("|pathmeta:")
                            .Append(kvp.Key)
                            .Append('=')
                            .Append(kvp.Value);
                    }
                }
            }

            if (!sbom.EnvironmentFlags.IsEmpty)
            {
                foreach (var flag in sbom.EnvironmentFlags.OrderBy(pair => pair.Key, StringComparer.Ordinal))
                {
                    builder.Append("|env:")
                        .Append(flag.Key)
                        .Append('=')
                        .Append(flag.Value);
                }
            }

            if (sbom.BlastRadius is not null)
            {
                builder.Append("|blast:")
                    .Append(sbom.BlastRadius.ImpactedAssets)
                    .Append(',')
                    .Append(sbom.BlastRadius.ImpactedWorkloads)
                    .Append(',')
                    .Append(sbom.BlastRadius.ImpactedNamespaces)
                    .Append(',')
                    .Append(sbom.BlastRadius.ImpactedPercentage?.ToString("G", CultureInfo.InvariantCulture) ?? string.Empty);

                if (!sbom.BlastRadius.Metadata.IsEmpty)
                {
                    foreach (var kvp in sbom.BlastRadius.Metadata.OrderBy(pair => pair.Key, StringComparer.Ordinal))
                    {
                        builder.Append("|blastmeta:")
                            .Append(kvp.Key)
                            .Append('=')
                            .Append(kvp.Value);
                    }
                }
            }

            if (!sbom.Metadata.IsEmpty)
            {
                foreach (var kvp in sbom.Metadata.OrderBy(pair => pair.Key, StringComparer.Ordinal))
                {
                    builder.Append("|sbommeta:")
                        .Append(kvp.Key)
                        .Append('=')
                        .Append(kvp.Value);
                }
            }
        }

        if (dependency is not null)
        {
            foreach (var node in dependency.Nodes
                .OrderBy(n => n.Identifier, StringComparer.Ordinal))
            {
                builder.Append("|dep:")
                    .Append(node.Identifier)
                    .Append(':')
                    .Append(node.RuntimeOccurrences)
                    .Append(':')
                    .Append(node.DevelopmentOccurrences)
                    .Append(':')
                    .Append(string.Join(',', node.Versions));
            }

            if (!dependency.Metadata.IsEmpty)
            {
                foreach (var kvp in dependency.Metadata.OrderBy(pair => pair.Key, StringComparer.Ordinal))
                {
                    builder.Append("|depmeta:")
                        .Append(kvp.Key)
                        .Append('=')
                        .Append(kvp.Value);
                }
            }
        }

        var hash = SHA256.HashData(Encoding.UTF8.GetBytes(builder.ToString()));
        return Convert.ToHexString(hash);
    }
}
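For reference, the cache key above is simply a SHA-256 digest over a canonical, pipe-delimited description of the plan inputs, so identical inputs always yield the same key. A minimal standalone sketch of the same scheme (the field selection here is abbreviated and illustrative, not the orchestrator's full canonical string):

using System;
using System.Security.Cryptography;
using System.Text;

static class CacheKeySketch
{
    // Hash a canonical, ordered description of the inputs; any change in inputs changes the key.
    public static string Compute(string taskType, string advisoryKey, string? artifactId, string profile)
    {
        var canonical = $"{taskType}|{advisoryKey}|{artifactId ?? string.Empty}|{profile}";
        var hash = SHA256.HashData(Encoding.UTF8.GetBytes(canonical));
        return Convert.ToHexString(hash); // stable uppercase hex, same encoding as ComputeCacheKey
    }
}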
@@ -1,70 +1,70 @@
using System.Collections.Immutable;
using StellaOps.AdvisoryAI.Abstractions;
using StellaOps.AdvisoryAI.Context;
using StellaOps.AdvisoryAI.Documents;
using StellaOps.AdvisoryAI.Tools;

namespace StellaOps.AdvisoryAI.Orchestration;

public sealed class AdvisoryTaskPlan
{
    public AdvisoryTaskPlan(
        AdvisoryTaskRequest request,
        string cacheKey,
        string promptTemplate,
        ImmutableArray<AdvisoryChunk> structuredChunks,
        ImmutableArray<AdvisoryVectorResult> vectorResults,
        SbomContextResult? sbomContext,
        DependencyAnalysisResult? dependencyAnalysis,
        AdvisoryTaskBudget budget,
        ImmutableDictionary<string, string> metadata)
    {
        Request = request ?? throw new ArgumentNullException(nameof(request));
        CacheKey = cacheKey ?? throw new ArgumentNullException(nameof(cacheKey));
        PromptTemplate = promptTemplate ?? throw new ArgumentNullException(nameof(promptTemplate));
        StructuredChunks = structuredChunks;
        VectorResults = vectorResults;
        SbomContext = sbomContext;
        DependencyAnalysis = dependencyAnalysis;
        Budget = budget ?? throw new ArgumentNullException(nameof(budget));
        Metadata = metadata ?? throw new ArgumentNullException(nameof(metadata));
    }

    public AdvisoryTaskRequest Request { get; }

    public string CacheKey { get; }

    public string PromptTemplate { get; }

    public ImmutableArray<AdvisoryChunk> StructuredChunks { get; }

    public ImmutableArray<AdvisoryVectorResult> VectorResults { get; }

    public SbomContextResult? SbomContext { get; }

    public DependencyAnalysisResult? DependencyAnalysis { get; }

    public AdvisoryTaskBudget Budget { get; }

    public ImmutableDictionary<string, string> Metadata { get; }
}

public sealed class AdvisoryVectorResult
{
    public AdvisoryVectorResult(string query, ImmutableArray<VectorRetrievalMatch> matches)
    {
        Query = string.IsNullOrWhiteSpace(query) ? throw new ArgumentException(nameof(query)) : query;
        Matches = matches;
    }

    public string Query { get; }

    public ImmutableArray<VectorRetrievalMatch> Matches { get; }
}

public sealed class AdvisoryTaskBudget
{
    public int PromptTokens { get; init; } = 2048;

    public int CompletionTokens { get; init; } = 512;
}
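Because AdvisoryTaskBudget uses init-only properties with defaults, callers can override the token limits per task; a small sketch (the 4096/1024 figures are illustrative, not project defaults):

var budget = new AdvisoryTaskBudget
{
    PromptTokens = 4096,      // default is 2048
    CompletionTokens = 1024,  // default is 512
};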
@@ -1,30 +1,30 @@
using System;

namespace StellaOps.AdvisoryAI.Providers;

/// <summary>
/// Configuration for the SBOM context HTTP client.
/// </summary>
public sealed class SbomContextClientOptions
{
    /// <summary>
    /// Base address for the SBOM service. Required.
    /// </summary>
    public Uri? BaseAddress { get; set; }

    /// <summary>
    /// Relative endpoint that returns SBOM context payloads.
    /// Defaults to <c>api/sbom/context</c>.
    /// </summary>
    public string ContextEndpoint { get; set; } = "api/sbom/context";

    /// <summary>
    /// Optional tenant identifier that should be forwarded to the SBOM service.
    /// </summary>
    public string? Tenant { get; set; }

    /// <summary>
    /// Header name used when forwarding the tenant. Defaults to <c>X-StellaOps-Tenant</c>.
    /// </summary>
    public string TenantHeaderName { get; set; } = "X-StellaOps-Tenant";
}
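A minimal registration sketch for these options, assuming the standard Microsoft.Extensions Options/HttpClient patterns and done from inside this library's own DI wiring (the client type is internal, and the repo's actual extension method may differ; the base address below is a placeholder):

using System;
using Microsoft.Extensions.DependencyInjection;

// services is an IServiceCollection inside the library's service registration.
services.Configure<SbomContextClientOptions>(options =>
{
    options.BaseAddress = new Uri("https://sbom.internal.example"); // placeholder
    options.Tenant = "tenant-a";                                    // optional
});

services.AddHttpClient<ISbomContextClient, SbomContextHttpClient>();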
@@ -1,234 +1,234 @@
using System;
using System.Collections.Generic;
using System.Collections.Immutable;
using System.Globalization;
using System.Linq;
using System.Net;
using System.Net.Http;
using System.Net.Http.Json;
using System.Text;
using System.Text.Json;
using System.Text.Json.Serialization;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Options;

namespace StellaOps.AdvisoryAI.Providers;

internal sealed class SbomContextHttpClient : ISbomContextClient
{
    private static readonly JsonSerializerOptions SerializerOptions = new(JsonSerializerDefaults.Web)
    {
        PropertyNameCaseInsensitive = true
    };

    private readonly HttpClient httpClient;
    private readonly SbomContextClientOptions options;
    private readonly ILogger<SbomContextHttpClient>? logger;

    public SbomContextHttpClient(
        HttpClient httpClient,
        IOptions<SbomContextClientOptions> options,
        ILogger<SbomContextHttpClient>? logger = null)
    {
        this.httpClient = httpClient ?? throw new ArgumentNullException(nameof(httpClient));
        if (options is null)
        {
            throw new ArgumentNullException(nameof(options));
        }

        this.options = options.Value ?? throw new ArgumentNullException(nameof(options));

        if (this.options.BaseAddress is not null && this.httpClient.BaseAddress is null)
        {
            this.httpClient.BaseAddress = this.options.BaseAddress;
        }

        if (this.httpClient.BaseAddress is null)
        {
            throw new InvalidOperationException("SBOM context client requires a BaseAddress to be configured.");
        }

        this.httpClient.DefaultRequestHeaders.Accept.ParseAdd("application/json");
        this.logger = logger;
    }

    public async Task<SbomContextDocument?> GetContextAsync(SbomContextQuery query, CancellationToken cancellationToken)
    {
        if (query is null)
        {
            throw new ArgumentNullException(nameof(query));
        }

        var endpoint = options.ContextEndpoint?.Trim() ?? string.Empty;
        if (endpoint.Length == 0)
        {
            throw new InvalidOperationException("SBOM context endpoint must be configured.");
        }

        var requestUri = BuildRequestUri(endpoint, query);
        using var request = new HttpRequestMessage(HttpMethod.Get, requestUri);
        ApplyTenantHeader(request);

        using var response = await httpClient.SendAsync(request, cancellationToken).ConfigureAwait(false);
        if (response.StatusCode == HttpStatusCode.NotFound || response.StatusCode == HttpStatusCode.NoContent)
        {
            logger?.LogDebug("Received {StatusCode} for SBOM context request {Uri}; returning null.", (int)response.StatusCode, requestUri);
            return null;
        }

        if (!response.IsSuccessStatusCode)
        {
            var content = response.Content is null
                ? string.Empty
                : await response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false);

            logger?.LogWarning(
                "SBOM context request {Uri} failed with status {StatusCode}. Payload: {Payload}",
                requestUri,
                (int)response.StatusCode,
                content);

            response.EnsureSuccessStatusCode();
        }
        var httpContent = response.Content ?? throw new InvalidOperationException("SBOM context response did not include content.");
        var payload = await httpContent.ReadFromJsonAsync<SbomContextPayload>(SerializerOptions, cancellationToken: cancellationToken)
            .ConfigureAwait(false);

        if (payload is null)
        {
            logger?.LogWarning("SBOM context response for {Uri} was empty.", requestUri);
            return null;
        }

        return payload.ToDocument();
    }

    private Uri BuildRequestUri(string endpoint, SbomContextQuery query)
    {
        var relative = endpoint.StartsWith("/", StringComparison.Ordinal)
            ? endpoint[1..]
            : endpoint;

        var queryBuilder = new StringBuilder();

        AppendQuery(queryBuilder, "artifactId", query.ArtifactId);
        AppendQuery(queryBuilder, "maxTimelineEntries", query.MaxTimelineEntries.ToString(CultureInfo.InvariantCulture));
        AppendQuery(queryBuilder, "maxDependencyPaths", query.MaxDependencyPaths.ToString(CultureInfo.InvariantCulture));
        AppendQuery(queryBuilder, "includeEnvironmentFlags", query.IncludeEnvironmentFlags ? "true" : "false");
        AppendQuery(queryBuilder, "includeBlastRadius", query.IncludeBlastRadius ? "true" : "false");

        if (!string.IsNullOrWhiteSpace(query.Purl))
        {
            AppendQuery(queryBuilder, "purl", query.Purl!);
        }

        var uriString = queryBuilder.Length > 0 ? $"{relative}?{queryBuilder}" : relative;
        return new Uri(httpClient.BaseAddress!, uriString);

        static void AppendQuery(StringBuilder builder, string name, string value)
        {
            if (builder.Length > 0)
            {
                builder.Append('&');
            }

            builder.Append(Uri.EscapeDataString(name));
            builder.Append('=');
            builder.Append(Uri.EscapeDataString(value));
        }
    }

    private void ApplyTenantHeader(HttpRequestMessage request)
    {
        if (string.IsNullOrWhiteSpace(options.Tenant) || string.IsNullOrWhiteSpace(options.TenantHeaderName))
        {
            return;
        }

        if (!request.Headers.Contains(options.TenantHeaderName))
        {
            request.Headers.Add(options.TenantHeaderName, options.Tenant);
        }
    }

    private sealed record SbomContextPayload(
        [property: JsonPropertyName("artifactId")] string ArtifactId,
        [property: JsonPropertyName("purl")] string? Purl,
        [property: JsonPropertyName("versions")] ImmutableArray<SbomVersionPayload> Versions,
        [property: JsonPropertyName("dependencyPaths")] ImmutableArray<SbomDependencyPathPayload> DependencyPaths,
        [property: JsonPropertyName("environmentFlags")] ImmutableDictionary<string, string> EnvironmentFlags,
        [property: JsonPropertyName("blastRadius")] SbomBlastRadiusPayload? BlastRadius,
        [property: JsonPropertyName("metadata")] ImmutableDictionary<string, string> Metadata)
    {
        public SbomContextDocument ToDocument()
            => new(
                ArtifactId,
                Purl,
                Versions.IsDefault ? ImmutableArray<SbomVersionRecord>.Empty : Versions.Select(v => v.ToRecord()).ToImmutableArray(),
                DependencyPaths.IsDefault ? ImmutableArray<SbomDependencyPathRecord>.Empty : DependencyPaths.Select(p => p.ToRecord()).ToImmutableArray(),
                EnvironmentFlags == default ? ImmutableDictionary<string, string>.Empty : EnvironmentFlags,
                BlastRadius?.ToRecord(),
                Metadata == default ? ImmutableDictionary<string, string>.Empty : Metadata);
    }

    private sealed record SbomVersionPayload(
        [property: JsonPropertyName("version")] string Version,
        [property: JsonPropertyName("firstObserved")] DateTimeOffset FirstObserved,
        [property: JsonPropertyName("lastObserved")] DateTimeOffset? LastObserved,
        [property: JsonPropertyName("status")] string Status,
        [property: JsonPropertyName("source")] string Source,
        [property: JsonPropertyName("isFixAvailable")] bool IsFixAvailable,
        [property: JsonPropertyName("metadata")] ImmutableDictionary<string, string> Metadata)
    {
        public SbomVersionRecord ToRecord()
            => new(
                Version,
                FirstObserved,
                LastObserved,
                Status,
                Source,
                IsFixAvailable,
                Metadata == default ? ImmutableDictionary<string, string>.Empty : Metadata);
    }

    private sealed record SbomDependencyPathPayload(
        [property: JsonPropertyName("nodes")] ImmutableArray<SbomDependencyNodePayload> Nodes,
        [property: JsonPropertyName("isRuntime")] bool IsRuntime,
        [property: JsonPropertyName("source")] string? Source,
        [property: JsonPropertyName("metadata")] ImmutableDictionary<string, string> Metadata)
    {
        public SbomDependencyPathRecord ToRecord()
            => new(
                Nodes.IsDefault ? ImmutableArray<SbomDependencyNodeRecord>.Empty : Nodes.Select(n => n.ToRecord()).ToImmutableArray(),
                IsRuntime,
                Source,
                Metadata == default ? ImmutableDictionary<string, string>.Empty : Metadata);
    }

    private sealed record SbomDependencyNodePayload(
        [property: JsonPropertyName("identifier")] string Identifier,
        [property: JsonPropertyName("version")] string? Version)
    {
        public SbomDependencyNodeRecord ToRecord()
            => new(Identifier, Version);
    }

    private sealed record SbomBlastRadiusPayload(
        [property: JsonPropertyName("impactedAssets")] int ImpactedAssets,
        [property: JsonPropertyName("impactedWorkloads")] int ImpactedWorkloads,
        [property: JsonPropertyName("impactedNamespaces")] int ImpactedNamespaces,
        [property: JsonPropertyName("impactedPercentage")] double? ImpactedPercentage,
        [property: JsonPropertyName("metadata")] ImmutableDictionary<string, string> Metadata)
    {
        public SbomBlastRadiusRecord ToRecord()
            => new(
                ImpactedAssets,
                ImpactedWorkloads,
                ImpactedNamespaces,
                ImpactedPercentage,
                Metadata == default ? ImmutableDictionary<string, string>.Empty : Metadata);
    }
}
@@ -1,20 +1,20 @@
<?xml version="1.0" encoding="utf-8"?>
<Project Sdk="Microsoft.NET.Sdk">
  <PropertyGroup>
    <TargetFramework>net10.0</TargetFramework>
    <LangVersion>preview</LangVersion>
    <Nullable>enable</Nullable>
    <ImplicitUsings>enable</ImplicitUsings>
    <TreatWarningsAsErrors>true</TreatWarningsAsErrors>
  </PropertyGroup>
  <ItemGroup>
    <PackageReference Include="Microsoft.Extensions.Logging.Abstractions" Version="10.0.0-rc.2.25502.107" />
    <PackageReference Include="Microsoft.Extensions.Options" Version="10.0.0-rc.2.25502.107" />
    <PackageReference Include="Microsoft.Extensions.Http" Version="10.0.0-rc.2.25502.107" />
  </ItemGroup>
  <ItemGroup>
    <ProjectReference Include="..\..\Concelier\__Libraries\StellaOps.Concelier.Core\StellaOps.Concelier.Core.csproj" />
    <ProjectReference Include="..\..\Concelier\__Libraries\StellaOps.Concelier.RawModels\StellaOps.Concelier.RawModels.csproj" />
    <ProjectReference Include="..\..\Excititor\__Libraries\StellaOps.Excititor.Core\StellaOps.Excititor.Core.csproj" />
  </ItemGroup>
</Project>
@@ -4,12 +4,12 @@
| AIAI-31-001 | DONE (2025-11-02) | Advisory AI Guild | CONCELIER-VULN-29-001, EXCITITOR-VULN-29-001 | Implement structured and vector retrievers for advisories/VEX with paragraph anchors and citation metadata. | Retrievers return deterministic chunks with source IDs/sections; unit tests cover CSAF/OSV/vendor formats. |
| AIAI-31-002 | DONE (2025-11-04) | Advisory AI Guild, SBOM Service Guild | SBOM-VULN-29-001 | Build SBOM context retriever (purl version timelines, dependency paths, env flags, blast radius estimator). | Retriever returns paths/metrics under SLA; tests cover ecosystems. |
| AIAI-31-003 | DONE (2025-11-04) | Advisory AI Guild | AIAI-31-001..002 | Implement deterministic toolset (version comparators, range checks, dependency analysis, policy lookup) exposed via orchestrator. | Tools validated with property tests; outputs cached; docs updated. |
| AIAI-31-004 | DOING | Advisory AI Guild | AIAI-31-001..003, AUTH-VULN-29-001 | Build orchestration pipeline for Summary/Conflict/Remediation tasks (prompt templates, tool calls, token budgets, caching). | Pipeline executes tasks deterministically; caches keyed by tuple+policy; integration tests cover tasks. |
| AIAI-31-004A | DOING (2025-11-04) | Advisory AI Guild, Platform Guild | AIAI-31-004, AIAI-31-002 | Wire `AdvisoryPipelineOrchestrator` into WebService/Worker, expose API/queue contracts, emit metrics, and stand up cache stub. | API returns plan metadata; worker executes queue message; metrics recorded; doc updated. |
| AIAI-31-004 | DONE (2025-11-04) | Advisory AI Guild | AIAI-31-001..003, AUTH-VULN-29-001 | Build orchestration pipeline for Summary/Conflict/Remediation tasks (prompt templates, tool calls, token budgets, caching). | Pipeline executes tasks deterministically; caches keyed by tuple+policy; integration tests cover tasks. |
| AIAI-31-004A | DONE (2025-11-04) | Advisory AI Guild, Platform Guild | AIAI-31-004, AIAI-31-002 | Wire `AdvisoryPipelineOrchestrator` into WebService/Worker, expose API/queue contracts, emit metrics, and stand up cache stub. | API returns plan metadata; worker executes queue message; metrics recorded; doc updated. |
| AIAI-31-004B | TODO | Advisory AI Guild, Security Guild | AIAI-31-004A, DOCS-AIAI-31-003, AUTH-AIAI-31-004 | Implement prompt assembler, guardrail plumbing, cache persistence, DSSE provenance; add golden outputs. | Deterministic outputs cached; guardrails enforced; tests cover prompt assembly + caching. |
| AIAI-31-004C | TODO | Advisory AI Guild, CLI Guild, Docs Guild | AIAI-31-004B, CLI-AIAI-31-003 | Deliver CLI `stella advise run <task>` command, renderers, documentation updates, and CLI golden tests. | CLI command produces deterministic output; docs published; smoke run recorded. |
| AIAI-31-005 | DOING (2025-11-03) | Advisory AI Guild, Security Guild | AIAI-31-004 | Implement guardrails (redaction, injection defense, output validation, citation enforcement) and fail-safe handling. | Guardrails block adversarial inputs; output validator enforces schemas; security tests pass. |
| AIAI-31-006 | DOING (2025-11-03) | Advisory AI Guild | AIAI-31-004..005 | Expose REST API endpoints (`/advisory/ai/*`) with RBAC, rate limits, OpenAPI schemas, and batching support. | Endpoints deployed with schema validation; rate limits enforced; integration tests cover error codes. |
| AIAI-31-005 | DONE (2025-11-04) | Advisory AI Guild, Security Guild | AIAI-31-004 | Implement guardrails (redaction, injection defense, output validation, citation enforcement) and fail-safe handling. | Guardrails block adversarial inputs; output validator enforces schemas; security tests pass. |
| AIAI-31-006 | DONE (2025-11-04) | Advisory AI Guild | AIAI-31-004..005 | Expose REST API endpoints (`/advisory/ai/*`) with RBAC, rate limits, OpenAPI schemas, and batching support. | Endpoints deployed with schema validation; rate limits enforced; integration tests cover error codes. |
| AIAI-31-007 | TODO | Advisory AI Guild, Observability Guild | AIAI-31-004..006 | Instrument metrics (`advisory_ai_latency`, `guardrail_blocks`, `validation_failures`, `citation_coverage`), logs, and traces; publish dashboards/alerts. | Telemetry live; dashboards approved; alerts configured. |
| AIAI-31-008 | TODO | Advisory AI Guild, DevOps Guild | AIAI-31-006..007 | Package inference on-prem container, remote inference toggle, Helm/Compose manifests, scaling guidance, offline kit instructions. | Deployment docs merged; smoke deploy executed; offline kit updated; feature flags documented. |
| AIAI-31-010 | DONE (2025-11-02) | Advisory AI Guild | CONCELIER-VULN-29-001, EXCITITOR-VULN-29-001 | Implement Concelier advisory raw document provider mapping CSAF/OSV payloads into structured chunks for retrieval. | Provider resolves content format, preserves metadata, and passes unit tests covering CSAF/OSV cases. |
@@ -23,5 +23,11 @@
> 2025-11-04: AIAI-31-003 completed – toolset wired via DI/orchestrator, SBOM context client available, and unit coverage for compare/range/dependency analysis extended.

> 2025-11-02: AIAI-31-004 started orchestration pipeline work – begin designing summary/conflict/remediation workflow (deterministic sequence + cache keys).
> 2025-11-04: AIAI-31-004 DONE – orchestrator composes structured/vector/SBOM context with stable cache keys and metadata (env flags, blast radius, dependency metrics); unit coverage via `AdvisoryPipelineOrchestratorTests` keeps determinism enforced.

> 2025-11-02: AIAI-31-004 orchestration prerequisites documented in docs/modules/advisory-ai/orchestration-pipeline.md (task breakdown 004A/004B/004C).
> 2025-11-04: AIAI-31-004A DONE – WebService `/v1/advisory-ai/pipeline/*` + batch endpoints enqueue plans with rate limiting & scope headers, Worker drains filesystem queue, metrics/logging added, docs updated. Tests: `dotnet test src/AdvisoryAI/__Tests/StellaOps.AdvisoryAI.Tests/StellaOps.AdvisoryAI.Tests.csproj --no-restore`.

> 2025-11-04: AIAI-31-005 DONE – guardrail pipeline redacts secrets, enforces citation/injection policies, emits block counters, and tests (`AdvisoryGuardrailPipelineTests`) cover redaction + citation validation.

> 2025-11-04: AIAI-31-006 DONE – REST endpoints enforce header scopes, apply token bucket rate limiting, sanitize prompts via guardrails, and queue execution with cached metadata. Tests executed via `dotnet test src/AdvisoryAI/__Tests/StellaOps.AdvisoryAI.Tests/StellaOps.AdvisoryAI.Tests.csproj --no-restore`.
@@ -1,79 +1,79 @@
|
||||
using System.Collections.Immutable;
|
||||
using System.Linq;
|
||||
using FluentAssertions;
|
||||
using StellaOps.AdvisoryAI.Context;
|
||||
using StellaOps.AdvisoryAI.Tools;
|
||||
using Xunit;
|
||||
|
||||
namespace StellaOps.AdvisoryAI.Tests;
|
||||
|
||||
public sealed class DeterministicToolsetTests
|
||||
{
|
||||
[Fact]
|
||||
public void AnalyzeDependencies_ComputesRuntimeAndDevelopmentCounts()
|
||||
{
|
||||
var context = SbomContextResult.Create(
|
||||
"artifact-123",
|
||||
purl: null,
|
||||
versionTimeline: Array.Empty<SbomVersionTimelineEntry>(),
|
||||
dependencyPaths: new[]
|
||||
{
|
||||
new SbomDependencyPath(
|
||||
new[]
|
||||
{
|
||||
new SbomDependencyNode("root", "1.0.0"),
|
||||
new SbomDependencyNode("lib-a", "2.0.0"),
|
||||
},
|
||||
isRuntime: true),
|
||||
new SbomDependencyPath(
|
||||
new[]
|
||||
{
|
||||
new SbomDependencyNode("root", "1.0.0"),
|
||||
new SbomDependencyNode("lib-b", "3.1.4"),
|
||||
},
|
||||
isRuntime: false),
|
||||
});
|
||||
|
||||
IDeterministicToolset toolset = new DeterministicToolset();
|
||||
var analysis = toolset.AnalyzeDependencies(context);
|
||||
|
||||
analysis.ArtifactId.Should().Be("artifact-123");
|
||||
analysis.Metadata["path_count"].Should().Be("2");
|
||||
analysis.Metadata["runtime_path_count"].Should().Be("1");
|
||||
analysis.Metadata["development_path_count"].Should().Be("1");
|
||||
analysis.Nodes.Should().HaveCount(3);
|
||||
|
||||
var libA = analysis.Nodes.Single(node => node.Identifier == "lib-a");
|
||||
libA.RuntimeOccurrences.Should().Be(1);
|
||||
libA.DevelopmentOccurrences.Should().Be(0);
|
||||
|
||||
var libB = analysis.Nodes.Single(node => node.Identifier == "lib-b");
|
||||
libB.RuntimeOccurrences.Should().Be(0);
|
||||
libB.DevelopmentOccurrences.Should().Be(1);
|
||||
}
|
||||
|
||||
[Theory]
|
||||
[InlineData("semver", "1.2.3", "1.2.4", -1)]
|
||||
[InlineData("semver", "1.2.3", "1.2.3", 0)]
|
||||
[InlineData("semver", "1.2.4", "1.2.3", 1)]
|
||||
[InlineData("evr", "1:1.0-1", "1:1.0-2", -1)]
|
||||
[InlineData("evr", "0:2.0-0", "0:2.0-0", 0)]
|
||||
[InlineData("evr", "0:2.1-0", "0:2.0-5", 1)]
|
||||
public void TryCompare_SucceedsForSupportedSchemes(string scheme, string left, string right, int expected)
|
||||
{
|
||||
IDeterministicToolset toolset = new DeterministicToolset();
|
||||
toolset.TryCompare(scheme, left, right, out var comparison).Should().BeTrue();
|
||||
comparison.Should().Be(expected);
|
||||
}
|
||||
|
||||
[Theory]
|
||||
[InlineData("semver", "1.2.3", ">=1.0.0 <2.0.0")]
|
||||
[InlineData("semver", "2.0.0", ">=2.0.0")]
|
||||
[InlineData("evr", "0:1.2-3", ">=0:1.0-0 <0:2.0-0")]
|
||||
[InlineData("evr", "1:3.4-1", ">=1:3.0-0")]
|
||||
public void SatisfiesRange_HonoursExpressions(string scheme, string version, string range)
|
||||
{
|
||||
IDeterministicToolset toolset = new DeterministicToolset();
|
||||
toolset.SatisfiesRange(scheme, version, range).Should().BeTrue();
|
||||
}
|
||||
}
|
||||
@@ -1,144 +1,144 @@
|
||||
using System;
|
||||
using System.Linq;
|
||||
using System.Net;
|
||||
using System.Net.Http;
|
||||
using System.Text;
|
||||
using System.Threading;
|
||||
using System.Threading.Tasks;
|
||||
using Microsoft.Extensions.Logging.Abstractions;
|
||||
using Microsoft.Extensions.Options;
|
||||
using StellaOps.AdvisoryAI.Providers;
|
||||
using Xunit;
|
||||
|
||||
namespace StellaOps.AdvisoryAI.Tests;
|
||||
|
||||
public sealed class SbomContextHttpClientTests
|
||||
{
|
||||
[Fact]
|
||||
public async Task GetContextAsync_MapsPayloadToDocument()
|
||||
{
|
||||
const string payload = """
|
||||
{
|
||||
"artifactId": "artifact-001",
|
||||
"purl": "pkg:npm/react@18.3.0",
|
||||
"versions": [
|
||||
{
|
||||
"version": "18.3.0",
|
||||
"firstObserved": "2025-10-01T00:00:00Z",
|
||||
"lastObserved": null,
|
||||
"status": "affected",
|
||||
"source": "inventory",
|
||||
"isFixAvailable": false,
|
||||
"metadata": { "note": "current" }
|
||||
}
|
||||
],
|
||||
"dependencyPaths": [
|
||||
{
|
||||
"nodes": [
|
||||
{ "identifier": "app", "version": "1.0.0" },
|
||||
{ "identifier": "react", "version": "18.3.0" }
|
||||
],
|
||||
"isRuntime": true,
|
||||
"source": "scanner",
|
||||
"metadata": { "scope": "production" }
|
||||
}
|
||||
],
|
||||
"environmentFlags": {
|
||||
"environment/prod": "true"
|
||||
},
|
||||
"blastRadius": {
|
||||
"impactedAssets": 10,
|
||||
"impactedWorkloads": 4,
|
||||
"impactedNamespaces": 2,
|
||||
"impactedPercentage": 0.25,
|
||||
"metadata": { "note": "simulated" }
|
||||
},
|
||||
"metadata": {
|
||||
"source": "sbom-service"
|
||||
}
|
||||
}
|
||||
""";
|
||||
|
||||
var handler = new StubHttpMessageHandler(_ => new HttpResponseMessage(HttpStatusCode.OK)
|
||||
{
|
||||
Content = new StringContent(payload, Encoding.UTF8, "application/json")
|
||||
});
|
||||
|
||||
var httpClient = new HttpClient(handler)
|
||||
{
|
||||
BaseAddress = new Uri("https://sbom.example/")
|
||||
};
|
||||
|
||||
var options = Options.Create(new SbomContextClientOptions
|
||||
{
|
||||
ContextEndpoint = "api/sbom/context",
|
||||
Tenant = "tenant-alpha",
|
||||
TenantHeaderName = "X-StellaOps-Tenant"
|
||||
});
|
||||
|
||||
var client = new SbomContextHttpClient(httpClient, options, NullLogger<SbomContextHttpClient>.Instance);
|
||||
|
||||
var query = new SbomContextQuery("artifact-001", "pkg:npm/react@18.3.0", 25, 10, includeEnvironmentFlags: true, includeBlastRadius: true);
|
||||
var document = await client.GetContextAsync(query, CancellationToken.None);
|
||||
|
||||
Assert.NotNull(document);
|
||||
Assert.Equal("artifact-001", document!.ArtifactId);
|
||||
Assert.Equal("pkg:npm/react@18.3.0", document.Purl);
|
||||
Assert.Single(document.Versions);
|
||||
Assert.Single(document.DependencyPaths);
|
||||
Assert.Single(document.EnvironmentFlags);
|
||||
Assert.NotNull(document.BlastRadius);
|
||||
Assert.Equal("sbom-service", document.Metadata["source"]);
|
||||
|
||||
Assert.NotNull(handler.LastRequest);
|
||||
Assert.Equal("tenant-alpha", handler.LastRequest!.Headers.GetValues("X-StellaOps-Tenant").Single());
|
||||
Assert.Contains("artifactId=artifact-001", handler.LastRequest.RequestUri!.Query);
|
||||
Assert.Contains("purl=pkg%3Anpm%2Freact%4018.3.0", handler.LastRequest.RequestUri!.Query);
|
||||
Assert.Contains("includeEnvironmentFlags=true", handler.LastRequest.RequestUri!.Query);
|
||||
Assert.Contains("includeBlastRadius=true", handler.LastRequest.RequestUri!.Query);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task GetContextAsync_ReturnsNullOnNotFound()
|
||||
{
|
||||
var handler = new StubHttpMessageHandler(_ => new HttpResponseMessage(HttpStatusCode.NotFound));
|
||||
var httpClient = new HttpClient(handler) { BaseAddress = new Uri("https://sbom.example/") };
|
||||
var options = Options.Create(new SbomContextClientOptions());
|
||||
var client = new SbomContextHttpClient(httpClient, options, NullLogger<SbomContextHttpClient>.Instance);
|
||||
|
||||
var result = await client.GetContextAsync(new SbomContextQuery("missing", null, 10, 5, false, false), CancellationToken.None);
|
||||
Assert.Null(result);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task GetContextAsync_ThrowsForServerError()
|
||||
{
|
||||
var handler = new StubHttpMessageHandler(_ => new HttpResponseMessage(HttpStatusCode.InternalServerError)
|
||||
{
|
||||
Content = new StringContent("{\"error\":\"boom\"}", Encoding.UTF8, "application/json")
|
||||
});
|
||||
var httpClient = new HttpClient(handler) { BaseAddress = new Uri("https://sbom.example/") };
|
||||
var options = Options.Create(new SbomContextClientOptions());
|
||||
var client = new SbomContextHttpClient(httpClient, options, NullLogger<SbomContextHttpClient>.Instance);
|
||||
|
||||
await Assert.ThrowsAsync<HttpRequestException>(() => client.GetContextAsync(new SbomContextQuery("artifact", null, 5, 5, false, false), CancellationToken.None));
|
||||
}
|
||||
|
||||
private sealed class StubHttpMessageHandler : HttpMessageHandler
|
||||
{
|
||||
private readonly Func<HttpRequestMessage, HttpResponseMessage> responder;
|
||||
|
||||
public StubHttpMessageHandler(Func<HttpRequestMessage, HttpResponseMessage> responder)
|
||||
{
|
||||
this.responder = responder ?? throw new ArgumentNullException(nameof(responder));
|
||||
}
|
||||
|
||||
public HttpRequestMessage? LastRequest { get; private set; }
|
||||
|
||||
protected override Task<HttpResponseMessage> SendAsync(HttpRequestMessage request, CancellationToken cancellationToken)
|
||||
{
|
||||
LastRequest = request;
|
||||
return Task.FromResult(responder(request));
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -1,29 +1,29 @@
|
||||
<?xml version="1.0" encoding="utf-8"?>
|
||||
<Project Sdk="Microsoft.NET.Sdk">
|
||||
<PropertyGroup>
|
||||
<TargetFramework>net10.0</TargetFramework>
|
||||
<LangVersion>preview</LangVersion>
|
||||
<IsPackable>false</IsPackable>
|
||||
<Nullable>enable</Nullable>
|
||||
<ImplicitUsings>enable</ImplicitUsings>
|
||||
</PropertyGroup>
|
||||
<ItemGroup>
|
||||
<PackageReference Include="FluentAssertions" Version="6.12.0" />
|
||||
<PackageReference Include="Microsoft.Extensions.Logging.Abstractions" Version="10.0.0-rc.2.25502.107" />
|
||||
</ItemGroup>
|
||||
<ItemGroup>
|
||||
<ProjectReference Include="..\..\StellaOps.AdvisoryAI\StellaOps.AdvisoryAI.csproj" />
|
||||
</ItemGroup>
|
||||
<ItemGroup>
|
||||
<ProjectReference Include="..\..\StellaOps.AdvisoryAI\StellaOps.AdvisoryAI.csproj" />
|
||||
<ProjectReference Include="..\..\StellaOps.AdvisoryAI.Hosting\StellaOps.AdvisoryAI.Hosting.csproj" />
|
||||
<ProjectReference Include="..\..\..\Concelier\__Libraries\StellaOps.Concelier.Core\StellaOps.Concelier.Core.csproj" />
|
||||
<ProjectReference Include="..\..\..\Concelier\__Libraries\StellaOps.Concelier.RawModels\StellaOps.Concelier.RawModels.csproj" />
|
||||
<ProjectReference Include="..\..\..\Excititor\__Libraries\StellaOps.Excititor.Core\StellaOps.Excititor.Core.csproj" />
|
||||
</ItemGroup>
|
||||
<ItemGroup>
|
||||
<None Update="TestData/*.json">
|
||||
<CopyToOutputDirectory>PreserveNewest</CopyToOutputDirectory>
|
||||
</None>
|
||||
<None Update="TestData/*.md">
|
||||
<CopyToOutputDirectory>PreserveNewest</CopyToOutputDirectory>
|
||||
</None>
|
||||
</ItemGroup>
|
||||
</Project>
|
||||
@@ -10,30 +10,30 @@ using StellaOps.AdvisoryAI.Tools;
|
||||
using StellaOps.AdvisoryAI.Abstractions;
|
||||
using StellaOps.AdvisoryAI.Documents;
|
||||
using Xunit;
|
||||
|
||||
namespace StellaOps.AdvisoryAI.Tests;
|
||||
|
||||
public sealed class ToolsetServiceCollectionExtensionsTests
|
||||
{
|
||||
[Fact]
|
||||
public void AddAdvisoryDeterministicToolset_RegistersSingleton()
|
||||
{
|
||||
var services = new ServiceCollection();
|
||||
|
||||
services.AddAdvisoryDeterministicToolset();
|
||||
|
||||
var provider = services.BuildServiceProvider();
|
||||
var toolsetA = provider.GetRequiredService<IDeterministicToolset>();
|
||||
var toolsetB = provider.GetRequiredService<IDeterministicToolset>();
|
||||
|
||||
Assert.Same(toolsetA, toolsetB);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void AddAdvisoryPipeline_RegistersOrchestrator()
|
||||
{
|
||||
var services = new ServiceCollection();
|
||||
|
||||
services.AddSbomContext(options =>
|
||||
{
|
||||
options.BaseAddress = new Uri("https://sbom.example/");
|
||||
|
||||
@@ -8,7 +8,7 @@
|
||||
| PLG4-6.CAPABILITIES | BLOCKED (2025-10-12) | BE-Auth Plugin, Docs Guild | PLG1–PLG3 | Finalise capability metadata exposure, config validation, and developer guide updates; remaining action is Docs polish/diagram export. | ✅ Capability metadata + validation merged; ✅ Plugin guide updated with final copy & diagrams; ✅ Release notes mention new toggles. <br>⛔ Blocked awaiting Authority rate-limiter stream (CORE8/SEC3) to resume so doc updates reflect final limiter behaviour. |
|
||||
| PLG7.RFC | DONE (2025-11-03) | BE-Auth Plugin, Security Guild | PLG4 | Socialize LDAP plugin RFC (`docs/rfcs/authority-plugin-ldap.md`) and capture guild feedback. | ✅ Guild review sign-off recorded; ✅ Follow-up issues filed in module boards. |
|
||||
| PLG7.IMPL-001 | DONE (2025-11-03) | BE-Auth Plugin | PLG7.RFC | Scaffold `StellaOps.Authority.Plugin.Ldap` + tests, bind configuration (client certificate, trust-store, insecure toggle) with validation and docs samples. | ✅ Project + test harness build; ✅ Configuration bound & validated; ✅ Sample config updated. |
|
||||
| PLG7.IMPL-002 | DOING (2025-11-03) | BE-Auth Plugin, Security Guild | PLG7.IMPL-001 | Implement LDAP credential store with TLS/mutual TLS enforcement, deterministic retry/backoff, and structured logging/metrics. | ✅ Credential store passes integration tests (OpenLDAP + mtls); ✅ Metrics/logs emitted; ✅ Error mapping documented. |
|
||||
| PLG7.IMPL-002 | DONE (2025-11-04) | BE-Auth Plugin, Security Guild | PLG7.IMPL-001 | Implement LDAP credential store with TLS/mutual TLS enforcement, deterministic retry/backoff, and structured logging/metrics. | ✅ Credential store passes integration tests (OpenLDAP + mTLS); ✅ Metrics/logs emitted; ✅ Error mapping documented.<br>2025-11-04: DirectoryServices factory now enforces TLS/mTLS options, credential store retries use deterministic backoff with metrics (a sketch of the backoff shape follows this table), audit logging includes failure codes, and unit suite (`dotnet test src/Authority/StellaOps.Authority/StellaOps.Authority.Plugin.Ldap.Tests`) remains green. |
|
||||
| PLG7.IMPL-003 | TODO | BE-Auth Plugin | PLG7.IMPL-001 | Deliver claims enricher with DN-to-role dictionary and regex mapping plus Mongo cache, including determinism + eviction tests. | ✅ Regex mapping deterministic; ✅ Cache TTL + invalidation tested; ✅ Claims doc updated. |
|
||||
| PLG7.IMPL-004 | TODO | BE-Auth Plugin, DevOps Guild | PLG7.IMPL-002 | Implement client provisioning store with LDAP write toggles, Mongo audit mirror, bootstrap validation, and health reporting. | ✅ Audit mirror records persisted; ✅ Bootstrap validation logs capability summary; ✅ Health checks cover LDAP + audit mirror. |
|
||||
| PLG7.IMPL-005 | TODO | BE-Auth Plugin, Docs Guild | PLG7.IMPL-001..004 | Update developer guide, samples, and release notes for LDAP plugin (mutual TLS, regex mapping, audit mirror) and ensure Offline Kit coverage. | ✅ Docs merged; ✅ Release notes drafted; ✅ Offline kit config templates updated. |
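
A minimal sketch of the deterministic backoff referenced in PLG7.IMPL-002; the base delay, cap, and helper name are illustrative assumptions:

```csharp
using System;

public static class BackoffSketch
{
    // The delay depends only on the attempt number, so retry timing is
    // reproducible in tests (no random jitter).
    public static TimeSpan Compute(int attempt, TimeSpan baseDelay, TimeSpan maxDelay)
    {
        var exponentialMs = baseDelay.TotalMilliseconds * Math.Pow(2, attempt);
        return TimeSpan.FromMilliseconds(Math.Min(exponentialMs, maxDelay.TotalMilliseconds));
    }
}
```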
|
||||
|
||||
@@ -1,282 +1,282 @@
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.Net;
|
||||
using System.Net.Http;
|
||||
using System.Net.Http.Json;
|
||||
using System.Security.Cryptography;
|
||||
using System.Text;
|
||||
using System.Text.Json.Serialization;
|
||||
using System.Threading.Tasks;
|
||||
using Microsoft.AspNetCore.Authentication;
|
||||
using Microsoft.AspNetCore.Hosting;
|
||||
using System.Net.Http.Headers;
|
||||
using Microsoft.Extensions.DependencyInjection;
|
||||
using MongoDB.Bson;
|
||||
using MongoDB.Driver;
|
||||
using StellaOps.Authority.Tests.Infrastructure;
|
||||
using StellaOps.Auth.Abstractions;
|
||||
using StellaOps.Configuration;
|
||||
using Xunit;
|
||||
using Microsoft.AspNetCore.TestHost;
|
||||
|
||||
namespace StellaOps.Authority.Tests.AdvisoryAi;
|
||||
|
||||
public sealed class AdvisoryAiRemoteInferenceEndpointTests : IClassFixture<AuthorityWebApplicationFactory>
|
||||
{
|
||||
private readonly AuthorityWebApplicationFactory factory;
|
||||
|
||||
public AdvisoryAiRemoteInferenceEndpointTests(AuthorityWebApplicationFactory factory)
|
||||
{
|
||||
this.factory = factory;
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task RemoteInference_ReturnsForbidden_WhenDisabled()
|
||||
{
|
||||
using var client = CreateClient(
|
||||
configureOptions: options =>
|
||||
{
|
||||
options.AdvisoryAi.RemoteInference.Enabled = false;
|
||||
options.AdvisoryAi.RemoteInference.AllowedProfiles.Clear();
|
||||
options.AdvisoryAi.RemoteInference.AllowedProfiles.Add("cloud-openai");
|
||||
});
|
||||
|
||||
var response = await client.PostAsJsonAsync(
|
||||
"/advisory-ai/remote-inference/logs",
|
||||
CreatePayload(profile: "cloud-openai"));
|
||||
|
||||
Assert.Equal(HttpStatusCode.Forbidden, response.StatusCode);
|
||||
var body = await response.Content.ReadFromJsonAsync<Dictionary<string, string>>();
|
||||
Assert.NotNull(body);
|
||||
Assert.Equal("remote_inference_disabled", body!["error"]);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task RemoteInference_ReturnsForbidden_WhenConsentMissing()
|
||||
{
|
||||
using var client = CreateClient(
|
||||
configureOptions: options =>
|
||||
{
|
||||
SeedRemoteInferenceEnabled(options);
|
||||
SeedTenantConsent(options);
|
||||
options.Tenants[0].AdvisoryAi.RemoteInference.ConsentGranted = false;
|
||||
options.Tenants[0].AdvisoryAi.RemoteInference.ConsentVersion = null;
|
||||
options.Tenants[0].AdvisoryAi.RemoteInference.ConsentedAt = null;
|
||||
options.Tenants[0].AdvisoryAi.RemoteInference.ConsentedBy = null;
|
||||
});
|
||||
|
||||
client.DefaultRequestHeaders.Add("X-Test-Tenant", "tenant-default");
|
||||
|
||||
var response = await client.PostAsJsonAsync(
|
||||
"/advisory-ai/remote-inference/logs",
|
||||
CreatePayload(profile: "cloud-openai"));
|
||||
|
||||
Assert.Equal(HttpStatusCode.Forbidden, response.StatusCode);
|
||||
var body = await response.Content.ReadFromJsonAsync<Dictionary<string, string>>();
|
||||
Assert.NotNull(body);
|
||||
Assert.Equal("remote_inference_consent_required", body!["error"]);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task RemoteInference_ReturnsBadRequest_WhenProfileNotAllowed()
|
||||
{
|
||||
using var client = CreateClient(
|
||||
configureOptions: options =>
|
||||
{
|
||||
SeedRemoteInferenceEnabled(options);
|
||||
SeedTenantConsent(options);
|
||||
});
|
||||
|
||||
client.DefaultRequestHeaders.Add("X-Test-Tenant", "tenant-default");
|
||||
|
||||
var response = await client.PostAsJsonAsync(
|
||||
"/advisory-ai/remote-inference/logs",
|
||||
CreatePayload(profile: "other-profile"));
|
||||
|
||||
Assert.Equal(HttpStatusCode.BadRequest, response.StatusCode);
|
||||
var body = await response.Content.ReadFromJsonAsync<Dictionary<string, string>>();
|
||||
Assert.NotNull(body);
|
||||
Assert.Equal("profile_not_allowed", body!["error"]);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task RemoteInference_LogsPrompt_WhenConsentGranted()
|
||||
{
|
||||
using var client = CreateClient(
|
||||
configureOptions: options =>
|
||||
{
|
||||
SeedRemoteInferenceEnabled(options);
|
||||
SeedTenantConsent(options);
|
||||
});
|
||||
|
||||
client.DefaultRequestHeaders.Add("X-Test-Tenant", "tenant-default");
|
||||
|
||||
var database = new MongoClient(factory.ConnectionString).GetDatabase("authority-tests");
|
||||
var collection = database.GetCollection<BsonDocument>("authority_login_attempts");
|
||||
await collection.DeleteManyAsync(FilterDefinition<BsonDocument>.Empty);
|
||||
|
||||
var payload = CreatePayload(profile: "cloud-openai", prompt: "Generate remediation plan.");
|
||||
var response = await client.PostAsJsonAsync("/advisory-ai/remote-inference/logs", payload);
|
||||
|
||||
Assert.Equal(HttpStatusCode.OK, response.StatusCode);
|
||||
var body = await response.Content.ReadFromJsonAsync<Dictionary<string, string>>();
|
||||
Assert.NotNull(body);
|
||||
Assert.Equal("logged", body!["status"]);
|
||||
|
||||
var expectedHash = ComputeSha256(payload.Prompt);
|
||||
Assert.Equal(expectedHash, body["prompt_hash"]);
|
||||
|
||||
var doc = await collection.Find(Builders<BsonDocument>.Filter.Eq("eventType", "authority.advisory_ai.remote_inference")).SingleAsync();
|
||||
Assert.Equal("authority.advisory_ai.remote_inference", doc["eventType"].AsString);
|
||||
|
||||
var properties = ExtractProperties(doc);
|
||||
Assert.Equal(expectedHash, properties["advisory_ai.prompt.hash"]);
|
||||
Assert.Equal("sha256", properties["advisory_ai.prompt.algorithm"]);
|
||||
Assert.Equal(payload.Profile, properties["advisory_ai.profile"]);
|
||||
Assert.False(properties.ContainsKey("advisory_ai.prompt.raw"));
|
||||
}
|
||||
|
||||
private HttpClient CreateClient(Action<StellaOpsAuthorityOptions>? configureOptions = null)
|
||||
{
|
||||
const string schemeName = "StellaOpsBearer";
|
||||
|
||||
var builder = factory.WithWebHostBuilder(hostBuilder =>
|
||||
{
|
||||
hostBuilder.ConfigureTestServices(services =>
|
||||
{
|
||||
services.AddAuthentication(options =>
|
||||
{
|
||||
options.DefaultAuthenticateScheme = schemeName;
|
||||
options.DefaultChallengeScheme = schemeName;
|
||||
})
|
||||
.AddScheme<AuthenticationSchemeOptions, TestAuthHandler>(schemeName, _ => { });
|
||||
|
||||
services.PostConfigure<StellaOpsAuthorityOptions>(opts =>
|
||||
{
|
||||
opts.Issuer ??= new Uri("https://authority.test");
|
||||
if (string.IsNullOrWhiteSpace(opts.Storage.ConnectionString))
|
||||
{
|
||||
opts.Storage.ConnectionString = factory.ConnectionString;
|
||||
}
|
||||
|
||||
if (string.IsNullOrWhiteSpace(opts.Storage.DatabaseName))
|
||||
{
|
||||
opts.Storage.DatabaseName = "authority-tests";
|
||||
}
|
||||
|
||||
opts.AdvisoryAi.RemoteInference.Enabled = true;
|
||||
opts.AdvisoryAi.RemoteInference.RequireTenantConsent = true;
|
||||
opts.AdvisoryAi.RemoteInference.AllowedProfiles.Clear();
|
||||
opts.AdvisoryAi.RemoteInference.AllowedProfiles.Add("cloud-openai");
|
||||
|
||||
opts.Tenants.Clear();
|
||||
opts.Tenants.Add(new AuthorityTenantOptions
|
||||
{
|
||||
Id = "tenant-default",
|
||||
DisplayName = "Tenant Default",
|
||||
AdvisoryAi =
|
||||
{
|
||||
RemoteInference =
|
||||
{
|
||||
ConsentGranted = true,
|
||||
ConsentVersion = "2025-10",
|
||||
ConsentedAt = DateTimeOffset.Parse("2025-10-31T12:34:56Z"),
|
||||
ConsentedBy = "legal@example.com"
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
configureOptions?.Invoke(opts);
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
var client = builder.CreateClient();
|
||||
client.DefaultRequestHeaders.Authorization = new AuthenticationHeaderValue(schemeName);
|
||||
return client;
|
||||
}
|
||||
|
||||
private static void SeedRemoteInferenceEnabled(StellaOpsAuthorityOptions options)
|
||||
{
|
||||
options.AdvisoryAi.RemoteInference.Enabled = true;
|
||||
options.AdvisoryAi.RemoteInference.RequireTenantConsent = true;
|
||||
options.AdvisoryAi.RemoteInference.AllowedProfiles.Clear();
|
||||
options.AdvisoryAi.RemoteInference.AllowedProfiles.Add("cloud-openai");
|
||||
}
|
||||
|
||||
private static void SeedTenantConsent(StellaOpsAuthorityOptions options)
|
||||
{
|
||||
if (options.Tenants.Count == 0)
|
||||
{
|
||||
options.Tenants.Add(new AuthorityTenantOptions { Id = "tenant-default", DisplayName = "Tenant Default" });
|
||||
}
|
||||
|
||||
var tenant = options.Tenants[0];
|
||||
tenant.Id = "tenant-default";
|
||||
tenant.DisplayName = "Tenant Default";
|
||||
tenant.AdvisoryAi.RemoteInference.ConsentGranted = true;
|
||||
tenant.AdvisoryAi.RemoteInference.ConsentVersion = "2025-10";
|
||||
tenant.AdvisoryAi.RemoteInference.ConsentedAt = DateTimeOffset.Parse("2025-10-31T12:34:56Z");
|
||||
tenant.AdvisoryAi.RemoteInference.ConsentedBy = "legal@example.com";
|
||||
}
|
||||
|
||||
private static string ComputeSha256(string value)
|
||||
{
|
||||
var hash = SHA256.HashData(Encoding.UTF8.GetBytes(value));
|
||||
return Convert.ToHexString(hash).ToLowerInvariant();
|
||||
}
|
||||
|
||||
private static Dictionary<string, string> ExtractProperties(BsonDocument document)
|
||||
{
|
||||
var result = new Dictionary<string, string>(StringComparer.Ordinal);
|
||||
if (!document.TryGetValue("properties", out var propertiesValue))
|
||||
{
|
||||
return result;
|
||||
}
|
||||
|
||||
foreach (var item in propertiesValue.AsBsonArray)
|
||||
{
|
||||
if (item is not BsonDocument property)
|
||||
{
|
||||
continue;
|
||||
}
|
||||
|
||||
var name = property.TryGetValue("name", out var nameValue) ? nameValue.AsString : null;
|
||||
var value = property.TryGetValue("value", out var valueNode) ? valueNode.AsString : null;
|
||||
|
||||
if (!string.IsNullOrWhiteSpace(name))
|
||||
{
|
||||
result[name] = value ?? string.Empty;
|
||||
}
|
||||
}
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
private static RemoteInferencePayload CreatePayload(string profile, string prompt = "Summarize remediations.")
|
||||
{
|
||||
return new RemoteInferencePayload(
|
||||
TaskType: "summary",
|
||||
Profile: profile,
|
||||
ModelId: "gpt-4o-mini",
|
||||
Prompt: prompt,
|
||||
ContextDigest: "sha256:context",
|
||||
OutputHash: "sha256:output",
|
||||
TaskId: "task-123",
|
||||
Metadata: new Dictionary<string, string>
|
||||
{
|
||||
["channel"] = "cli",
|
||||
["env"] = "test"
|
||||
});
|
||||
}
|
||||
|
||||
private sealed record RemoteInferencePayload(
|
||||
[property: JsonPropertyName("taskType")] string TaskType,
|
||||
[property: JsonPropertyName("profile")] string Profile,
|
||||
[property: JsonPropertyName("modelId")] string ModelId,
|
||||
[property: JsonPropertyName("prompt")] string Prompt,
|
||||
[property: JsonPropertyName("contextDigest")] string ContextDigest,
|
||||
[property: JsonPropertyName("outputHash")] string OutputHash,
|
||||
[property: JsonPropertyName("taskId")] string TaskId,
|
||||
[property: JsonPropertyName("metadata")] IDictionary<string, string> Metadata);
|
||||
}
|
||||
File diff suppressed because it is too large
@@ -1,44 +1,44 @@
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
|
||||
namespace StellaOps.Authority.Tests.Infrastructure;
|
||||
|
||||
internal sealed class EnvironmentVariableScope : IDisposable
|
||||
{
|
||||
private readonly Dictionary<string, string?> originals = new(StringComparer.Ordinal);
|
||||
private bool disposed;
|
||||
|
||||
public EnvironmentVariableScope(IEnumerable<KeyValuePair<string, string?>> overrides)
|
||||
{
|
||||
if (overrides is null)
|
||||
{
|
||||
throw new ArgumentNullException(nameof(overrides));
|
||||
}
|
||||
|
||||
foreach (var kvp in overrides)
|
||||
{
|
||||
if (originals.ContainsKey(kvp.Key))
|
||||
{
|
||||
continue;
|
||||
}
|
||||
|
||||
originals.Add(kvp.Key, Environment.GetEnvironmentVariable(kvp.Key));
|
||||
Environment.SetEnvironmentVariable(kvp.Key, kvp.Value);
|
||||
}
|
||||
}
|
||||
|
||||
public void Dispose()
|
||||
{
|
||||
if (disposed)
|
||||
{
|
||||
return;
|
||||
}
|
||||
|
||||
foreach (var kvp in originals)
|
||||
{
|
||||
Environment.SetEnvironmentVariable(kvp.Key, kvp.Value);
|
||||
}
|
||||
|
||||
disposed = true;
|
||||
}
|
||||
}
|
||||
@@ -1,57 +1,57 @@
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.Security.Claims;
|
||||
using System.Text.Encodings.Web;
|
||||
using System.Threading.Tasks;
|
||||
using Microsoft.AspNetCore.Authentication;
|
||||
using Microsoft.Extensions.Logging;
|
||||
using Microsoft.Extensions.Options;
|
||||
using StellaOps.Auth.Abstractions;
|
||||
|
||||
namespace StellaOps.Authority.Tests.Infrastructure;
|
||||
|
||||
internal sealed class TestAuthHandler : AuthenticationHandler<AuthenticationSchemeOptions>
|
||||
{
|
||||
public const string SchemeName = "TestAuth";
|
||||
|
||||
public TestAuthHandler(
|
||||
IOptionsMonitor<AuthenticationSchemeOptions> options,
|
||||
ILoggerFactory logger,
|
||||
UrlEncoder encoder)
|
||||
: base(options, logger, encoder)
|
||||
{
|
||||
}
|
||||
|
||||
protected override Task<AuthenticateResult> HandleAuthenticateAsync()
|
||||
{
|
||||
var tenantHeader = Request.Headers.TryGetValue("X-Test-Tenant", out var tenantValues)
|
||||
? tenantValues.ToString()
|
||||
: "tenant-default";
|
||||
|
||||
var scopesHeader = Request.Headers.TryGetValue("X-Test-Scopes", out var scopeValues)
|
||||
? scopeValues.ToString()
|
||||
: StellaOpsScopes.AdvisoryAiOperate;
|
||||
|
||||
var claims = new List<Claim>
|
||||
{
|
||||
new Claim(StellaOpsClaimTypes.ClientId, "test-client")
|
||||
};
|
||||
|
||||
if (!string.IsNullOrWhiteSpace(tenantHeader) &&
|
||||
!string.Equals(tenantHeader, "none", StringComparison.OrdinalIgnoreCase))
|
||||
{
|
||||
claims.Add(new Claim(StellaOpsClaimTypes.Tenant, tenantHeader.Trim()));
|
||||
}
|
||||
|
||||
var scopes = scopesHeader.Split(' ', StringSplitOptions.RemoveEmptyEntries | StringSplitOptions.TrimEntries);
|
||||
foreach (var scope in scopes)
|
||||
{
|
||||
claims.Add(new Claim(StellaOpsClaimTypes.ScopeItem, scope));
|
||||
}
|
||||
|
||||
var identity = new ClaimsIdentity(claims, Scheme.Name);
|
||||
var principal = new ClaimsPrincipal(identity);
|
||||
var ticket = new AuthenticationTicket(principal, Scheme.Name);
|
||||
return Task.FromResult(AuthenticateResult.Success(ticket));
|
||||
}
|
||||
}
|
||||
@@ -1,259 +1,259 @@
|
||||
using System;
|
||||
using System.Collections.Concurrent;
|
||||
using System.Collections.Generic;
|
||||
using System.IO;
|
||||
using System.Linq;
|
||||
using System.Net;
|
||||
using System.Net.Http.Headers;
|
||||
using System.Net.Http.Json;
|
||||
using System.Security.Cryptography;
|
||||
using System.Threading.Tasks;
|
||||
using Microsoft.AspNetCore.Authentication;
|
||||
using Microsoft.Extensions.Configuration;
|
||||
using Microsoft.Extensions.DependencyInjection;
|
||||
using Microsoft.Extensions.DependencyInjection.Extensions;
|
||||
using Microsoft.Extensions.Time.Testing;
|
||||
using StellaOps.Auth.Abstractions;
|
||||
using StellaOps.Authority;
|
||||
using StellaOps.Authority.Tests.Infrastructure;
|
||||
using StellaOps.Cryptography;
|
||||
using StellaOps.Cryptography.Audit;
|
||||
using StellaOps.Configuration;
|
||||
using Xunit;
|
||||
|
||||
namespace StellaOps.Authority.Tests.Notifications;
|
||||
|
||||
public sealed class NotifyAckTokenRotationEndpointTests : IClassFixture<AuthorityWebApplicationFactory>
|
||||
{
|
||||
private readonly AuthorityWebApplicationFactory factory;
|
||||
|
||||
public NotifyAckTokenRotationEndpointTests(AuthorityWebApplicationFactory factory)
|
||||
{
|
||||
this.factory = factory ?? throw new ArgumentNullException(nameof(factory));
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task Rotate_ReturnsOk_AndEmitsAuditEvent()
|
||||
{
|
||||
const string AckEnabledKey = "STELLAOPS_AUTHORITY_AUTHORITY__NOTIFICATIONS__ACKTOKENS__ENABLED";
|
||||
const string AckActiveKeyIdKey = "STELLAOPS_AUTHORITY_AUTHORITY__NOTIFICATIONS__ACKTOKENS__ACTIVEKEYID";
|
||||
const string AckKeyPathKey = "STELLAOPS_AUTHORITY_AUTHORITY__NOTIFICATIONS__ACKTOKENS__KEYPATH";
|
||||
const string AckKeySourceKey = "STELLAOPS_AUTHORITY_AUTHORITY__NOTIFICATIONS__ACKTOKENS__KEYSOURCE";
|
||||
const string AckAlgorithmKey = "STELLAOPS_AUTHORITY_AUTHORITY__NOTIFICATIONS__ACKTOKENS__ALGORITHM";
|
||||
const string WebhooksEnabledKey = "STELLAOPS_AUTHORITY_AUTHORITY__NOTIFICATIONS__WEBHOOKS__ENABLED";
|
||||
const string WebhooksAllowedHost0Key = "STELLAOPS_AUTHORITY_AUTHORITY__NOTIFICATIONS__WEBHOOKS__ALLOWEDHOSTS__0";
|
||||
|
||||
var tempDir = Directory.CreateTempSubdirectory("ack-rotation-success");
|
||||
try
|
||||
{
|
||||
var key1Path = Path.Combine(tempDir.FullName, "ack-key-1.pem");
|
||||
var key2Path = Path.Combine(tempDir.FullName, "ack-key-2.pem");
|
||||
CreateEcPrivateKey(key1Path);
|
||||
CreateEcPrivateKey(key2Path);
|
||||
|
||||
using var env = new EnvironmentVariableScope(new[]
|
||||
{
|
||||
new KeyValuePair<string, string?>(AckEnabledKey, "true"),
|
||||
new KeyValuePair<string, string?>(AckActiveKeyIdKey, "ack-key-1"),
|
||||
new KeyValuePair<string, string?>(AckKeyPathKey, key1Path),
|
||||
new KeyValuePair<string, string?>(AckKeySourceKey, "file"),
|
||||
new KeyValuePair<string, string?>(AckAlgorithmKey, SignatureAlgorithms.Es256),
|
||||
new KeyValuePair<string, string?>(WebhooksEnabledKey, "true"),
|
||||
new KeyValuePair<string, string?>(WebhooksAllowedHost0Key, "hooks.slack.com")
|
||||
});
|
||||
|
||||
var sink = new RecordingAuthEventSink();
|
||||
var timeProvider = new FakeTimeProvider(DateTimeOffset.Parse("2025-11-02T12:00:00Z"));
|
||||
|
||||
using var scopedFactory = factory.WithWebHostBuilder(host =>
|
||||
{
|
||||
host.ConfigureAppConfiguration((_, configuration) =>
|
||||
{
|
||||
configuration.AddInMemoryCollection(new Dictionary<string, string?>
|
||||
{
|
||||
["Authority:Notifications:AckTokens:Enabled"] = "true",
|
||||
["Authority:Notifications:AckTokens:ActiveKeyId"] = "ack-key-1",
|
||||
["Authority:Notifications:AckTokens:KeyPath"] = key1Path,
|
||||
["Authority:Notifications:AckTokens:KeySource"] = "file",
|
||||
["Authority:Notifications:AckTokens:Algorithm"] = SignatureAlgorithms.Es256,
|
||||
["Authority:Notifications:Webhooks:Enabled"] = "true",
|
||||
["Authority:Notifications:Webhooks:AllowedHosts:0"] = "hooks.slack.com",
|
||||
["Authority:Notifications:Escalation:Scope"] = "notify.escalate",
|
||||
["Authority:Notifications:Escalation:RequireAdminScope"] = "true"
|
||||
});
|
||||
});
|
||||
|
||||
host.ConfigureServices(services =>
|
||||
{
|
||||
services.RemoveAll<IAuthEventSink>();
|
||||
services.AddSingleton<IAuthEventSink>(sink);
|
||||
services.Replace(ServiceDescriptor.Singleton<TimeProvider>(timeProvider));
|
||||
services.PostConfigure<StellaOpsAuthorityOptions>(options =>
|
||||
{
|
||||
options.Notifications.AckTokens.Enabled = true;
|
||||
options.Notifications.AckTokens.ActiveKeyId = "ack-key-1";
|
||||
options.Notifications.AckTokens.KeyPath = key1Path;
|
||||
options.Notifications.AckTokens.KeySource = "file";
|
||||
options.Notifications.AckTokens.Algorithm = SignatureAlgorithms.Es256;
|
||||
});
|
||||
var authBuilder = services.AddAuthentication(options =>
|
||||
{
|
||||
options.DefaultAuthenticateScheme = TestAuthHandler.SchemeName;
|
||||
options.DefaultChallengeScheme = TestAuthHandler.SchemeName;
|
||||
});
|
||||
authBuilder.AddScheme<AuthenticationSchemeOptions, TestAuthHandler>(TestAuthHandler.SchemeName, _ => { });
|
||||
authBuilder.AddScheme<AuthenticationSchemeOptions, TestAuthHandler>(StellaOpsAuthenticationDefaults.AuthenticationScheme, _ => { });
|
||||
});
|
||||
});
|
||||
|
||||
using var client = scopedFactory.CreateClient();
|
||||
|
||||
client.DefaultRequestHeaders.Authorization = new AuthenticationHeaderValue(TestAuthHandler.SchemeName);
|
||||
client.DefaultRequestHeaders.Add("X-Test-Scopes", StellaOpsScopes.NotifyAdmin);
|
||||
client.DefaultRequestHeaders.Add("X-Test-Tenant", "tenant-default");
|
||||
client.DefaultRequestHeaders.Add(AuthorityHttpHeaders.Tenant, "tenant-default");
|
||||
|
||||
var response = await client.PostAsJsonAsync("/notify/ack-tokens/rotate", new
|
||||
{
|
||||
keyId = "ack-key-2",
|
||||
location = key2Path
|
||||
});
|
||||
|
||||
Assert.Equal(HttpStatusCode.OK, response.StatusCode);
|
||||
|
||||
var payload = await response.Content.ReadFromJsonAsync<AckRotateResponse>();
|
||||
Assert.NotNull(payload);
|
||||
Assert.Equal("ack-key-2", payload!.ActiveKeyId);
|
||||
Assert.Equal("ack-key-1", payload.PreviousKeyId);
|
||||
|
||||
var rotationEvent = Assert.Single(sink.Events, evt => evt.EventType == "notify.ack.key_rotated");
|
||||
Assert.Equal(AuthEventOutcome.Success, rotationEvent.Outcome);
|
||||
Assert.Contains(rotationEvent.Properties, property =>
|
||||
string.Equals(property.Name, "notify.ack.key_id", StringComparison.Ordinal) &&
|
||||
string.Equals(property.Value.Value, "ack-key-2", StringComparison.Ordinal));
|
||||
}
|
||||
finally
|
||||
{
|
||||
TryDeleteDirectory(tempDir.FullName);
|
||||
}
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task Rotate_ReturnsBadRequest_WhenKeyIdMissing_AndAuditsFailure()
|
||||
{
|
||||
var tempDir = Directory.CreateTempSubdirectory("ack-rotation-failure");
|
||||
try
|
||||
{
|
||||
var key1Path = Path.Combine(tempDir.FullName, "ack-key-1.pem");
|
||||
var key2Path = Path.Combine(tempDir.FullName, "ack-key-2.pem");
|
||||
CreateEcPrivateKey(key1Path);
|
||||
CreateEcPrivateKey(key2Path);
|
||||
|
||||
var sink = new RecordingAuthEventSink();
|
||||
var timeProvider = new FakeTimeProvider(DateTimeOffset.Parse("2025-11-02T13:00:00Z"));
|
||||
|
||||
using var app = factory.WithWebHostBuilder(host =>
|
||||
{
|
||||
host.ConfigureAppConfiguration((_, configuration) =>
|
||||
{
|
||||
configuration.AddInMemoryCollection(new Dictionary<string, string?>
|
||||
{
|
||||
["Authority:Notifications:AckTokens:Enabled"] = "true",
|
||||
["Authority:Notifications:AckTokens:ActiveKeyId"] = "ack-key-1",
|
||||
["Authority:Notifications:AckTokens:KeyPath"] = key1Path,
|
||||
["Authority:Notifications:AckTokens:KeySource"] = "file",
|
||||
["Authority:Notifications:AckTokens:Algorithm"] = SignatureAlgorithms.Es256,
|
||||
["Authority:Notifications:Webhooks:Enabled"] = "true",
|
||||
["Authority:Notifications:Webhooks:AllowedHosts:0"] = "hooks.slack.com"
|
||||
});
|
||||
});
|
||||
|
||||
host.ConfigureServices(services =>
|
||||
{
|
||||
services.RemoveAll<IAuthEventSink>();
|
||||
services.AddSingleton<IAuthEventSink>(sink);
|
||||
services.Replace(ServiceDescriptor.Singleton<TimeProvider>(timeProvider));
|
||||
services.PostConfigure<StellaOpsAuthorityOptions>(options =>
|
||||
{
|
||||
options.Notifications.AckTokens.Enabled = true;
|
||||
options.Notifications.AckTokens.ActiveKeyId = "ack-key-1";
|
||||
options.Notifications.AckTokens.KeyPath = key1Path;
|
||||
options.Notifications.AckTokens.KeySource = "file";
|
||||
options.Notifications.AckTokens.Algorithm = SignatureAlgorithms.Es256;
|
||||
});
|
||||
var authBuilder = services.AddAuthentication(options =>
|
||||
{
|
||||
options.DefaultAuthenticateScheme = TestAuthHandler.SchemeName;
|
||||
options.DefaultChallengeScheme = TestAuthHandler.SchemeName;
|
||||
});
|
||||
authBuilder.AddScheme<AuthenticationSchemeOptions, TestAuthHandler>(TestAuthHandler.SchemeName, _ => { });
|
||||
authBuilder.AddScheme<AuthenticationSchemeOptions, TestAuthHandler>(StellaOpsAuthenticationDefaults.AuthenticationScheme, _ => { });
|
||||
});
|
||||
});
|
||||
|
||||
using var client = app.CreateClient();
|
||||
client.DefaultRequestHeaders.Authorization = new AuthenticationHeaderValue(TestAuthHandler.SchemeName);
|
||||
client.DefaultRequestHeaders.Add("X-Test-Scopes", StellaOpsScopes.NotifyAdmin);
|
||||
client.DefaultRequestHeaders.Add("X-Test-Tenant", "tenant-default");
|
||||
client.DefaultRequestHeaders.Add(AuthorityHttpHeaders.Tenant, "tenant-default");
|
||||
|
||||
var response = await client.PostAsJsonAsync("/notify/ack-tokens/rotate", new
|
||||
{
|
||||
location = key2Path
|
||||
});
|
||||
|
||||
Assert.Equal(HttpStatusCode.BadRequest, response.StatusCode);
|
||||
|
||||
var failureEvent = Assert.Single(sink.Events, evt => evt.EventType == "notify.ack.key_rotation_failed");
|
||||
Assert.Equal(AuthEventOutcome.Failure, failureEvent.Outcome);
|
||||
Assert.Contains("keyId", failureEvent.Reason, StringComparison.OrdinalIgnoreCase);
|
||||
}
|
||||
finally
|
||||
{
|
||||
TryDeleteDirectory(tempDir.FullName);
|
||||
}
|
||||
}
|
||||
|
||||
private static void CreateEcPrivateKey(string path)
|
||||
{
|
||||
Directory.CreateDirectory(Path.GetDirectoryName(path)!);
|
||||
using var ecdsa = ECDsa.Create(ECCurve.NamedCurves.nistP256);
|
||||
File.WriteAllText(path, ecdsa.ExportECPrivateKeyPem());
|
||||
}
|
||||
|
||||
private static void TryDeleteDirectory(string path)
|
||||
{
|
||||
try
|
||||
{
|
||||
if (Directory.Exists(path))
|
||||
{
|
||||
Directory.Delete(path, recursive: true);
|
||||
}
|
||||
}
|
||||
catch
|
||||
{
|
||||
// Ignore cleanup failures in tests.
|
||||
}
|
||||
}
|
||||
|
||||
private sealed record AckRotateResponse(
|
||||
string ActiveKeyId,
|
||||
string? Provider,
|
||||
string? Source,
|
||||
string? Location,
|
||||
string? PreviousKeyId,
|
||||
IReadOnlyCollection<string> RetiredKeyIds);
|
||||
|
||||
private sealed class RecordingAuthEventSink : IAuthEventSink
|
||||
{
|
||||
private readonly ConcurrentQueue<AuthEventRecord> events = new();
|
||||
|
||||
public IReadOnlyCollection<AuthEventRecord> Events => events.ToArray();
|
||||
|
||||
public ValueTask WriteAsync(AuthEventRecord record, CancellationToken cancellationToken)
|
||||
{
|
||||
events.Enqueue(record);
|
||||
return ValueTask.CompletedTask;
|
||||
}
|
||||
}
|
||||
}
|
||||
using System;
|
||||
using System.Collections.Concurrent;
|
||||
using System.Collections.Generic;
|
||||
using System.IO;
|
||||
using System.Linq;
|
||||
using System.Net;
|
||||
using System.Net.Http.Headers;
|
||||
using System.Net.Http.Json;
|
||||
using System.Security.Cryptography;
|
||||
using System.Threading.Tasks;
|
||||
using Microsoft.AspNetCore.Authentication;
|
||||
using Microsoft.Extensions.Configuration;
|
||||
using Microsoft.Extensions.DependencyInjection;
|
||||
using Microsoft.Extensions.DependencyInjection.Extensions;
|
||||
using Microsoft.Extensions.Time.Testing;
|
||||
using StellaOps.Auth.Abstractions;
|
||||
using StellaOps.Authority;
|
||||
using StellaOps.Authority.Tests.Infrastructure;
|
||||
using StellaOps.Cryptography;
|
||||
using StellaOps.Cryptography.Audit;
|
||||
using StellaOps.Configuration;
|
||||
using Xunit;
|
||||
|
||||
namespace StellaOps.Authority.Tests.Notifications;
|
||||
|
||||
public sealed class NotifyAckTokenRotationEndpointTests : IClassFixture<AuthorityWebApplicationFactory>
|
||||
{
|
||||
private readonly AuthorityWebApplicationFactory factory;
|
||||
|
||||
public NotifyAckTokenRotationEndpointTests(AuthorityWebApplicationFactory factory)
|
||||
{
|
||||
this.factory = factory ?? throw new ArgumentNullException(nameof(factory));
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task Rotate_ReturnsOk_AndEmitsAuditEvent()
|
||||
{
|
||||
const string AckEnabledKey = "STELLAOPS_AUTHORITY_AUTHORITY__NOTIFICATIONS__ACKTOKENS__ENABLED";
|
||||
const string AckActiveKeyIdKey = "STELLAOPS_AUTHORITY_AUTHORITY__NOTIFICATIONS__ACKTOKENS__ACTIVEKEYID";
|
||||
const string AckKeyPathKey = "STELLAOPS_AUTHORITY_AUTHORITY__NOTIFICATIONS__ACKTOKENS__KEYPATH";
|
||||
const string AckKeySourceKey = "STELLAOPS_AUTHORITY_AUTHORITY__NOTIFICATIONS__ACKTOKENS__KEYSOURCE";
|
||||
const string AckAlgorithmKey = "STELLAOPS_AUTHORITY_AUTHORITY__NOTIFICATIONS__ACKTOKENS__ALGORITHM";
|
||||
const string WebhooksEnabledKey = "STELLAOPS_AUTHORITY_AUTHORITY__NOTIFICATIONS__WEBHOOKS__ENABLED";
|
||||
const string WebhooksAllowedHost0Key = "STELLAOPS_AUTHORITY_AUTHORITY__NOTIFICATIONS__WEBHOOKS__ALLOWEDHOSTS__0";
|
||||
|
||||
var tempDir = Directory.CreateTempSubdirectory("ack-rotation-success");
|
||||
try
|
||||
{
|
||||
var key1Path = Path.Combine(tempDir.FullName, "ack-key-1.pem");
|
||||
var key2Path = Path.Combine(tempDir.FullName, "ack-key-2.pem");
|
||||
CreateEcPrivateKey(key1Path);
|
||||
CreateEcPrivateKey(key2Path);
|
||||
|
||||
using var env = new EnvironmentVariableScope(new[]
|
||||
{
|
||||
new KeyValuePair<string, string?>(AckEnabledKey, "true"),
|
||||
new KeyValuePair<string, string?>(AckActiveKeyIdKey, "ack-key-1"),
|
||||
new KeyValuePair<string, string?>(AckKeyPathKey, key1Path),
|
||||
new KeyValuePair<string, string?>(AckKeySourceKey, "file"),
|
||||
new KeyValuePair<string, string?>(AckAlgorithmKey, SignatureAlgorithms.Es256),
|
||||
new KeyValuePair<string, string?>(WebhooksEnabledKey, "true"),
|
||||
new KeyValuePair<string, string?>(WebhooksAllowedHost0Key, "hooks.slack.com")
|
||||
});
|
||||
|
||||
var sink = new RecordingAuthEventSink();
|
||||
var timeProvider = new FakeTimeProvider(DateTimeOffset.Parse("2025-11-02T12:00:00Z"));
|
||||
|
||||
using var scopedFactory = factory.WithWebHostBuilder(host =>
|
||||
{
|
||||
host.ConfigureAppConfiguration((_, configuration) =>
|
||||
{
|
||||
configuration.AddInMemoryCollection(new Dictionary<string, string?>
|
||||
{
|
||||
["Authority:Notifications:AckTokens:Enabled"] = "true",
|
||||
["Authority:Notifications:AckTokens:ActiveKeyId"] = "ack-key-1",
|
||||
["Authority:Notifications:AckTokens:KeyPath"] = key1Path,
|
||||
["Authority:Notifications:AckTokens:KeySource"] = "file",
|
||||
["Authority:Notifications:AckTokens:Algorithm"] = SignatureAlgorithms.Es256,
|
||||
["Authority:Notifications:Webhooks:Enabled"] = "true",
|
||||
["Authority:Notifications:Webhooks:AllowedHosts:0"] = "hooks.slack.com",
|
||||
["Authority:Notifications:Escalation:Scope"] = "notify.escalate",
|
||||
["Authority:Notifications:Escalation:RequireAdminScope"] = "true"
|
||||
});
|
||||
});
|
||||
|
||||
host.ConfigureServices(services =>
|
||||
{
|
||||
services.RemoveAll<IAuthEventSink>();
|
||||
services.AddSingleton<IAuthEventSink>(sink);
|
||||
services.Replace(ServiceDescriptor.Singleton<TimeProvider>(timeProvider));
|
||||
services.PostConfigure<StellaOpsAuthorityOptions>(options =>
|
||||
{
|
||||
options.Notifications.AckTokens.Enabled = true;
|
||||
options.Notifications.AckTokens.ActiveKeyId = "ack-key-1";
|
||||
options.Notifications.AckTokens.KeyPath = key1Path;
|
||||
options.Notifications.AckTokens.KeySource = "file";
|
||||
options.Notifications.AckTokens.Algorithm = SignatureAlgorithms.Es256;
|
||||
});
|
||||
var authBuilder = services.AddAuthentication(options =>
|
||||
{
|
||||
options.DefaultAuthenticateScheme = TestAuthHandler.SchemeName;
|
||||
options.DefaultChallengeScheme = TestAuthHandler.SchemeName;
|
||||
});
|
||||
authBuilder.AddScheme<AuthenticationSchemeOptions, TestAuthHandler>(TestAuthHandler.SchemeName, _ => { });
|
||||
authBuilder.AddScheme<AuthenticationSchemeOptions, TestAuthHandler>(StellaOpsAuthenticationDefaults.AuthenticationScheme, _ => { });
|
||||
});
|
||||
});
|
||||
|
||||
using var client = scopedFactory.CreateClient();
|
||||
|
||||
client.DefaultRequestHeaders.Authorization = new AuthenticationHeaderValue(TestAuthHandler.SchemeName);
|
||||
client.DefaultRequestHeaders.Add("X-Test-Scopes", StellaOpsScopes.NotifyAdmin);
|
||||
client.DefaultRequestHeaders.Add("X-Test-Tenant", "tenant-default");
|
||||
client.DefaultRequestHeaders.Add(AuthorityHttpHeaders.Tenant, "tenant-default");
|
||||
|
||||
var response = await client.PostAsJsonAsync("/notify/ack-tokens/rotate", new
|
||||
{
|
||||
keyId = "ack-key-2",
|
||||
location = key2Path
|
||||
});
|
||||
|
||||
Assert.Equal(HttpStatusCode.OK, response.StatusCode);
|
||||
|
||||
var payload = await response.Content.ReadFromJsonAsync<AckRotateResponse>();
|
||||
Assert.NotNull(payload);
|
||||
Assert.Equal("ack-key-2", payload!.ActiveKeyId);
|
||||
Assert.Equal("ack-key-1", payload.PreviousKeyId);
|
||||
|
||||
var rotationEvent = Assert.Single(sink.Events, evt => evt.EventType == "notify.ack.key_rotated");
|
||||
Assert.Equal(AuthEventOutcome.Success, rotationEvent.Outcome);
|
||||
Assert.Contains(rotationEvent.Properties, property =>
|
||||
string.Equals(property.Name, "notify.ack.key_id", StringComparison.Ordinal) &&
|
||||
string.Equals(property.Value.Value, "ack-key-2", StringComparison.Ordinal));
|
||||
}
|
||||
finally
|
||||
{
|
||||
TryDeleteDirectory(tempDir.FullName);
|
||||
}
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task Rotate_ReturnsBadRequest_WhenKeyIdMissing_AndAuditsFailure()
|
||||
{
|
||||
var tempDir = Directory.CreateTempSubdirectory("ack-rotation-failure");
|
||||
try
|
||||
{
|
||||
var key1Path = Path.Combine(tempDir.FullName, "ack-key-1.pem");
|
||||
var key2Path = Path.Combine(tempDir.FullName, "ack-key-2.pem");
|
||||
CreateEcPrivateKey(key1Path);
|
||||
CreateEcPrivateKey(key2Path);
|
||||
|
||||
var sink = new RecordingAuthEventSink();
|
||||
var timeProvider = new FakeTimeProvider(DateTimeOffset.Parse("2025-11-02T13:00:00Z"));
|
||||
|
||||
using var app = factory.WithWebHostBuilder(host =>
|
||||
{
|
||||
host.ConfigureAppConfiguration((_, configuration) =>
|
||||
{
|
||||
configuration.AddInMemoryCollection(new Dictionary<string, string?>
|
||||
{
|
||||
["Authority:Notifications:AckTokens:Enabled"] = "true",
|
||||
["Authority:Notifications:AckTokens:ActiveKeyId"] = "ack-key-1",
|
||||
["Authority:Notifications:AckTokens:KeyPath"] = key1Path,
|
||||
["Authority:Notifications:AckTokens:KeySource"] = "file",
|
||||
["Authority:Notifications:AckTokens:Algorithm"] = SignatureAlgorithms.Es256,
|
||||
["Authority:Notifications:Webhooks:Enabled"] = "true",
|
||||
["Authority:Notifications:Webhooks:AllowedHosts:0"] = "hooks.slack.com"
|
||||
});
|
||||
});
|
||||
|
||||
host.ConfigureServices(services =>
|
||||
{
|
||||
services.RemoveAll<IAuthEventSink>();
|
||||
services.AddSingleton<IAuthEventSink>(sink);
|
||||
services.Replace(ServiceDescriptor.Singleton<TimeProvider>(timeProvider));
|
||||
services.PostConfigure<StellaOpsAuthorityOptions>(options =>
|
||||
{
|
||||
options.Notifications.AckTokens.Enabled = true;
|
||||
options.Notifications.AckTokens.ActiveKeyId = "ack-key-1";
|
||||
options.Notifications.AckTokens.KeyPath = key1Path;
|
||||
options.Notifications.AckTokens.KeySource = "file";
|
||||
options.Notifications.AckTokens.Algorithm = SignatureAlgorithms.Es256;
|
||||
});
|
||||
var authBuilder = services.AddAuthentication(options =>
|
||||
{
|
||||
options.DefaultAuthenticateScheme = TestAuthHandler.SchemeName;
|
||||
options.DefaultChallengeScheme = TestAuthHandler.SchemeName;
|
||||
});
|
||||
authBuilder.AddScheme<AuthenticationSchemeOptions, TestAuthHandler>(TestAuthHandler.SchemeName, _ => { });
|
||||
authBuilder.AddScheme<AuthenticationSchemeOptions, TestAuthHandler>(StellaOpsAuthenticationDefaults.AuthenticationScheme, _ => { });
|
||||
});
|
||||
});
|
||||
|
||||
using var client = app.CreateClient();
|
||||
client.DefaultRequestHeaders.Authorization = new AuthenticationHeaderValue(TestAuthHandler.SchemeName);
|
||||
client.DefaultRequestHeaders.Add("X-Test-Scopes", StellaOpsScopes.NotifyAdmin);
|
||||
client.DefaultRequestHeaders.Add("X-Test-Tenant", "tenant-default");
|
||||
client.DefaultRequestHeaders.Add(AuthorityHttpHeaders.Tenant, "tenant-default");
|
||||
|
||||
var response = await client.PostAsJsonAsync("/notify/ack-tokens/rotate", new
|
||||
{
|
||||
location = key2Path
|
||||
});
|
||||
|
||||
Assert.Equal(HttpStatusCode.BadRequest, response.StatusCode);
|
||||
|
||||
var failureEvent = Assert.Single(sink.Events, evt => evt.EventType == "notify.ack.key_rotation_failed");
|
||||
Assert.Equal(AuthEventOutcome.Failure, failureEvent.Outcome);
|
||||
Assert.Contains("keyId", failureEvent.Reason, StringComparison.OrdinalIgnoreCase);
|
||||
}
|
||||
finally
|
||||
{
|
||||
TryDeleteDirectory(tempDir.FullName);
|
||||
}
|
||||
}
|
||||
|
||||
private static void CreateEcPrivateKey(string path)
|
||||
{
|
||||
Directory.CreateDirectory(Path.GetDirectoryName(path)!);
|
||||
using var ecdsa = ECDsa.Create(ECCurve.NamedCurves.nistP256);
|
||||
File.WriteAllText(path, ecdsa.ExportECPrivateKeyPem());
|
||||
}
|
||||
|
||||
private static void TryDeleteDirectory(string path)
|
||||
{
|
||||
try
|
||||
{
|
||||
if (Directory.Exists(path))
|
||||
{
|
||||
Directory.Delete(path, recursive: true);
|
||||
}
|
||||
}
|
||||
catch
|
||||
{
|
||||
// Ignore cleanup failures in tests.
|
||||
}
|
||||
}
|
||||
|
||||
private sealed record AckRotateResponse(
|
||||
string ActiveKeyId,
|
||||
string? Provider,
|
||||
string? Source,
|
||||
string? Location,
|
||||
string? PreviousKeyId,
|
||||
IReadOnlyCollection<string> RetiredKeyIds);
|
||||
|
||||
private sealed class RecordingAuthEventSink : IAuthEventSink
|
||||
{
|
||||
private readonly ConcurrentQueue<AuthEventRecord> events = new();
|
||||
|
||||
public IReadOnlyCollection<AuthEventRecord> Events => events.ToArray();
|
||||
|
||||
public ValueTask WriteAsync(AuthEventRecord record, CancellationToken cancellationToken)
|
||||
{
|
||||
events.Enqueue(record);
|
||||
return ValueTask.CompletedTask;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
File diff suppressed because it is too large
Load Diff
@@ -1,48 +1,48 @@
|
||||
using System.Linq;
|
||||
using System.Net;
|
||||
using System.Text.Json;
|
||||
using StellaOps.Authority.Tests.Infrastructure;
|
||||
using StellaOps.Auth.Abstractions;
|
||||
using Xunit;
|
||||
|
||||
namespace StellaOps.Authority.Tests.OpenIddict;
|
||||
|
||||
public sealed class DiscoveryMetadataTests : IClassFixture<AuthorityWebApplicationFactory>
|
||||
{
|
||||
private readonly AuthorityWebApplicationFactory factory;
|
||||
|
||||
public DiscoveryMetadataTests(AuthorityWebApplicationFactory factory)
|
||||
{
|
||||
this.factory = factory;
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task OpenIdDiscovery_IncludesAdvisoryAiMetadata()
|
||||
{
|
||||
using var client = factory.CreateClient();
|
||||
|
||||
using var response = await client.GetAsync("/.well-known/openid-configuration");
|
||||
|
||||
Assert.Equal(HttpStatusCode.OK, response.StatusCode);
|
||||
|
||||
var payload = await response.Content.ReadAsStringAsync();
|
||||
using var document = JsonDocument.Parse(payload);
|
||||
|
||||
var root = document.RootElement;
|
||||
Assert.True(root.TryGetProperty("stellaops_advisory_ai_scopes_supported", out var scopesNode));
|
||||
|
||||
var scopes = scopesNode.EnumerateArray().Select(element => element.GetString()).ToArray();
|
||||
Assert.Contains(StellaOpsScopes.AdvisoryAiView, scopes);
|
||||
Assert.Contains(StellaOpsScopes.AdvisoryAiOperate, scopes);
|
||||
Assert.Contains(StellaOpsScopes.AdvisoryAiAdmin, scopes);
|
||||
|
||||
Assert.True(root.TryGetProperty("stellaops_advisory_ai_remote_inference", out var remoteNode));
|
||||
Assert.False(remoteNode.GetProperty("enabled").GetBoolean());
|
||||
Assert.True(remoteNode.GetProperty("require_tenant_consent").GetBoolean());
|
||||
|
||||
var profiles = remoteNode.GetProperty("allowed_profiles").EnumerateArray().ToArray();
|
||||
Assert.Empty(profiles);
|
||||
|
||||
using System.Linq;
|
||||
using System.Net;
|
||||
using System.Text.Json;
|
||||
using StellaOps.Authority.Tests.Infrastructure;
|
||||
using StellaOps.Auth.Abstractions;
|
||||
using Xunit;
|
||||
|
||||
namespace StellaOps.Authority.Tests.OpenIddict;
|
||||
|
||||
public sealed class DiscoveryMetadataTests : IClassFixture<AuthorityWebApplicationFactory>
|
||||
{
|
||||
private readonly AuthorityWebApplicationFactory factory;
|
||||
|
||||
public DiscoveryMetadataTests(AuthorityWebApplicationFactory factory)
|
||||
{
|
||||
this.factory = factory;
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task OpenIdDiscovery_IncludesAdvisoryAiMetadata()
|
||||
{
|
||||
using var client = factory.CreateClient();
|
||||
|
||||
using var response = await client.GetAsync("/.well-known/openid-configuration");
|
||||
|
||||
Assert.Equal(HttpStatusCode.OK, response.StatusCode);
|
||||
|
||||
var payload = await response.Content.ReadAsStringAsync();
|
||||
using var document = JsonDocument.Parse(payload);
|
||||
|
||||
var root = document.RootElement;
|
||||
Assert.True(root.TryGetProperty("stellaops_advisory_ai_scopes_supported", out var scopesNode));
|
||||
|
||||
var scopes = scopesNode.EnumerateArray().Select(element => element.GetString()).ToArray();
|
||||
Assert.Contains(StellaOpsScopes.AdvisoryAiView, scopes);
|
||||
Assert.Contains(StellaOpsScopes.AdvisoryAiOperate, scopes);
|
||||
Assert.Contains(StellaOpsScopes.AdvisoryAiAdmin, scopes);
|
||||
|
||||
Assert.True(root.TryGetProperty("stellaops_advisory_ai_remote_inference", out var remoteNode));
|
||||
Assert.False(remoteNode.GetProperty("enabled").GetBoolean());
|
||||
Assert.True(remoteNode.GetProperty("require_tenant_consent").GetBoolean());
|
||||
|
||||
var profiles = remoteNode.GetProperty("allowed_profiles").EnumerateArray().ToArray();
|
||||
Assert.Empty(profiles);
|
||||
|
||||
Assert.True(root.TryGetProperty("stellaops_airgap_scopes_supported", out var airgapNode));
|
||||
var airgapScopes = airgapNode.EnumerateArray().Select(element => element.GetString()).ToArray();
|
||||
Assert.Contains(StellaOpsScopes.AirgapSeal, airgapScopes);
|
||||
@@ -61,10 +61,10 @@ public sealed class DiscoveryMetadataTests : IClassFixture<AuthorityWebApplicati
|
||||
Assert.Contains(StellaOpsScopes.ObservabilityRead, observabilityScopes);
|
||||
Assert.Contains(StellaOpsScopes.TimelineRead, observabilityScopes);
|
||||
Assert.Contains(StellaOpsScopes.TimelineWrite, observabilityScopes);
|
||||
Assert.Contains(StellaOpsScopes.EvidenceCreate, observabilityScopes);
|
||||
Assert.Contains(StellaOpsScopes.EvidenceRead, observabilityScopes);
|
||||
Assert.Contains(StellaOpsScopes.EvidenceHold, observabilityScopes);
|
||||
Assert.Contains(StellaOpsScopes.AttestRead, observabilityScopes);
|
||||
Assert.Contains(StellaOpsScopes.ObservabilityIncident, observabilityScopes);
|
||||
}
|
||||
}
|
||||
Assert.Contains(StellaOpsScopes.EvidenceCreate, observabilityScopes);
|
||||
Assert.Contains(StellaOpsScopes.EvidenceRead, observabilityScopes);
|
||||
Assert.Contains(StellaOpsScopes.EvidenceHold, observabilityScopes);
|
||||
Assert.Contains(StellaOpsScopes.AttestRead, observabilityScopes);
|
||||
Assert.Contains(StellaOpsScopes.ObservabilityIncident, observabilityScopes);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,112 +1,112 @@
|
||||
using System;
|
||||
using System.Collections.Concurrent;
|
||||
using System.Collections.Generic;
|
||||
using System.Globalization;
|
||||
using System.Linq;
|
||||
using System.Net.Http;
|
||||
using System.Threading;
|
||||
using System.Threading.Tasks;
|
||||
using Microsoft.Extensions.DependencyInjection;
|
||||
using Microsoft.Extensions.DependencyInjection.Extensions;
|
||||
using StellaOps.Authority.Tests.Infrastructure;
|
||||
using StellaOps.Cryptography.Audit;
|
||||
using Xunit;
|
||||
|
||||
namespace StellaOps.Authority.Tests.OpenIddict;
|
||||
|
||||
public sealed class LegacyAuthDeprecationTests : IClassFixture<AuthorityWebApplicationFactory>
|
||||
{
|
||||
private static readonly string ExpectedDeprecationHeader = new DateTimeOffset(2025, 11, 1, 0, 0, 0, TimeSpan.Zero)
|
||||
.UtcDateTime.ToString("r", CultureInfo.InvariantCulture);
|
||||
|
||||
private static readonly string ExpectedSunsetHeader = new DateTimeOffset(2026, 5, 1, 0, 0, 0, TimeSpan.Zero)
|
||||
.UtcDateTime.ToString("r", CultureInfo.InvariantCulture);
|
||||
|
||||
private static readonly string ExpectedSunsetIso = new DateTimeOffset(2026, 5, 1, 0, 0, 0, TimeSpan.Zero)
|
||||
.ToString("O", CultureInfo.InvariantCulture);
|
||||
|
||||
private readonly AuthorityWebApplicationFactory factory;
|
||||
|
||||
public LegacyAuthDeprecationTests(AuthorityWebApplicationFactory factory)
|
||||
=> this.factory = factory ?? throw new ArgumentNullException(nameof(factory));
|
||||
|
||||
[Fact]
|
||||
public async Task LegacyTokenEndpoint_IncludesDeprecationHeaders()
|
||||
{
|
||||
using var client = factory.CreateClient();
|
||||
|
||||
using var response = await client.PostAsync(
|
||||
"/oauth/token",
|
||||
new FormUrlEncodedContent(new Dictionary<string, string>
|
||||
{
|
||||
["grant_type"] = "client_credentials"
|
||||
}));
|
||||
|
||||
Assert.NotNull(response);
|
||||
Assert.True(response.Headers.TryGetValues("Deprecation", out var deprecationValues));
|
||||
Assert.Contains(ExpectedDeprecationHeader, deprecationValues);
|
||||
|
||||
Assert.True(response.Headers.TryGetValues("Sunset", out var sunsetValues));
|
||||
Assert.Contains(ExpectedSunsetHeader, sunsetValues);
|
||||
|
||||
Assert.True(response.Headers.TryGetValues("Warning", out var warningValues));
|
||||
Assert.Contains(warningValues, warning => warning.Contains("Legacy Authority endpoint", StringComparison.OrdinalIgnoreCase));
|
||||
|
||||
Assert.True(response.Headers.TryGetValues("Link", out var linkValues));
|
||||
Assert.Contains(linkValues, value => value.Contains("rel=\"sunset\"", StringComparison.OrdinalIgnoreCase));
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task LegacyTokenEndpoint_EmitsAuditEvent()
|
||||
{
|
||||
var sink = new RecordingAuthEventSink();
|
||||
|
||||
using var customFactory = factory.WithWebHostBuilder(builder =>
|
||||
{
|
||||
builder.ConfigureServices(services =>
|
||||
{
|
||||
services.RemoveAll<IAuthEventSink>();
|
||||
services.AddSingleton<IAuthEventSink>(sink);
|
||||
});
|
||||
});
|
||||
|
||||
using var client = customFactory.CreateClient();
|
||||
|
||||
using var response = await client.PostAsync(
|
||||
"/oauth/token",
|
||||
new FormUrlEncodedContent(new Dictionary<string, string>
|
||||
{
|
||||
["grant_type"] = "client_credentials"
|
||||
}));
|
||||
|
||||
Assert.NotNull(response);
|
||||
|
||||
var record = Assert.Single(sink.Events);
|
||||
Assert.Equal("authority.api.legacy_endpoint", record.EventType);
|
||||
|
||||
Assert.Contains(record.Properties, property =>
|
||||
string.Equals(property.Name, "legacy.endpoint.original", StringComparison.Ordinal) &&
|
||||
string.Equals(property.Value.Value, "/oauth/token", StringComparison.Ordinal));
|
||||
|
||||
Assert.Contains(record.Properties, property =>
|
||||
string.Equals(property.Name, "legacy.endpoint.canonical", StringComparison.Ordinal) &&
|
||||
string.Equals(property.Value.Value, "/token", StringComparison.Ordinal));
|
||||
|
||||
Assert.Contains(record.Properties, property =>
|
||||
string.Equals(property.Name, "legacy.sunset_at", StringComparison.Ordinal) &&
|
||||
string.Equals(property.Value.Value, ExpectedSunsetIso, StringComparison.Ordinal));
|
||||
}
|
||||
|
||||
private sealed class RecordingAuthEventSink : IAuthEventSink
|
||||
{
|
||||
private readonly ConcurrentQueue<AuthEventRecord> events = new();
|
||||
|
||||
public IReadOnlyCollection<AuthEventRecord> Events => events.ToArray();
|
||||
|
||||
public ValueTask WriteAsync(AuthEventRecord record, CancellationToken cancellationToken)
|
||||
{
|
||||
events.Enqueue(record);
|
||||
return ValueTask.CompletedTask;
|
||||
}
|
||||
}
|
||||
}
|
||||
using System;
|
||||
using System.Collections.Concurrent;
|
||||
using System.Collections.Generic;
|
||||
using System.Globalization;
|
||||
using System.Linq;
|
||||
using System.Net.Http;
|
||||
using System.Threading;
|
||||
using System.Threading.Tasks;
|
||||
using Microsoft.Extensions.DependencyInjection;
|
||||
using Microsoft.Extensions.DependencyInjection.Extensions;
|
||||
using StellaOps.Authority.Tests.Infrastructure;
|
||||
using StellaOps.Cryptography.Audit;
|
||||
using Xunit;
|
||||
|
||||
namespace StellaOps.Authority.Tests.OpenIddict;
|
||||
|
||||
public sealed class LegacyAuthDeprecationTests : IClassFixture<AuthorityWebApplicationFactory>
|
||||
{
|
||||
private static readonly string ExpectedDeprecationHeader = new DateTimeOffset(2025, 11, 1, 0, 0, 0, TimeSpan.Zero)
|
||||
.UtcDateTime.ToString("r", CultureInfo.InvariantCulture);
|
||||
|
||||
private static readonly string ExpectedSunsetHeader = new DateTimeOffset(2026, 5, 1, 0, 0, 0, TimeSpan.Zero)
|
||||
.UtcDateTime.ToString("r", CultureInfo.InvariantCulture);
|
||||
|
||||
private static readonly string ExpectedSunsetIso = new DateTimeOffset(2026, 5, 1, 0, 0, 0, TimeSpan.Zero)
|
||||
.ToString("O", CultureInfo.InvariantCulture);
|
||||
|
||||
private readonly AuthorityWebApplicationFactory factory;
|
||||
|
||||
public LegacyAuthDeprecationTests(AuthorityWebApplicationFactory factory)
|
||||
=> this.factory = factory ?? throw new ArgumentNullException(nameof(factory));
|
||||
|
||||
[Fact]
|
||||
public async Task LegacyTokenEndpoint_IncludesDeprecationHeaders()
|
||||
{
|
||||
using var client = factory.CreateClient();
|
||||
|
||||
using var response = await client.PostAsync(
|
||||
"/oauth/token",
|
||||
new FormUrlEncodedContent(new Dictionary<string, string>
|
||||
{
|
||||
["grant_type"] = "client_credentials"
|
||||
}));
|
||||
|
||||
Assert.NotNull(response);
|
||||
Assert.True(response.Headers.TryGetValues("Deprecation", out var deprecationValues));
|
||||
Assert.Contains(ExpectedDeprecationHeader, deprecationValues);
|
||||
|
||||
Assert.True(response.Headers.TryGetValues("Sunset", out var sunsetValues));
|
||||
Assert.Contains(ExpectedSunsetHeader, sunsetValues);
|
||||
|
||||
Assert.True(response.Headers.TryGetValues("Warning", out var warningValues));
|
||||
Assert.Contains(warningValues, warning => warning.Contains("Legacy Authority endpoint", StringComparison.OrdinalIgnoreCase));
|
||||
|
||||
Assert.True(response.Headers.TryGetValues("Link", out var linkValues));
|
||||
Assert.Contains(linkValues, value => value.Contains("rel=\"sunset\"", StringComparison.OrdinalIgnoreCase));
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task LegacyTokenEndpoint_EmitsAuditEvent()
|
||||
{
|
||||
var sink = new RecordingAuthEventSink();
|
||||
|
||||
using var customFactory = factory.WithWebHostBuilder(builder =>
|
||||
{
|
||||
builder.ConfigureServices(services =>
|
||||
{
|
||||
services.RemoveAll<IAuthEventSink>();
|
||||
services.AddSingleton<IAuthEventSink>(sink);
|
||||
});
|
||||
});
|
||||
|
||||
using var client = customFactory.CreateClient();
|
||||
|
||||
using var response = await client.PostAsync(
|
||||
"/oauth/token",
|
||||
new FormUrlEncodedContent(new Dictionary<string, string>
|
||||
{
|
||||
["grant_type"] = "client_credentials"
|
||||
}));
|
||||
|
||||
Assert.NotNull(response);
|
||||
|
||||
var record = Assert.Single(sink.Events);
|
||||
Assert.Equal("authority.api.legacy_endpoint", record.EventType);
|
||||
|
||||
Assert.Contains(record.Properties, property =>
|
||||
string.Equals(property.Name, "legacy.endpoint.original", StringComparison.Ordinal) &&
|
||||
string.Equals(property.Value.Value, "/oauth/token", StringComparison.Ordinal));
|
||||
|
||||
Assert.Contains(record.Properties, property =>
|
||||
string.Equals(property.Name, "legacy.endpoint.canonical", StringComparison.Ordinal) &&
|
||||
string.Equals(property.Value.Value, "/token", StringComparison.Ordinal));
|
||||
|
||||
Assert.Contains(record.Properties, property =>
|
||||
string.Equals(property.Name, "legacy.sunset_at", StringComparison.Ordinal) &&
|
||||
string.Equals(property.Value.Value, ExpectedSunsetIso, StringComparison.Ordinal));
|
||||
}
|
||||
|
||||
private sealed class RecordingAuthEventSink : IAuthEventSink
|
||||
{
|
||||
private readonly ConcurrentQueue<AuthEventRecord> events = new();
|
||||
|
||||
public IReadOnlyCollection<AuthEventRecord> Events => events.ToArray();
|
||||
|
||||
public ValueTask WriteAsync(AuthEventRecord record, CancellationToken cancellationToken)
|
||||
{
|
||||
events.Enqueue(record);
|
||||
return ValueTask.CompletedTask;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
File diff suppressed because it is too large
Load Diff
@@ -1,21 +1,21 @@
|
||||
<?xml version='1.0' encoding='utf-8'?>
|
||||
<Project Sdk="Microsoft.NET.Sdk">
|
||||
<PropertyGroup>
|
||||
<TargetFramework>net10.0</TargetFramework>
|
||||
<Nullable>enable</Nullable>
|
||||
<ImplicitUsings>enable</ImplicitUsings>
|
||||
<IsPackable>false</IsPackable>
|
||||
</PropertyGroup>
|
||||
<ItemGroup>
|
||||
<ProjectReference Include="..\StellaOps.Authority\StellaOps.Authority.csproj" />
|
||||
<ProjectReference Include="..\StellaOps.Authority.Plugins.Abstractions\StellaOps.Authority.Plugins.Abstractions.csproj" />
|
||||
<ProjectReference Include="../../../__Libraries/StellaOps.Auth.Security/StellaOps.Auth.Security.csproj" />
|
||||
</ItemGroup>
|
||||
<ItemGroup>
|
||||
<PackageReference Include="MongoDB.Driver" Version="3.5.0" />
|
||||
</ItemGroup>
|
||||
<ItemGroup>
|
||||
<?xml version='1.0' encoding='utf-8'?>
|
||||
<Project Sdk="Microsoft.NET.Sdk">
|
||||
<PropertyGroup>
|
||||
<TargetFramework>net10.0</TargetFramework>
|
||||
<Nullable>enable</Nullable>
|
||||
<ImplicitUsings>enable</ImplicitUsings>
|
||||
<IsPackable>false</IsPackable>
|
||||
</PropertyGroup>
|
||||
<ItemGroup>
|
||||
<ProjectReference Include="..\StellaOps.Authority\StellaOps.Authority.csproj" />
|
||||
<ProjectReference Include="..\StellaOps.Authority.Plugins.Abstractions\StellaOps.Authority.Plugins.Abstractions.csproj" />
|
||||
<ProjectReference Include="../../../__Libraries/StellaOps.Auth.Security/StellaOps.Auth.Security.csproj" />
|
||||
</ItemGroup>
|
||||
<ItemGroup>
|
||||
<PackageReference Include="MongoDB.Driver" Version="3.5.0" />
|
||||
</ItemGroup>
|
||||
<ItemGroup>
|
||||
<Compile Include="../../../../tests/shared/OpenSslLegacyShim.cs" Link="Infrastructure/OpenSslLegacyShim.cs" />
|
||||
<None Include="../../../../tests/native/openssl-1.1/linux-x64/*" Link="native/linux-x64/%(Filename)%(Extension)" CopyToOutputDirectory="PreserveNewest" />
|
||||
</ItemGroup>
|
||||
</Project>
|
||||
</ItemGroup>
|
||||
</Project>
|
||||
|
||||
@@ -1,457 +1,457 @@
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.IO;
|
||||
using System.Linq;
|
||||
using System.Net;
|
||||
using System.Net.Http.Headers;
|
||||
using System.Net.Http.Json;
|
||||
using System.Security.Cryptography;
|
||||
using System.Threading.Tasks;
|
||||
using Microsoft.AspNetCore.Authentication;
|
||||
using Microsoft.AspNetCore.Mvc.Testing;
|
||||
using Microsoft.Extensions.Configuration;
|
||||
using Microsoft.Extensions.DependencyInjection;
|
||||
using Microsoft.Extensions.DependencyInjection.Extensions;
|
||||
using Microsoft.Extensions.Time.Testing;
|
||||
using StellaOps.Auth.Abstractions;
|
||||
using StellaOps.Authority;
|
||||
using StellaOps.Authority.Tests.Infrastructure;
|
||||
using StellaOps.Authority.Vulnerability.Attachments;
|
||||
using StellaOps.Authority.Vulnerability.Workflow;
|
||||
using StellaOps.Configuration;
|
||||
using StellaOps.Cryptography;
|
||||
using StellaOps.Cryptography.Audit;
|
||||
using Xunit;
|
||||
|
||||
namespace StellaOps.Authority.Tests.Vulnerability;
|
||||
|
||||
public sealed class VulnWorkflowTokenEndpointTests : IClassFixture<AuthorityWebApplicationFactory>
|
||||
{
|
||||
private readonly AuthorityWebApplicationFactory factory;
|
||||
private const string SigningEnabledKey = "STELLAOPS_AUTHORITY_AUTHORITY__SIGNING__ENABLED";
|
||||
private const string SigningActiveKeyIdKey = "STELLAOPS_AUTHORITY_AUTHORITY__SIGNING__ACTIVEKEYID";
|
||||
private const string SigningKeyPathKey = "STELLAOPS_AUTHORITY_AUTHORITY__SIGNING__KEYPATH";
|
||||
private const string SigningKeySourceKey = "STELLAOPS_AUTHORITY_AUTHORITY__SIGNING__KEYSOURCE";
|
||||
private const string SigningAlgorithmKey = "STELLAOPS_AUTHORITY_AUTHORITY__SIGNING__ALGORITHM";
|
||||
|
||||
public VulnWorkflowTokenEndpointTests(AuthorityWebApplicationFactory factory)
|
||||
{
|
||||
this.factory = factory ?? throw new ArgumentNullException(nameof(factory));
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task IssueAndVerifyWorkflowToken_SucceedsAndAudits()
|
||||
{
|
||||
var tempDir = Directory.CreateTempSubdirectory("workflow-token-success");
|
||||
var keyPath = Path.Combine(tempDir.FullName, "signing-key.pem");
|
||||
|
||||
try
|
||||
{
|
||||
CreateEcPrivateKey(keyPath);
|
||||
|
||||
using var env = new EnvironmentVariableScope(new[]
|
||||
{
|
||||
new KeyValuePair<string, string?>(SigningEnabledKey, "true"),
|
||||
new KeyValuePair<string, string?>(SigningActiveKeyIdKey, "workflow-key"),
|
||||
new KeyValuePair<string, string?>(SigningKeyPathKey, keyPath),
|
||||
new KeyValuePair<string, string?>(SigningKeySourceKey, "file"),
|
||||
new KeyValuePair<string, string?>(SigningAlgorithmKey, SignatureAlgorithms.Es256)
|
||||
});
|
||||
|
||||
var sink = new RecordingAuthEventSink();
|
||||
var timeProvider = new FakeTimeProvider(DateTimeOffset.Parse("2025-11-02T09:00:00Z"));
|
||||
|
||||
using var app = CreateSignedAuthorityApp(sink, timeProvider, "workflow-key", keyPath);
|
||||
using var client = app.CreateClient();
|
||||
client.DefaultRequestHeaders.Authorization = new AuthenticationHeaderValue(TestAuthHandler.SchemeName);
|
||||
client.DefaultRequestHeaders.Add("X-Test-Scopes", StellaOpsScopes.VulnOperate);
|
||||
client.DefaultRequestHeaders.Add("X-Test-Tenant", "tenant-default");
|
||||
client.DefaultRequestHeaders.Add(AuthorityHttpHeaders.Tenant, "tenant-default");
|
||||
|
||||
var issuePayload = new
|
||||
{
|
||||
tenant = "tenant-default",
|
||||
actions = new[] { "assign", "comment" },
|
||||
context = new Dictionary<string, string> { ["finding_id"] = "F-123" },
|
||||
nonce = "workflow-nonce-123456",
|
||||
expiresInSeconds = 600
|
||||
};
|
||||
|
||||
var issueResponse = await client.PostAsJsonAsync("/vuln/workflow/anti-forgery/issue", issuePayload);
|
||||
var issueBody = await issueResponse.Content.ReadAsStringAsync();
|
||||
Assert.True(issueResponse.StatusCode == HttpStatusCode.OK, $"Issue anti-forgery failed: {issueResponse.StatusCode} {issueBody}");
|
||||
|
||||
var issued = System.Text.Json.JsonSerializer.Deserialize<VulnWorkflowAntiForgeryIssueResponse>(
|
||||
issueBody,
|
||||
new System.Text.Json.JsonSerializerOptions { PropertyNameCaseInsensitive = true });
|
||||
Assert.NotNull(issued);
|
||||
Assert.Equal("workflow-nonce-123456", issued!.Nonce);
|
||||
Assert.Contains("assign", issued.Actions);
|
||||
Assert.Contains("comment", issued.Actions);
|
||||
|
||||
var verifyPayload = new VulnWorkflowAntiForgeryVerifyRequest
|
||||
{
|
||||
Token = issued.Token,
|
||||
RequiredAction = "assign",
|
||||
Tenant = "tenant-default",
|
||||
Nonce = "workflow-nonce-123456"
|
||||
};
|
||||
|
||||
var verifyResponse = await client.PostAsJsonAsync("/vuln/workflow/anti-forgery/verify", verifyPayload);
|
||||
var verifyBody = await verifyResponse.Content.ReadAsStringAsync();
|
||||
Assert.True(verifyResponse.StatusCode == HttpStatusCode.OK, $"Verify anti-forgery failed: {verifyResponse.StatusCode} {verifyBody}");
|
||||
|
||||
var verified = System.Text.Json.JsonSerializer.Deserialize<VulnWorkflowAntiForgeryVerifyResponse>(
|
||||
verifyBody,
|
||||
new System.Text.Json.JsonSerializerOptions { PropertyNameCaseInsensitive = true });
|
||||
Assert.NotNull(verified);
|
||||
Assert.Equal("tenant-default", verified!.Tenant);
|
||||
Assert.Equal("workflow-nonce-123456", verified.Nonce);
|
||||
|
||||
var issuedEvent = Assert.Single(sink.Events, evt => evt.EventType == "vuln.workflow.csrf.issued");
|
||||
Assert.Contains(issuedEvent.Properties, property => property.Name == "vuln.workflow.actor");
|
||||
|
||||
var verifiedEvent = Assert.Single(sink.Events, evt => evt.EventType == "vuln.workflow.csrf.verified");
|
||||
Assert.Contains(verifiedEvent.Properties, property => property.Name == "vuln.workflow.nonce" && property.Value.Value == "workflow-nonce-123456");
|
||||
}
|
||||
finally
|
||||
{
|
||||
TryDeleteDirectory(tempDir.FullName);
|
||||
}
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task IssueWorkflowToken_ReturnsBadRequest_WhenActionsMissing()
|
||||
{
|
||||
var tempDir = Directory.CreateTempSubdirectory("workflow-token-missing-actions");
|
||||
var keyPath = Path.Combine(tempDir.FullName, "signing-key.pem");
|
||||
|
||||
try
|
||||
{
|
||||
CreateEcPrivateKey(keyPath);
|
||||
|
||||
using var env = new EnvironmentVariableScope(new[]
|
||||
{
|
||||
new KeyValuePair<string, string?>(SigningEnabledKey, "true"),
|
||||
new KeyValuePair<string, string?>(SigningActiveKeyIdKey, "workflow-key"),
|
||||
new KeyValuePair<string, string?>(SigningKeyPathKey, keyPath),
|
||||
new KeyValuePair<string, string?>(SigningKeySourceKey, "file"),
|
||||
new KeyValuePair<string, string?>(SigningAlgorithmKey, SignatureAlgorithms.Es256)
|
||||
});
|
||||
|
||||
var sink = new RecordingAuthEventSink();
|
||||
var timeProvider = new FakeTimeProvider(DateTimeOffset.Parse("2025-11-02T09:10:00Z"));
|
||||
|
||||
using var app = CreateSignedAuthorityApp(sink, timeProvider, "workflow-key", keyPath);
|
||||
using var client = app.CreateClient();
|
||||
client.DefaultRequestHeaders.Authorization = new AuthenticationHeaderValue(TestAuthHandler.SchemeName);
|
||||
client.DefaultRequestHeaders.Add("X-Test-Scopes", StellaOpsScopes.VulnOperate);
|
||||
client.DefaultRequestHeaders.Add("X-Test-Tenant", "tenant-default");
|
||||
client.DefaultRequestHeaders.Add(AuthorityHttpHeaders.Tenant, "tenant-default");
|
||||
|
||||
var issuePayload = new
|
||||
{
|
||||
tenant = "tenant-default",
|
||||
actions = Array.Empty<string>()
|
||||
};
|
||||
|
||||
var response = await client.PostAsJsonAsync("/vuln/workflow/anti-forgery/issue", issuePayload);
|
||||
Assert.Equal(HttpStatusCode.BadRequest, response.StatusCode);
|
||||
|
||||
var error = await response.Content.ReadFromJsonAsync<Dictionary<string, string>>();
|
||||
Assert.NotNull(error);
|
||||
Assert.Equal("invalid_request", error!["error"]);
|
||||
Assert.Contains("action", error["message"], StringComparison.OrdinalIgnoreCase);
|
||||
|
||||
Assert.DoesNotContain(sink.Events, evt => evt.EventType == "vuln.workflow.csrf.issued");
|
||||
}
|
||||
finally
|
||||
{
|
||||
TryDeleteDirectory(tempDir.FullName);
|
||||
}
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task VerifyWorkflowToken_ReturnsBadRequest_WhenActionNotPermitted()
|
||||
{
|
||||
var tempDir = Directory.CreateTempSubdirectory("workflow-token-invalid-action");
|
||||
var keyPath = Path.Combine(tempDir.FullName, "signing-key.pem");
|
||||
|
||||
try
|
||||
{
|
||||
CreateEcPrivateKey(keyPath);
|
||||
|
||||
using var env = new EnvironmentVariableScope(new[]
|
||||
{
|
||||
new KeyValuePair<string, string?>(SigningEnabledKey, "true"),
|
||||
new KeyValuePair<string, string?>(SigningActiveKeyIdKey, "workflow-key"),
|
||||
new KeyValuePair<string, string?>(SigningKeyPathKey, keyPath),
|
||||
new KeyValuePair<string, string?>(SigningKeySourceKey, "file"),
|
||||
new KeyValuePair<string, string?>(SigningAlgorithmKey, SignatureAlgorithms.Es256)
|
||||
});
|
||||
|
||||
var sink = new RecordingAuthEventSink();
|
||||
var timeProvider = new FakeTimeProvider(DateTimeOffset.Parse("2025-11-02T09:20:00Z"));
|
||||
|
||||
using var app = CreateSignedAuthorityApp(sink, timeProvider, "workflow-key", keyPath);
|
||||
using var client = app.CreateClient();
|
||||
client.DefaultRequestHeaders.Authorization = new AuthenticationHeaderValue(TestAuthHandler.SchemeName);
|
||||
client.DefaultRequestHeaders.Add("X-Test-Scopes", StellaOpsScopes.VulnOperate);
|
||||
client.DefaultRequestHeaders.Add("X-Test-Tenant", "tenant-default");
|
||||
client.DefaultRequestHeaders.Add(AuthorityHttpHeaders.Tenant, "tenant-default");
|
||||
|
||||
var issuePayload = new
|
||||
{
|
||||
tenant = "tenant-default",
|
||||
actions = new[] { "assign" },
|
||||
nonce = "workflow-nonce-789012"
|
||||
};
|
||||
|
||||
var issueResponse = await client.PostAsJsonAsync("/vuln/workflow/anti-forgery/issue", issuePayload);
|
||||
Assert.Equal(HttpStatusCode.OK, issueResponse.StatusCode);
|
||||
var issued = await issueResponse.Content.ReadFromJsonAsync<VulnWorkflowAntiForgeryIssueResponse>();
|
||||
Assert.NotNull(issued);
|
||||
|
||||
var verifyPayload = new VulnWorkflowAntiForgeryVerifyRequest
|
||||
{
|
||||
Token = issued!.Token,
|
||||
RequiredAction = "close",
|
||||
Tenant = "tenant-default",
|
||||
Nonce = "workflow-nonce-789012"
|
||||
};
|
||||
|
||||
var verifyResponse = await client.PostAsJsonAsync("/vuln/workflow/anti-forgery/verify", verifyPayload);
|
||||
Assert.Equal(HttpStatusCode.BadRequest, verifyResponse.StatusCode);
|
||||
|
||||
var error = await verifyResponse.Content.ReadFromJsonAsync<Dictionary<string, string>>();
|
||||
Assert.NotNull(error);
|
||||
Assert.Equal("invalid_token", error!["error"]);
|
||||
Assert.Contains("Token does not permit action", error["message"], StringComparison.Ordinal);
|
||||
|
||||
Assert.Single(sink.Events, evt => evt.EventType == "vuln.workflow.csrf.issued");
|
||||
Assert.DoesNotContain(sink.Events, evt => evt.EventType == "vuln.workflow.csrf.verified");
|
||||
}
|
||||
finally
|
||||
{
|
||||
TryDeleteDirectory(tempDir.FullName);
|
||||
}
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task IssueAndVerifyAttachmentToken_SucceedsAndAudits()
|
||||
{
|
||||
var tempDir = Directory.CreateTempSubdirectory("attachment-token-success");
|
||||
var keyPath = Path.Combine(tempDir.FullName, "attachment-key.pem");
|
||||
|
||||
try
|
||||
{
|
||||
CreateEcPrivateKey(keyPath);
|
||||
|
||||
using var env = new EnvironmentVariableScope(new[]
|
||||
{
|
||||
new KeyValuePair<string, string?>(SigningEnabledKey, "true"),
|
||||
new KeyValuePair<string, string?>(SigningActiveKeyIdKey, "attachment-key"),
|
||||
new KeyValuePair<string, string?>(SigningKeyPathKey, keyPath),
|
||||
new KeyValuePair<string, string?>(SigningKeySourceKey, "file"),
|
||||
new KeyValuePair<string, string?>(SigningAlgorithmKey, SignatureAlgorithms.Es256)
|
||||
});
|
||||
|
||||
var sink = new RecordingAuthEventSink();
|
||||
var timeProvider = new FakeTimeProvider(DateTimeOffset.Parse("2025-11-02T11:00:00Z"));
|
||||
|
||||
using var app = CreateSignedAuthorityApp(sink, timeProvider, "attachment-key", keyPath);
|
||||
using var client = app.CreateClient();
|
||||
client.DefaultRequestHeaders.Authorization = new AuthenticationHeaderValue(TestAuthHandler.SchemeName);
|
||||
client.DefaultRequestHeaders.Add("X-Test-Scopes", StellaOpsScopes.VulnInvestigate);
|
||||
client.DefaultRequestHeaders.Add("X-Test-Tenant", "tenant-default");
|
||||
client.DefaultRequestHeaders.Add(AuthorityHttpHeaders.Tenant, "tenant-default");
|
||||
|
||||
var issuePayload = new VulnAttachmentTokenIssueRequest
|
||||
{
|
||||
Tenant = "tenant-default",
|
||||
LedgerEventHash = "ledger-hash-001",
|
||||
AttachmentId = "attach-123",
|
||||
FindingId = "find-456",
|
||||
ContentHash = "sha256:abc123",
|
||||
ContentType = "application/pdf",
|
||||
Metadata = new Dictionary<string, string?> { ["origin"] = "vuln-workflow" }
|
||||
};
|
||||
|
||||
var issueResponse = await client.PostAsJsonAsync("/vuln/attachments/tokens/issue", issuePayload);
|
||||
Assert.Equal(HttpStatusCode.OK, issueResponse.StatusCode);
|
||||
var issued = await issueResponse.Content.ReadFromJsonAsync<VulnAttachmentTokenIssueResponse>();
|
||||
Assert.NotNull(issued);
|
||||
Assert.Equal("attach-123", issued!.AttachmentId);
|
||||
|
||||
var verifyPayload = new VulnAttachmentTokenVerifyRequest
|
||||
{
|
||||
Token = issued.Token,
|
||||
Tenant = "tenant-default",
|
||||
LedgerEventHash = "ledger-hash-001",
|
||||
AttachmentId = "attach-123"
|
||||
};
|
||||
|
||||
var verifyResponse = await client.PostAsJsonAsync("/vuln/attachments/tokens/verify", verifyPayload);
|
||||
Assert.Equal(HttpStatusCode.OK, verifyResponse.StatusCode);
|
||||
var verified = await verifyResponse.Content.ReadFromJsonAsync<VulnAttachmentTokenVerifyResponse>();
|
||||
Assert.NotNull(verified);
|
||||
Assert.Equal("ledger-hash-001", verified!.LedgerEventHash);
|
||||
|
||||
var issuedEvent = Assert.Single(sink.Events, evt => evt.EventType == "vuln.attachment.token.issued");
|
||||
Assert.Contains(issuedEvent.Properties, property => property.Name == "vuln.attachment.ledger_hash" && property.Value.Value == "ledger-hash-001");
|
||||
|
||||
var verifiedEvent = Assert.Single(sink.Events, evt => evt.EventType == "vuln.attachment.token.verified");
|
||||
Assert.Contains(verifiedEvent.Properties, property => property.Name == "vuln.attachment.ledger_hash" && property.Value.Value == "ledger-hash-001");
|
||||
}
|
||||
finally
|
||||
{
|
||||
TryDeleteDirectory(tempDir.FullName);
|
||||
}
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task VerifyAttachmentToken_ReturnsBadRequest_WhenLedgerMismatch()
|
||||
{
|
||||
var tempDir = Directory.CreateTempSubdirectory("attachment-token-ledger-mismatch");
|
||||
var keyPath = Path.Combine(tempDir.FullName, "attachment-key.pem");
|
||||
|
||||
try
|
||||
{
|
||||
CreateEcPrivateKey(keyPath);
|
||||
|
||||
using var env = new EnvironmentVariableScope(new[]
|
||||
{
|
||||
new KeyValuePair<string, string?>(SigningEnabledKey, "true"),
|
||||
new KeyValuePair<string, string?>(SigningActiveKeyIdKey, "attachment-key"),
|
||||
new KeyValuePair<string, string?>(SigningKeyPathKey, keyPath),
|
||||
new KeyValuePair<string, string?>(SigningKeySourceKey, "file"),
|
||||
new KeyValuePair<string, string?>(SigningAlgorithmKey, SignatureAlgorithms.Es256)
|
||||
});
|
||||
|
||||
var sink = new RecordingAuthEventSink();
|
||||
var timeProvider = new FakeTimeProvider(DateTimeOffset.Parse("2025-11-02T11:10:00Z"));
|
||||
|
||||
using var app = CreateSignedAuthorityApp(sink, timeProvider, "attachment-key", keyPath);
|
||||
using var client = app.CreateClient();
|
||||
client.DefaultRequestHeaders.Authorization = new AuthenticationHeaderValue(TestAuthHandler.SchemeName);
|
||||
client.DefaultRequestHeaders.Add("X-Test-Scopes", StellaOpsScopes.VulnInvestigate);
|
||||
client.DefaultRequestHeaders.Add("X-Test-Tenant", "tenant-default");
|
||||
client.DefaultRequestHeaders.Add(AuthorityHttpHeaders.Tenant, "tenant-default");
|
||||
|
||||
var issuePayload = new VulnAttachmentTokenIssueRequest
|
||||
{
|
||||
Tenant = "tenant-default",
|
||||
LedgerEventHash = "ledger-hash-001",
|
||||
AttachmentId = "attach-123"
|
||||
};
|
||||
|
||||
var issueResponse = await client.PostAsJsonAsync("/vuln/attachments/tokens/issue", issuePayload);
|
||||
Assert.Equal(HttpStatusCode.OK, issueResponse.StatusCode);
|
||||
var issued = await issueResponse.Content.ReadFromJsonAsync<VulnAttachmentTokenIssueResponse>();
|
||||
Assert.NotNull(issued);
|
||||
|
||||
var verifyPayload = new VulnAttachmentTokenVerifyRequest
|
||||
{
|
||||
Token = issued!.Token,
|
||||
Tenant = "tenant-default",
|
||||
LedgerEventHash = "ledger-hash-999",
|
||||
AttachmentId = "attach-123"
|
||||
};
|
||||
|
||||
var verifyResponse = await client.PostAsJsonAsync("/vuln/attachments/tokens/verify", verifyPayload);
|
||||
Assert.Equal(HttpStatusCode.BadRequest, verifyResponse.StatusCode);
|
||||
|
||||
var error = await verifyResponse.Content.ReadFromJsonAsync<Dictionary<string, string>>();
|
||||
Assert.NotNull(error);
|
||||
Assert.Equal("invalid_token", error!["error"]);
|
||||
Assert.Contains("ledger reference", error["message"], StringComparison.OrdinalIgnoreCase);
|
||||
|
||||
Assert.Single(sink.Events, evt => evt.EventType == "vuln.attachment.token.issued");
|
||||
Assert.DoesNotContain(sink.Events, evt => evt.EventType == "vuln.attachment.token.verified");
|
||||
}
|
||||
finally
|
||||
{
|
||||
TryDeleteDirectory(tempDir.FullName);
|
||||
}
|
||||
}
|
||||
|
||||
private WebApplicationFactory<Program> CreateSignedAuthorityApp(
|
||||
RecordingAuthEventSink sink,
|
||||
FakeTimeProvider timeProvider,
|
||||
string signingKeyId,
|
||||
string signingKeyPath)
|
||||
{
|
||||
return factory.WithWebHostBuilder(host =>
|
||||
{
|
||||
host.ConfigureAppConfiguration((_, configuration) =>
|
||||
{
|
||||
configuration.AddInMemoryCollection(new Dictionary<string, string?>
|
||||
{
|
||||
["Authority:Signing:Enabled"] = "true",
|
||||
["Authority:Signing:ActiveKeyId"] = signingKeyId,
|
||||
["Authority:Signing:KeyPath"] = signingKeyPath,
|
||||
["Authority:Signing:KeySource"] = "file",
|
||||
["Authority:Signing:Algorithm"] = SignatureAlgorithms.Es256
|
||||
});
|
||||
});
|
||||
|
||||
host.ConfigureServices(services =>
|
||||
{
|
||||
services.RemoveAll<IAuthEventSink>();
|
||||
services.AddSingleton<IAuthEventSink>(sink);
|
||||
services.Replace(ServiceDescriptor.Singleton<TimeProvider>(timeProvider));
|
||||
services.PostConfigure<StellaOpsAuthorityOptions>(options =>
|
||||
{
|
||||
options.Signing.Enabled = true;
|
||||
options.Signing.ActiveKeyId = signingKeyId;
|
||||
options.Signing.KeyPath = signingKeyPath;
|
||||
options.Signing.KeySource = "file";
|
||||
options.Signing.Algorithm = SignatureAlgorithms.Es256;
|
||||
options.VulnerabilityExplorer.Workflow.AntiForgery.Enabled = true;
|
||||
options.VulnerabilityExplorer.Attachments.Enabled = true;
|
||||
});
|
||||
|
||||
var authBuilder = services.AddAuthentication(options =>
|
||||
{
|
||||
options.DefaultAuthenticateScheme = TestAuthHandler.SchemeName;
|
||||
options.DefaultChallengeScheme = TestAuthHandler.SchemeName;
|
||||
});
|
||||
|
||||
authBuilder.AddScheme<AuthenticationSchemeOptions, TestAuthHandler>(TestAuthHandler.SchemeName, _ => { });
|
||||
authBuilder.AddScheme<AuthenticationSchemeOptions, TestAuthHandler>(StellaOpsAuthenticationDefaults.AuthenticationScheme, _ => { });
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
private static void CreateEcPrivateKey(string path)
|
||||
{
|
||||
Directory.CreateDirectory(Path.GetDirectoryName(path)!);
|
||||
using var ecdsa = ECDsa.Create(ECCurve.NamedCurves.nistP256);
|
||||
File.WriteAllText(path, ecdsa.ExportECPrivateKeyPem());
|
||||
}
|
||||
|
||||
private static void TryDeleteDirectory(string directory)
|
||||
{
|
||||
try
|
||||
{
|
||||
Directory.Delete(directory, recursive: true);
|
||||
}
|
||||
catch
|
||||
{
|
||||
// Ignored during cleanup.
|
||||
}
|
||||
}
|
||||
|
||||
private sealed class RecordingAuthEventSink : IAuthEventSink
|
||||
{
|
||||
private readonly List<AuthEventRecord> events = new();
|
||||
|
||||
public IReadOnlyList<AuthEventRecord> Events => events;
|
||||
|
||||
public ValueTask WriteAsync(AuthEventRecord record, CancellationToken cancellationToken)
|
||||
{
|
||||
events.Add(record);
|
||||
return ValueTask.CompletedTask;
|
||||
}
|
||||
}
|
||||
}
|
||||
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Net;
using System.Net.Http.Headers;
using System.Net.Http.Json;
using System.Security.Cryptography;
using System.Threading.Tasks;
using Microsoft.AspNetCore.Authentication;
using Microsoft.AspNetCore.Mvc.Testing;
using Microsoft.Extensions.Configuration;
using Microsoft.Extensions.DependencyInjection;
using Microsoft.Extensions.DependencyInjection.Extensions;
using Microsoft.Extensions.Time.Testing;
using StellaOps.Auth.Abstractions;
using StellaOps.Authority;
using StellaOps.Authority.Tests.Infrastructure;
using StellaOps.Authority.Vulnerability.Attachments;
using StellaOps.Authority.Vulnerability.Workflow;
using StellaOps.Configuration;
using StellaOps.Cryptography;
using StellaOps.Cryptography.Audit;
using Xunit;

namespace StellaOps.Authority.Tests.Vulnerability;

public sealed class VulnWorkflowTokenEndpointTests : IClassFixture<AuthorityWebApplicationFactory>
{
    private readonly AuthorityWebApplicationFactory factory;
    private const string SigningEnabledKey = "STELLAOPS_AUTHORITY_AUTHORITY__SIGNING__ENABLED";
    private const string SigningActiveKeyIdKey = "STELLAOPS_AUTHORITY_AUTHORITY__SIGNING__ACTIVEKEYID";
    private const string SigningKeyPathKey = "STELLAOPS_AUTHORITY_AUTHORITY__SIGNING__KEYPATH";
    private const string SigningKeySourceKey = "STELLAOPS_AUTHORITY_AUTHORITY__SIGNING__KEYSOURCE";
    private const string SigningAlgorithmKey = "STELLAOPS_AUTHORITY_AUTHORITY__SIGNING__ALGORITHM";

    public VulnWorkflowTokenEndpointTests(AuthorityWebApplicationFactory factory)
    {
        this.factory = factory ?? throw new ArgumentNullException(nameof(factory));
    }

    [Fact]
    public async Task IssueAndVerifyWorkflowToken_SucceedsAndAudits()
    {
        var tempDir = Directory.CreateTempSubdirectory("workflow-token-success");
        var keyPath = Path.Combine(tempDir.FullName, "signing-key.pem");

        try
        {
            CreateEcPrivateKey(keyPath);

            using var env = new EnvironmentVariableScope(new[]
            {
                new KeyValuePair<string, string?>(SigningEnabledKey, "true"),
                new KeyValuePair<string, string?>(SigningActiveKeyIdKey, "workflow-key"),
                new KeyValuePair<string, string?>(SigningKeyPathKey, keyPath),
                new KeyValuePair<string, string?>(SigningKeySourceKey, "file"),
                new KeyValuePair<string, string?>(SigningAlgorithmKey, SignatureAlgorithms.Es256)
            });

            var sink = new RecordingAuthEventSink();
            var timeProvider = new FakeTimeProvider(DateTimeOffset.Parse("2025-11-02T09:00:00Z"));

            using var app = CreateSignedAuthorityApp(sink, timeProvider, "workflow-key", keyPath);
            using var client = app.CreateClient();
            client.DefaultRequestHeaders.Authorization = new AuthenticationHeaderValue(TestAuthHandler.SchemeName);
            client.DefaultRequestHeaders.Add("X-Test-Scopes", StellaOpsScopes.VulnOperate);
            client.DefaultRequestHeaders.Add("X-Test-Tenant", "tenant-default");
            client.DefaultRequestHeaders.Add(AuthorityHttpHeaders.Tenant, "tenant-default");

            var issuePayload = new
            {
                tenant = "tenant-default",
                actions = new[] { "assign", "comment" },
                context = new Dictionary<string, string> { ["finding_id"] = "F-123" },
                nonce = "workflow-nonce-123456",
                expiresInSeconds = 600
            };

            var issueResponse = await client.PostAsJsonAsync("/vuln/workflow/anti-forgery/issue", issuePayload);
            var issueBody = await issueResponse.Content.ReadAsStringAsync();
            Assert.True(issueResponse.StatusCode == HttpStatusCode.OK, $"Issue anti-forgery failed: {issueResponse.StatusCode} {issueBody}");

            var issued = System.Text.Json.JsonSerializer.Deserialize<VulnWorkflowAntiForgeryIssueResponse>(
                issueBody,
                new System.Text.Json.JsonSerializerOptions { PropertyNameCaseInsensitive = true });
            Assert.NotNull(issued);
            Assert.Equal("workflow-nonce-123456", issued!.Nonce);
            Assert.Contains("assign", issued.Actions);
            Assert.Contains("comment", issued.Actions);

            var verifyPayload = new VulnWorkflowAntiForgeryVerifyRequest
            {
                Token = issued.Token,
                RequiredAction = "assign",
                Tenant = "tenant-default",
                Nonce = "workflow-nonce-123456"
            };

            var verifyResponse = await client.PostAsJsonAsync("/vuln/workflow/anti-forgery/verify", verifyPayload);
            var verifyBody = await verifyResponse.Content.ReadAsStringAsync();
            Assert.True(verifyResponse.StatusCode == HttpStatusCode.OK, $"Verify anti-forgery failed: {verifyResponse.StatusCode} {verifyBody}");

            var verified = System.Text.Json.JsonSerializer.Deserialize<VulnWorkflowAntiForgeryVerifyResponse>(
                verifyBody,
                new System.Text.Json.JsonSerializerOptions { PropertyNameCaseInsensitive = true });
            Assert.NotNull(verified);
            Assert.Equal("tenant-default", verified!.Tenant);
            Assert.Equal("workflow-nonce-123456", verified.Nonce);

            var issuedEvent = Assert.Single(sink.Events, evt => evt.EventType == "vuln.workflow.csrf.issued");
            Assert.Contains(issuedEvent.Properties, property => property.Name == "vuln.workflow.actor");

            var verifiedEvent = Assert.Single(sink.Events, evt => evt.EventType == "vuln.workflow.csrf.verified");
            Assert.Contains(verifiedEvent.Properties, property => property.Name == "vuln.workflow.nonce" && property.Value.Value == "workflow-nonce-123456");
        }
        finally
        {
            TryDeleteDirectory(tempDir.FullName);
        }
    }

    [Fact]
    public async Task IssueWorkflowToken_ReturnsBadRequest_WhenActionsMissing()
    {
        var tempDir = Directory.CreateTempSubdirectory("workflow-token-missing-actions");
        var keyPath = Path.Combine(tempDir.FullName, "signing-key.pem");

        try
        {
            CreateEcPrivateKey(keyPath);

            using var env = new EnvironmentVariableScope(new[]
            {
                new KeyValuePair<string, string?>(SigningEnabledKey, "true"),
                new KeyValuePair<string, string?>(SigningActiveKeyIdKey, "workflow-key"),
                new KeyValuePair<string, string?>(SigningKeyPathKey, keyPath),
                new KeyValuePair<string, string?>(SigningKeySourceKey, "file"),
                new KeyValuePair<string, string?>(SigningAlgorithmKey, SignatureAlgorithms.Es256)
            });

            var sink = new RecordingAuthEventSink();
            var timeProvider = new FakeTimeProvider(DateTimeOffset.Parse("2025-11-02T09:10:00Z"));

            using var app = CreateSignedAuthorityApp(sink, timeProvider, "workflow-key", keyPath);
            using var client = app.CreateClient();
            client.DefaultRequestHeaders.Authorization = new AuthenticationHeaderValue(TestAuthHandler.SchemeName);
            client.DefaultRequestHeaders.Add("X-Test-Scopes", StellaOpsScopes.VulnOperate);
            client.DefaultRequestHeaders.Add("X-Test-Tenant", "tenant-default");
            client.DefaultRequestHeaders.Add(AuthorityHttpHeaders.Tenant, "tenant-default");

            var issuePayload = new
            {
                tenant = "tenant-default",
                actions = Array.Empty<string>()
            };

            var response = await client.PostAsJsonAsync("/vuln/workflow/anti-forgery/issue", issuePayload);
            Assert.Equal(HttpStatusCode.BadRequest, response.StatusCode);

            var error = await response.Content.ReadFromJsonAsync<Dictionary<string, string>>();
            Assert.NotNull(error);
            Assert.Equal("invalid_request", error!["error"]);
            Assert.Contains("action", error["message"], StringComparison.OrdinalIgnoreCase);

            Assert.DoesNotContain(sink.Events, evt => evt.EventType == "vuln.workflow.csrf.issued");
        }
        finally
        {
            TryDeleteDirectory(tempDir.FullName);
        }
    }

    [Fact]
    public async Task VerifyWorkflowToken_ReturnsBadRequest_WhenActionNotPermitted()
    {
        var tempDir = Directory.CreateTempSubdirectory("workflow-token-invalid-action");
        var keyPath = Path.Combine(tempDir.FullName, "signing-key.pem");

        try
        {
            CreateEcPrivateKey(keyPath);

            using var env = new EnvironmentVariableScope(new[]
            {
                new KeyValuePair<string, string?>(SigningEnabledKey, "true"),
                new KeyValuePair<string, string?>(SigningActiveKeyIdKey, "workflow-key"),
                new KeyValuePair<string, string?>(SigningKeyPathKey, keyPath),
                new KeyValuePair<string, string?>(SigningKeySourceKey, "file"),
                new KeyValuePair<string, string?>(SigningAlgorithmKey, SignatureAlgorithms.Es256)
            });

            var sink = new RecordingAuthEventSink();
            var timeProvider = new FakeTimeProvider(DateTimeOffset.Parse("2025-11-02T09:20:00Z"));

            using var app = CreateSignedAuthorityApp(sink, timeProvider, "workflow-key", keyPath);
            using var client = app.CreateClient();
            client.DefaultRequestHeaders.Authorization = new AuthenticationHeaderValue(TestAuthHandler.SchemeName);
            client.DefaultRequestHeaders.Add("X-Test-Scopes", StellaOpsScopes.VulnOperate);
            client.DefaultRequestHeaders.Add("X-Test-Tenant", "tenant-default");
            client.DefaultRequestHeaders.Add(AuthorityHttpHeaders.Tenant, "tenant-default");

            var issuePayload = new
            {
                tenant = "tenant-default",
                actions = new[] { "assign" },
                nonce = "workflow-nonce-789012"
            };

            var issueResponse = await client.PostAsJsonAsync("/vuln/workflow/anti-forgery/issue", issuePayload);
            Assert.Equal(HttpStatusCode.OK, issueResponse.StatusCode);
            var issued = await issueResponse.Content.ReadFromJsonAsync<VulnWorkflowAntiForgeryIssueResponse>();
            Assert.NotNull(issued);

            var verifyPayload = new VulnWorkflowAntiForgeryVerifyRequest
            {
                Token = issued!.Token,
                RequiredAction = "close",
                Tenant = "tenant-default",
                Nonce = "workflow-nonce-789012"
            };

            var verifyResponse = await client.PostAsJsonAsync("/vuln/workflow/anti-forgery/verify", verifyPayload);
            Assert.Equal(HttpStatusCode.BadRequest, verifyResponse.StatusCode);

            var error = await verifyResponse.Content.ReadFromJsonAsync<Dictionary<string, string>>();
            Assert.NotNull(error);
            Assert.Equal("invalid_token", error!["error"]);
            Assert.Contains("Token does not permit action", error["message"], StringComparison.Ordinal);

            Assert.Single(sink.Events, evt => evt.EventType == "vuln.workflow.csrf.issued");
            Assert.DoesNotContain(sink.Events, evt => evt.EventType == "vuln.workflow.csrf.verified");
        }
        finally
        {
            TryDeleteDirectory(tempDir.FullName);
        }
    }

    [Fact]
    public async Task IssueAndVerifyAttachmentToken_SucceedsAndAudits()
    {
        var tempDir = Directory.CreateTempSubdirectory("attachment-token-success");
        var keyPath = Path.Combine(tempDir.FullName, "attachment-key.pem");

        try
        {
            CreateEcPrivateKey(keyPath);

            using var env = new EnvironmentVariableScope(new[]
            {
                new KeyValuePair<string, string?>(SigningEnabledKey, "true"),
                new KeyValuePair<string, string?>(SigningActiveKeyIdKey, "attachment-key"),
                new KeyValuePair<string, string?>(SigningKeyPathKey, keyPath),
                new KeyValuePair<string, string?>(SigningKeySourceKey, "file"),
                new KeyValuePair<string, string?>(SigningAlgorithmKey, SignatureAlgorithms.Es256)
            });

            var sink = new RecordingAuthEventSink();
            var timeProvider = new FakeTimeProvider(DateTimeOffset.Parse("2025-11-02T11:00:00Z"));

            using var app = CreateSignedAuthorityApp(sink, timeProvider, "attachment-key", keyPath);
            using var client = app.CreateClient();
            client.DefaultRequestHeaders.Authorization = new AuthenticationHeaderValue(TestAuthHandler.SchemeName);
            client.DefaultRequestHeaders.Add("X-Test-Scopes", StellaOpsScopes.VulnInvestigate);
            client.DefaultRequestHeaders.Add("X-Test-Tenant", "tenant-default");
            client.DefaultRequestHeaders.Add(AuthorityHttpHeaders.Tenant, "tenant-default");

            var issuePayload = new VulnAttachmentTokenIssueRequest
            {
                Tenant = "tenant-default",
                LedgerEventHash = "ledger-hash-001",
                AttachmentId = "attach-123",
                FindingId = "find-456",
                ContentHash = "sha256:abc123",
                ContentType = "application/pdf",
                Metadata = new Dictionary<string, string?> { ["origin"] = "vuln-workflow" }
            };

            var issueResponse = await client.PostAsJsonAsync("/vuln/attachments/tokens/issue", issuePayload);
            Assert.Equal(HttpStatusCode.OK, issueResponse.StatusCode);
            var issued = await issueResponse.Content.ReadFromJsonAsync<VulnAttachmentTokenIssueResponse>();
            Assert.NotNull(issued);
            Assert.Equal("attach-123", issued!.AttachmentId);

            var verifyPayload = new VulnAttachmentTokenVerifyRequest
            {
                Token = issued.Token,
                Tenant = "tenant-default",
                LedgerEventHash = "ledger-hash-001",
                AttachmentId = "attach-123"
            };

            var verifyResponse = await client.PostAsJsonAsync("/vuln/attachments/tokens/verify", verifyPayload);
            Assert.Equal(HttpStatusCode.OK, verifyResponse.StatusCode);
            var verified = await verifyResponse.Content.ReadFromJsonAsync<VulnAttachmentTokenVerifyResponse>();
            Assert.NotNull(verified);
            Assert.Equal("ledger-hash-001", verified!.LedgerEventHash);

            var issuedEvent = Assert.Single(sink.Events, evt => evt.EventType == "vuln.attachment.token.issued");
            Assert.Contains(issuedEvent.Properties, property => property.Name == "vuln.attachment.ledger_hash" && property.Value.Value == "ledger-hash-001");

            var verifiedEvent = Assert.Single(sink.Events, evt => evt.EventType == "vuln.attachment.token.verified");
            Assert.Contains(verifiedEvent.Properties, property => property.Name == "vuln.attachment.ledger_hash" && property.Value.Value == "ledger-hash-001");
        }
        finally
        {
            TryDeleteDirectory(tempDir.FullName);
        }
    }

    [Fact]
    public async Task VerifyAttachmentToken_ReturnsBadRequest_WhenLedgerMismatch()
    {
        var tempDir = Directory.CreateTempSubdirectory("attachment-token-ledger-mismatch");
        var keyPath = Path.Combine(tempDir.FullName, "attachment-key.pem");

        try
        {
            CreateEcPrivateKey(keyPath);

            using var env = new EnvironmentVariableScope(new[]
            {
                new KeyValuePair<string, string?>(SigningEnabledKey, "true"),
                new KeyValuePair<string, string?>(SigningActiveKeyIdKey, "attachment-key"),
                new KeyValuePair<string, string?>(SigningKeyPathKey, keyPath),
                new KeyValuePair<string, string?>(SigningKeySourceKey, "file"),
                new KeyValuePair<string, string?>(SigningAlgorithmKey, SignatureAlgorithms.Es256)
            });

            var sink = new RecordingAuthEventSink();
            var timeProvider = new FakeTimeProvider(DateTimeOffset.Parse("2025-11-02T11:10:00Z"));

            using var app = CreateSignedAuthorityApp(sink, timeProvider, "attachment-key", keyPath);
            using var client = app.CreateClient();
            client.DefaultRequestHeaders.Authorization = new AuthenticationHeaderValue(TestAuthHandler.SchemeName);
            client.DefaultRequestHeaders.Add("X-Test-Scopes", StellaOpsScopes.VulnInvestigate);
            client.DefaultRequestHeaders.Add("X-Test-Tenant", "tenant-default");
            client.DefaultRequestHeaders.Add(AuthorityHttpHeaders.Tenant, "tenant-default");

            var issuePayload = new VulnAttachmentTokenIssueRequest
            {
                Tenant = "tenant-default",
                LedgerEventHash = "ledger-hash-001",
                AttachmentId = "attach-123"
            };

            var issueResponse = await client.PostAsJsonAsync("/vuln/attachments/tokens/issue", issuePayload);
            Assert.Equal(HttpStatusCode.OK, issueResponse.StatusCode);
            var issued = await issueResponse.Content.ReadFromJsonAsync<VulnAttachmentTokenIssueResponse>();
            Assert.NotNull(issued);

            var verifyPayload = new VulnAttachmentTokenVerifyRequest
            {
                Token = issued!.Token,
                Tenant = "tenant-default",
                LedgerEventHash = "ledger-hash-999",
                AttachmentId = "attach-123"
            };

            var verifyResponse = await client.PostAsJsonAsync("/vuln/attachments/tokens/verify", verifyPayload);
            Assert.Equal(HttpStatusCode.BadRequest, verifyResponse.StatusCode);

            var error = await verifyResponse.Content.ReadFromJsonAsync<Dictionary<string, string>>();
            Assert.NotNull(error);
            Assert.Equal("invalid_token", error!["error"]);
            Assert.Contains("ledger reference", error["message"], StringComparison.OrdinalIgnoreCase);

            Assert.Single(sink.Events, evt => evt.EventType == "vuln.attachment.token.issued");
            Assert.DoesNotContain(sink.Events, evt => evt.EventType == "vuln.attachment.token.verified");
        }
        finally
        {
            TryDeleteDirectory(tempDir.FullName);
        }
    }

    private WebApplicationFactory<Program> CreateSignedAuthorityApp(
        RecordingAuthEventSink sink,
        FakeTimeProvider timeProvider,
        string signingKeyId,
        string signingKeyPath)
    {
        return factory.WithWebHostBuilder(host =>
        {
            host.ConfigureAppConfiguration((_, configuration) =>
            {
                configuration.AddInMemoryCollection(new Dictionary<string, string?>
                {
                    ["Authority:Signing:Enabled"] = "true",
                    ["Authority:Signing:ActiveKeyId"] = signingKeyId,
                    ["Authority:Signing:KeyPath"] = signingKeyPath,
                    ["Authority:Signing:KeySource"] = "file",
                    ["Authority:Signing:Algorithm"] = SignatureAlgorithms.Es256
                });
            });

            host.ConfigureServices(services =>
            {
                services.RemoveAll<IAuthEventSink>();
                services.AddSingleton<IAuthEventSink>(sink);
                services.Replace(ServiceDescriptor.Singleton<TimeProvider>(timeProvider));
                services.PostConfigure<StellaOpsAuthorityOptions>(options =>
                {
                    options.Signing.Enabled = true;
                    options.Signing.ActiveKeyId = signingKeyId;
                    options.Signing.KeyPath = signingKeyPath;
                    options.Signing.KeySource = "file";
                    options.Signing.Algorithm = SignatureAlgorithms.Es256;
                    options.VulnerabilityExplorer.Workflow.AntiForgery.Enabled = true;
                    options.VulnerabilityExplorer.Attachments.Enabled = true;
                });

                var authBuilder = services.AddAuthentication(options =>
                {
                    options.DefaultAuthenticateScheme = TestAuthHandler.SchemeName;
                    options.DefaultChallengeScheme = TestAuthHandler.SchemeName;
                });

                authBuilder.AddScheme<AuthenticationSchemeOptions, TestAuthHandler>(TestAuthHandler.SchemeName, _ => { });
                authBuilder.AddScheme<AuthenticationSchemeOptions, TestAuthHandler>(StellaOpsAuthenticationDefaults.AuthenticationScheme, _ => { });
            });
        });
    }

    private static void CreateEcPrivateKey(string path)
    {
        Directory.CreateDirectory(Path.GetDirectoryName(path)!);
        using var ecdsa = ECDsa.Create(ECCurve.NamedCurves.nistP256);
        File.WriteAllText(path, ecdsa.ExportECPrivateKeyPem());
    }

    private static void TryDeleteDirectory(string directory)
    {
        try
        {
            Directory.Delete(directory, recursive: true);
        }
        catch
        {
            // Ignored during cleanup.
        }
    }

    private sealed class RecordingAuthEventSink : IAuthEventSink
    {
        private readonly List<AuthEventRecord> events = new();

        public IReadOnlyList<AuthEventRecord> Events => events;

        public ValueTask WriteAsync(AuthEventRecord record, CancellationToken cancellationToken)
        {
            events.Add(record);
            return ValueTask.CompletedTask;
        }
    }
}

@@ -1,254 +1,254 @@
using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.Globalization;
using Microsoft.AspNetCore.Builder;
using Microsoft.AspNetCore.Http;
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Options;
using Microsoft.Net.Http.Headers;
using StellaOps.Configuration;
using StellaOps.Cryptography.Audit;

namespace StellaOps.Authority;

internal sealed class LegacyAuthDeprecationMiddleware
{
    private const string LegacyEventType = "authority.api.legacy_endpoint";
    private const string SunsetHeaderName = "Sunset";

    private static readonly IReadOnlyDictionary<PathString, PathString> LegacyEndpointMap =
        new Dictionary<PathString, PathString>(PathStringComparer.Instance)
        {
            [new PathString("/oauth/token")] = new PathString("/token"),
            [new PathString("/oauth/introspect")] = new PathString("/introspect"),
            [new PathString("/oauth/revoke")] = new PathString("/revoke")
        };

    private readonly RequestDelegate next;
    private readonly AuthorityLegacyAuthEndpointOptions options;
    private readonly IAuthEventSink auditSink;
    private readonly TimeProvider clock;
    private readonly ILogger<LegacyAuthDeprecationMiddleware> logger;

    public LegacyAuthDeprecationMiddleware(
        RequestDelegate next,
        IOptions<StellaOpsAuthorityOptions> authorityOptions,
        IAuthEventSink auditSink,
        TimeProvider clock,
        ILogger<LegacyAuthDeprecationMiddleware> logger)
    {
        this.next = next ?? throw new ArgumentNullException(nameof(next));
        if (authorityOptions is null)
        {
            throw new ArgumentNullException(nameof(authorityOptions));
        }

        options = authorityOptions.Value.ApiLifecycle.LegacyAuth ??
            throw new InvalidOperationException("Authority legacy auth endpoint options are not configured.");
        this.auditSink = auditSink ?? throw new ArgumentNullException(nameof(auditSink));
        this.clock = clock ?? throw new ArgumentNullException(nameof(clock));
        this.logger = logger ?? throw new ArgumentNullException(nameof(logger));
    }

    public async Task InvokeAsync(HttpContext context)
    {
        ArgumentNullException.ThrowIfNull(context);

        if (!options.Enabled)
        {
            await next(context).ConfigureAwait(false);
            return;
        }

        if (!TryResolveLegacyPath(context.Request.Path, out var canonicalPath))
        {
            await next(context).ConfigureAwait(false);
            return;
        }

        var originalPath = context.Request.Path;
        context.Request.Path = canonicalPath;

        logger.LogInformation(
            "Legacy Authority endpoint {OriginalPath} invoked; routing to {CanonicalPath} and emitting deprecation headers.",
            originalPath,
            canonicalPath);

        AppendDeprecationHeaders(context.Response);

        await next(context).ConfigureAwait(false);

        await EmitAuditAsync(context, originalPath, canonicalPath).ConfigureAwait(false);
    }

    private static bool TryResolveLegacyPath(PathString path, out PathString canonicalPath)
    {
        if (LegacyEndpointMap.TryGetValue(Normalize(path), out canonicalPath))
        {
            return true;
        }

        canonicalPath = PathString.Empty;
        return false;
    }

    private static PathString Normalize(PathString value)
    {
        if (!value.HasValue)
        {
            return PathString.Empty;
        }

        var trimmed = value.Value!.TrimEnd('/');
        return new PathString(trimmed.Length == 0 ? "/" : trimmed.ToLowerInvariant());
    }

    private void AppendDeprecationHeaders(HttpResponse response)
    {
        if (response.HasStarted)
        {
            return;
        }

        var deprecation = FormatHttpDate(options.DeprecationDate);
        response.Headers["Deprecation"] = deprecation;

        var sunset = FormatHttpDate(options.SunsetDate);
        response.Headers[SunsetHeaderName] = sunset;

        if (!string.IsNullOrWhiteSpace(options.DocumentationUrl))
        {
            var linkValue = $"<{options.DocumentationUrl}>; rel=\"sunset\"";
            response.Headers.Append(HeaderNames.Link, linkValue);
        }

        var warning = $"299 - \"Legacy Authority endpoint will be removed after {sunset}. Migrate to canonical endpoints before the sunset date.\"";
        response.Headers[HeaderNames.Warning] = warning;
    }

    private async Task EmitAuditAsync(HttpContext context, PathString originalPath, PathString canonicalPath)
    {
        try
        {
            var correlation = Activity.Current?.TraceId.ToString() ?? context.TraceIdentifier;

            var network = BuildNetwork(context);

            var record = new AuthEventRecord
            {
                EventType = LegacyEventType,
                OccurredAt = clock.GetUtcNow(),
                CorrelationId = correlation,
                Outcome = AuthEventOutcome.Success,
                Reason = null,
                Subject = null,
                Client = null,
                Tenant = ClassifiedString.Empty,
                Project = ClassifiedString.Empty,
                Scopes = Array.Empty<string>(),
                Network = network,
                Properties = BuildProperties(
                    ("legacy.endpoint.original", originalPath.Value),
                    ("legacy.endpoint.canonical", canonicalPath.Value),
                    ("legacy.deprecation_at", options.DeprecationDate.ToString("O", CultureInfo.InvariantCulture)),
                    ("legacy.sunset_at", options.SunsetDate.ToString("O", CultureInfo.InvariantCulture)),
                    ("http.status_code", context.Response.StatusCode.ToString(CultureInfo.InvariantCulture)))
            };

            await auditSink.WriteAsync(record, context.RequestAborted).ConfigureAwait(false);
        }
        catch (Exception ex)
        {
            logger.LogWarning(ex, "Failed to emit legacy auth endpoint audit event.");
        }
    }

    private static AuthEventNetwork? BuildNetwork(HttpContext context)
    {
        var remote = context.Connection.RemoteIpAddress?.ToString();
        var forwarded = context.Request.Headers["X-Forwarded-For"].ToString();
        var userAgent = context.Request.Headers.UserAgent.ToString();

        if (string.IsNullOrWhiteSpace(remote) &&
            string.IsNullOrWhiteSpace(forwarded) &&
            string.IsNullOrWhiteSpace(userAgent))
        {
            return null;
        }

        return new AuthEventNetwork
        {
            RemoteAddress = ClassifiedString.Personal(Normalize(remote)),
            ForwardedFor = ClassifiedString.Personal(Normalize(forwarded)),
            UserAgent = ClassifiedString.Personal(Normalize(userAgent))
        };
    }

    private static string? Normalize(string? value)
    {
        if (string.IsNullOrWhiteSpace(value))
        {
            return null;
        }

        var trimmed = value.Trim();
        return trimmed.Length == 0 ? null : trimmed;
    }

    private static IReadOnlyList<AuthEventProperty> BuildProperties(params (string Name, string? Value)[] entries)
    {
        if (entries.Length == 0)
        {
            return Array.Empty<AuthEventProperty>();
        }

        var list = new List<AuthEventProperty>(entries.Length);
        foreach (var (name, value) in entries)
        {
            if (string.IsNullOrWhiteSpace(name))
            {
                continue;
            }

            list.Add(new AuthEventProperty
            {
                Name = name,
                Value = string.IsNullOrWhiteSpace(value)
                    ? ClassifiedString.Empty
                    : ClassifiedString.Public(value)
            });
        }

        return list.Count == 0 ? Array.Empty<AuthEventProperty>() : list;
    }

    private static string FormatHttpDate(DateTimeOffset value)
    {
        return value.UtcDateTime.ToString("r", CultureInfo.InvariantCulture);
    }

    private sealed class PathStringComparer : IEqualityComparer<PathString>
    {
        public static readonly PathStringComparer Instance = new();

        public bool Equals(PathString x, PathString y)
        {
            return string.Equals(Normalize(x).Value, Normalize(y).Value, StringComparison.Ordinal);
        }

        public int GetHashCode(PathString obj)
        {
            return Normalize(obj).Value?.GetHashCode(StringComparison.Ordinal) ?? 0;
        }
    }
}

internal static class LegacyAuthDeprecationExtensions
{
    public static IApplicationBuilder UseLegacyAuthDeprecation(this IApplicationBuilder app)
    {
        ArgumentNullException.ThrowIfNull(app);
        return app.UseMiddleware<LegacyAuthDeprecationMiddleware>();
    }
}

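// Illustrative registration sketch (not part of the files in this commit): a minimal
// Authority host wiring that calls the UseLegacyAuthDeprecation extension defined above
// before the canonical endpoints are mapped, so legacy /oauth/* requests are rewritten
// and audited first. The builder/app variable names and MapControllers call are
// assumptions about the host, not confirmed by this diff.
//
// var builder = WebApplication.CreateBuilder(args);
// var app = builder.Build();
// app.UseLegacyAuthDeprecation();   // rewrites /oauth/token -> /token, adds Deprecation/Sunset headers
// app.MapControllers();
// app.Run();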
@@ -1,181 +1,181 @@
using System;
using System.Collections.Generic;
using System.Linq;
using System.Security.Cryptography;
using System.Globalization;
using System.Text;
using System.Text.Json;
using System.Text.Json.Serialization;
using Microsoft.Extensions.Caching.Memory;
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Options;
using StellaOps.Configuration;
using StellaOps.Cryptography;

namespace StellaOps.Authority.Signing;

internal sealed class AuthorityJwksService
{
    private const string CacheKey = "authority:jwks:current";
    private static readonly JsonSerializerOptions SerializerOptions = new(JsonSerializerDefaults.Web)
    {
        PropertyNamingPolicy = JsonNamingPolicy.CamelCase,
        DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingNull
    };

    private readonly ICryptoProviderRegistry registry;
    private readonly ILogger<AuthorityJwksService> logger;
    private readonly IMemoryCache cache;
    private readonly TimeProvider timeProvider;
    private readonly StellaOpsAuthorityOptions authorityOptions;

    public AuthorityJwksService(
        ICryptoProviderRegistry registry,
        ILogger<AuthorityJwksService> logger,
        IMemoryCache cache,
        TimeProvider timeProvider,
        IOptions<StellaOpsAuthorityOptions> authorityOptions)
    {
        this.registry = registry ?? throw new ArgumentNullException(nameof(registry));
        this.logger = logger ?? throw new ArgumentNullException(nameof(logger));
        this.cache = cache ?? throw new ArgumentNullException(nameof(cache));
        this.timeProvider = timeProvider ?? throw new ArgumentNullException(nameof(timeProvider));
        if (authorityOptions is null)
        {
            throw new ArgumentNullException(nameof(authorityOptions));
        }

        this.authorityOptions = authorityOptions.Value ?? throw new ArgumentNullException(nameof(authorityOptions));
    }

    public AuthorityJwksResult Get()
    {
        if (cache.TryGetValue(CacheKey, out AuthorityJwksCacheEntry? cached) &&
            cached is not null &&
            cached.ExpiresAt > timeProvider.GetUtcNow())
        {
            return cached.Result;
        }

        var response = new AuthorityJwksResponse(BuildKeys());
        var signingOptions = authorityOptions.Signing;
        var lifetime = signingOptions.JwksCacheLifetime > TimeSpan.Zero
            ? signingOptions.JwksCacheLifetime
            : TimeSpan.FromMinutes(5);
        var expires = timeProvider.GetUtcNow().Add(lifetime);
        var etag = ComputeEtag(response, expires);
        var cacheControl = $"public, max-age={(int)lifetime.TotalSeconds}";

        var result = new AuthorityJwksResult(response, etag, expires, cacheControl);
        var entry = new AuthorityJwksCacheEntry(result, expires);

        cache.Set(CacheKey, entry, new MemoryCacheEntryOptions
        {
            AbsoluteExpirationRelativeToNow = lifetime
        });
        return result;
    }

    public void Invalidate()
    {
        cache.Remove(CacheKey);
    }

    private IReadOnlyCollection<JwksKeyEntry> BuildKeys()
    {
        var keys = new List<JwksKeyEntry>();
        var seen = new HashSet<string>(StringComparer.OrdinalIgnoreCase);

        foreach (var provider in registry.Providers)
        {
            foreach (var signingKey in provider.GetSigningKeys())
            {
                var keyId = signingKey.Reference.KeyId;
                if (!seen.Add(keyId))
                {
                    continue;
                }

                try
                {
                    var signer = provider.GetSigner(signingKey.AlgorithmId, signingKey.Reference);
                    var jwk = signer.ExportPublicJsonWebKey();
                    var keyUse = signingKey.Metadata.TryGetValue("use", out var metadataUse) && !string.IsNullOrWhiteSpace(metadataUse)
                        ? metadataUse
                        : jwk.Use;

                    if (string.IsNullOrWhiteSpace(keyUse))
                    {
                        keyUse = "sig";
                    }

                    var entry = new JwksKeyEntry
                    {
                        Kid = jwk.Kid,
                        Kty = jwk.Kty,
                        Use = keyUse,
                        Alg = jwk.Alg,
                        Crv = jwk.Crv,
                        X = jwk.X,
                        Y = jwk.Y,
                        Status = signingKey.Metadata.TryGetValue("status", out var status) ? status : "active"
                    };
                    keys.Add(entry);
                }
                catch (Exception ex)
                {
                    logger.LogWarning(ex, "Failed to export JWKS entry for key {KeyId}.", keyId);
                }
            }
        }

        keys.Sort(static (left, right) => string.Compare(left.Kid, right.Kid, StringComparison.Ordinal));
        return keys;
    }

    private static string ComputeEtag(AuthorityJwksResponse response, DateTimeOffset expiresAt)
    {
        var payload = JsonSerializer.Serialize(response, SerializerOptions);
        var buffer = Encoding.UTF8.GetBytes(payload + "|" + expiresAt.ToUnixTimeSeconds().ToString(CultureInfo.InvariantCulture));
        var hash = SHA256.HashData(buffer);
        return $"\"{Convert.ToHexString(hash)}\"";
    }

    private sealed record AuthorityJwksCacheEntry(AuthorityJwksResult Result, DateTimeOffset ExpiresAt);
}

internal sealed record AuthorityJwksResponse([property: JsonPropertyName("keys")] IReadOnlyCollection<JwksKeyEntry> Keys);

internal sealed record AuthorityJwksResult(
    AuthorityJwksResponse Response,
    string ETag,
    DateTimeOffset ExpiresAt,
    string CacheControl);

internal sealed class JwksKeyEntry
{
    [JsonPropertyName("kty")]
    public string? Kty { get; set; }

    [JsonPropertyName("use")]
    public string? Use { get; set; }

    [JsonPropertyName("kid")]
    public string? Kid { get; set; }

    [JsonPropertyName("alg")]
    public string? Alg { get; set; }

    [JsonPropertyName("crv")]
    public string? Crv { get; set; }

    [JsonPropertyName("x")]
    public string? X { get; set; }

    [JsonPropertyName("y")]
    public string? Y { get; set; }

    [JsonPropertyName("status")]
    [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
    public string? Status { get; set; }
}

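// Illustrative endpoint sketch (not part of the files in this commit): a minimal handler
// that serves the cached JWKS document produced by AuthorityJwksService.Get(), copying the
// computed ETag and Cache-Control values onto the response. The "/.well-known/jwks.json"
// route and the minimal-API shape are assumptions, not confirmed by this diff.
//
// app.MapGet("/.well-known/jwks.json", (AuthorityJwksService jwks, HttpContext http) =>
// {
//     var result = jwks.Get();                               // cached per JwksCacheLifetime
//     http.Response.Headers.ETag = result.ETag;              // hash of the serialized key set
//     http.Response.Headers.CacheControl = result.CacheControl;
//     return Results.Json(result.Response);                  // { "keys": [ ... ] }
// });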
@@ -1,170 +1,170 @@
# Authority Host Task Board — Epic 1: Aggregation-Only Contract
| ID | Status | Owner(s) | Depends on | Description | Exit Criteria |
|----|--------|----------|------------|-------------|---------------|
> 2025-10-26: Rate limiter metadata/audit records now include tenants, password grant scopes/tenants enforced, token persistence + tests updated. Docs refresh tracked via AUTH-AOC-19-003.
> 2025-10-27: Client credential ingestion scopes now require tenant assignment; access token validation backfills tenants and rejects cross-tenant mismatches with tests.
> 2025-10-27: `dotnet test` blocked — Concelier build fails (`AdvisoryObservationQueryService` returns `ImmutableHashSet<string?>`), preventing Authority test suite run; waiting on Concelier fix before rerun.
> 2025-10-26: Docs updated (`docs/11_AUTHORITY.md`, Concelier audit runbook, `docs/security/authority-scopes.md`); sample config highlights tenant-aware clients. Release notes + smoke verification pending (blocked on Concelier/Excititor smoke updates).
> 2025-10-27: Scope catalogue aligned with `advisory:ingest/advisory:read/vex:ingest/vex:read`, `aoc:verify` pairing documented, console/CLI references refreshed, and `etc/authority.yaml.sample` updated to require read scopes for verification clients.
> 2025-10-31: Client credentials and password grants now reject advisory/vex read or signals scopes without `aoc:verify`, enforce tenant assignment for `aoc:verify`, tag violations via `authority.aoc_scope_violation`, extend tests, and refresh scope catalogue docs/sample roles.

## Link-Not-Merge v1

| ID | Status | Owner(s) | Depends on | Description | Exit Criteria |
|----|--------|----------|------------|-------------|---------------|
> 2025-10-29: Rejected legacy `concelier.merge` scope during client credential validation, removed it from known scope catalog, blocked discovery/issuance, added regression tests, and refreshed scope documentation.

## Policy Engine v2

| ID | Status | Owner(s) | Depends on | Description | Exit Criteria |
|----|--------|----------|------------|-------------|---------------|
> 2025-10-26: Restricted `effective:write` to Policy Engine service identities with tenant requirement, registered full scope set, and tightened resource server default scope enforcement (unit tests pass).
> 2025-10-26: Authority docs now detail policy scopes/service identity guardrails with checklist; `authority.yaml.sample` includes `properties.serviceIdentity` example.

## Graph Explorer v1

| ID | Status | Owner(s) | Depends on | Description | Exit Criteria |
|----|--------|----------|------------|-------------|---------------|

## Policy Engine + Editor v1

| ID | Status | Owner(s) | Depends on | Description | Exit Criteria |
|----|--------|----------|------------|-------------|---------------|
| AUTH-POLICY-23-002 | BLOCKED (2025-10-29) | Authority Core & Security Guild | AUTH-POLICY-23-001 | Implement optional two-person rule for activation: require two distinct `policy:activate` approvals when configured; emit audit logs. | Activation endpoint enforces rule; audit logs contain approver IDs; tests cover 2-person path. |
> Blocked: Policy Engine/Studio have not yet exposed activation workflow endpoints or approval payloads needed to enforce dual-control (`WEB-POLICY-23-002`, `POLICY-ENGINE-23-002`). Revisit once activation contract lands.
| AUTH-POLICY-23-003 | BLOCKED (2025-10-29) | Authority Core & Docs Guild | AUTH-POLICY-23-001 | Update documentation and sample configs for policy roles, approval workflow, and signing requirements. | Docs updated with reviewer checklist; configuration examples validated. |
> Blocked pending AUTH-POLICY-23-002 dual-approval implementation so docs can capture final activation behaviour.
> 2025-10-27: Added `policy-cli` defaults to Authority config/secrets, refreshed CLI/CI documentation with the new scope bundle, recorded release migration guidance, and introduced `scripts/verify-policy-scopes.py` to guard against regressions.

## Graph & Vuln Explorer v1

| ID | Status | Owner(s) | Depends on | Description | Exit Criteria |
|----|--------|----------|------------|-------------|---------------|
> 2025-10-27: Paused work after exploratory spike (scope enforcement still outstanding); no functional changes merged.

## Orchestrator Dashboard

| ID | Status | Owner(s) | Depends on | Description | Exit Criteria |
|----|--------|----------|------------|-------------|---------------|
> 2025-10-31: Picked up during Console/Orchestrator alignment; focusing on scope catalog + tenant enforcement first.
> 2025-10-31: `orch:read` added to scope catalogue and Authority runtime, Console defaults include the scope, `Orch.Viewer` role documented, and client-credential tests enforce tenant requirements.
> 2025-10-27: Added `orch:operate` scope, enforced `operator_reason`/`operator_ticket` on token issuance, updated Authority configs/docs, and captured audit metadata for control actions.
> 2025-10-28: Policy gateway + scanner now pass the expanded token client signature (`null` metadata by default), test stubs capture the optional parameters, and Policy Gateway/Scanner suites are green after fixing the Concelier storage build break.
> 2025-10-28: Authority password-grant tests now hit the new constructors but still need updates to drop obsolete `IOptions` arguments before the suite can pass.
| AUTH-ORCH-34-001 | DONE (2025-11-02) | Authority Core & Security Guild | AUTH-ORCH-33-001 | Introduce `Orch.Admin` role with quota/backfill scopes, enforce audit reason on quota changes, and update offline defaults/docs. | Admin role available; quotas/backfills require scope + reason; tests confirm tenant isolation; documentation updated. |
> 2025-11-02: `orch:backfill` scope added with mandatory `backfill_reason`/`backfill_ticket`, client-credential validation and resource authorization paths emit audit fields, CLI picks up new configuration/env vars, and Authority docs/config samples updated for `Orch.Admin`.

## StellaOps Console (Sprint 23)

| ID | Status | Owner(s) | Depends on | Description | Exit Criteria |
|----|--------|----------|------------|-------------|---------------|
> 2025-10-29: Authorization code flow enabled with PKCE requirement, console client seeded in `authority.yaml.sample`, discovery docs updated, and console runbook guidance added.
> 2025-10-31: Added `/console/tenants`, `/console/profile`, `/console/token/introspect` endpoints with tenant header filter, scope enforcement (`ui.read`, `authority:tenants.read`), and structured audit events. Console test harness covers success/mismatch cases.
> 2025-10-28: `docs/security/console-security.md` drafted with PKCE + DPoP (120 s OpTok, 300 s fresh-auth) and scope table. Authority Core to confirm `/fresh-auth` semantics, token lifetimes, and scope bundles align before closing task.
> 2025-10-31: Security guide expanded for `/console` endpoints & orchestrator scope, sample YAML annotated, ops runbook updated, and release note `docs/updates/2025-10-31-console-security-refresh.md` published.
> 2025-10-31: Default access-token lifetime reduced to 120 s, console tests updated with dual auth schemes, docs/config/ops notes refreshed, release note logged.

## Policy Studio (Sprint 27)

| ID | Status | Owner(s) | Depends on | Description | Exit Criteria |
|----|--------|----------|------------|-------------|---------------|
> 2025-10-31: Added Policy Studio scope family (`policy:author/review/operate/audit`), updated OpenAPI + discovery headers, enforced tenant requirements in grant handlers, seeded new roles in Authority config/offline kit docs, and refreshed CLI/Console documentation + tests to validate the new catalogue.
| AUTH-POLICY-27-002 | DONE (2025-11-02) | Authority Core & Security Guild | AUTH-POLICY-27-001, REGISTRY-API-27-007 | Provide attestation signing service bindings (OIDC token exchange, cosign integration) and enforce publish/promote scope checks, fresh-auth requirements, and audit logging. | Publish/promote requests require fresh auth + correct scopes; attestations signed with validated identity; audit logs enriched with digest + tenant; integration tests pass. |
> Docs dependency: `DOCS-POLICY-27-009` awaiting signing guidance from this work.
> 2025-11-02: Added `policy:publish`/`policy:promote` scopes with interactive-only enforcement, metadata parameters (`policy_reason`, `policy_ticket`, `policy_digest`), fresh-auth token validation, audit augmentations, and updated config/docs references.
| AUTH-POLICY-27-003 | DONE (2025-11-04) | Authority Core & Docs Guild | AUTH-POLICY-27-001, AUTH-POLICY-27-002 | Update Authority configuration/docs for Policy Studio roles, signing policies, approval workflows, and CLI integration; include compliance checklist. | Docs merged; samples validated; governance checklist appended; release notes updated. |
> 2025-11-04: Policy Studio roles/scopes documented across `docs/11_AUTHORITY.md`, sample configs, and OpenAPI; compliance checklist appended and Authority tests rerun to validate fresh-auth + scope enforcement.

## Exceptions v1

| ID | Status | Owner(s) | Depends on | Description | Exit Criteria |
|----|--------|----------|------------|-------------|---------------|
> 2025-10-29: Added exception scopes + routing template options, enforced MFA requirement in password grant handlers, updated configuration samples.
> 2025-10-31: Authority scopes/routing docs updated (`docs/security/authority-scopes.md`, `docs/11_AUTHORITY.md`, `docs/policy/exception-effects.md`), monitoring guide covers new MFA audit events, and `etc/authority.yaml.sample` now demonstrates exception clients/templates.

## Reachability v1

| ID | Status | Owner(s) | Depends on | Description | Exit Criteria |
|----|--------|----------|------------|-------------|---------------|
> 2025-10-29: Signals scopes added with tenant + aoc:verify enforcement; sensors guided via SignalsUploader template; tests cover gating.

## Vulnerability Explorer (Sprint 29)
|
||||
|
||||
| ID | Status | Owner(s) | Depends on | Description | Exit Criteria |
|
||||
|----|--------|----------|------------|-------------|---------------|
|
||||
| AUTH-VULN-29-001 | DONE (2025-11-03) | Authority Core & Security Guild | AUTH-POLICY-27-001 | Define Vuln Explorer scopes/roles (`vuln:view`, `vuln:investigate`, `vuln:operate`, `vuln:audit`) with ABAC attributes (env, owner, business_tier) and update discovery metadata/offline kit defaults. | Roles/scopes published; issuer templates updated; integration tests cover ABAC filters; docs refreshed. |
|
||||
| AUTH-VULN-29-002 | DONE (2025-11-03) | Authority Core & Security Guild | AUTH-VULN-29-001, LEDGER-29-002 | Enforce CSRF/anti-forgery tokens for workflow actions, sign attachment tokens, and record audit logs with ledger event hashes. | Workflow calls require valid tokens; audit logs include ledger references; security tests cover token expiry/abuse. |
|
||||
| AUTH-VULN-29-003 | DONE (2025-11-04) | Authority Core & Docs Guild | AUTH-VULN-29-001..002 | Update security docs/config samples for Vuln Explorer roles, ABAC policies, attachment signing, and ledger verification guidance. | Docs merged with compliance checklist; configuration examples validated; release notes updated. |
|
||||
> 2025-11-03: Vuln workflow CSRF + attachment token services live with audit enrichment and negative-path tests. Awaiting completion of full Authority suite run after repository-wide build finishes.
|
||||
> 2025-11-04: Verified Vuln Explorer RBAC/ABAC coverage in Authority docs/security guides, attachment token guidance, and offline samples; Authority tests rerun confirming ledger-token + anti-forgery behaviours.
|
||||
|
||||
## Advisory AI (Sprint 31)
|
||||
|
||||
| ID | Status | Owner(s) | Depends on | Description | Exit Criteria |
|
||||
|----|--------|----------|------------|-------------|---------------|
|
||||
| AUTH-AIAI-31-001 | DONE (2025-11-01) | Authority Core & Security Guild | AUTH-VULN-29-001 | Define Advisory AI scopes (`advisory-ai:view`, `advisory-ai:operate`, `advisory-ai:admin`) and remote inference toggles; update discovery metadata/offline defaults. | Scopes/flags published; integration tests cover RBAC + opt-in settings; docs updated. |
|
||||
| AUTH-AIAI-31-002 | DONE (2025-11-01) | Authority Core & Security Guild | AUTH-AIAI-31-001, AIAI-31-006 | Enforce anonymized prompt logging, tenant consent for remote inference, and audit logging of assistant tasks. | Logging/audit flows verified; privacy review passed; docs updated. |
|
||||
|
||||
## Export Center
|
||||
| ID | Status | Owner(s) | Depends on | Description | Exit Criteria |
|
||||
|----|--------|----------|------------|-------------|---------------|
|
||||
|
||||
## Notifications Studio
|
||||
| ID | Status | Owner(s) | Depends on | Description | Exit Criteria |
|
||||
|----|--------|----------|------------|-------------|---------------|
|
||||
| AUTH-NOTIFY-38-001 | DONE (2025-11-01) | Authority Core & Security Guild | — | Define `Notify.Viewer`, `Notify.Operator`, `Notify.Admin` scopes/roles, update discovery metadata, offline defaults, and issuer templates. | Scopes available; metadata updated; tests ensure enforcement; offline kit defaults refreshed. |
|
||||
| AUTH-NOTIFY-40-001 | DONE (2025-11-02) | Authority Core & Security Guild | AUTH-NOTIFY-38-001, WEB-NOTIFY-40-001 | Implement signed ack token key rotation, webhook allowlists, admin-only escalation settings, and audit logging of ack actions. | Ack tokens signed/rotated; webhook allowlists enforced; admin enforcement validated; audit logs capture ack/resolution. |
|
||||
> 2025-11-02: `/notify/ack-tokens/rotate` exposed (notify.admin), emits `notify.ack.key_rotated|notify.ack.key_rotation_failed`, and DSSE rotation tests cover allowlist + scope enforcement.
|
||||
| AUTH-NOTIFY-42-001 | DONE (2025-11-02) | Authority Core & Security Guild | AUTH-NOTIFY-40-001 | Investigate ack token rotation 500 errors (test Rotate_ReturnsBadRequest_WhenKeyIdMissing_AndAuditsFailure still failing). Capture logs, identify root cause, and patch handler. | Failure mode understood; fix merged; regression test passes. |
|
||||
> 2025-11-02: Aliased `StellaOpsBearer` to the test auth handler, corrected bootstrap `/notifications/ack-tokens/rotate` defaults, and validated `Rotate_ReturnsBadRequest_WhenKeyIdMissing_AndAuditsFailure` via targeted `dotnet test`.
|
||||
|
||||
|
||||
## CLI Parity & Task Packs
|
||||
| ID | Status | Owner(s) | Depends on | Description | Exit Criteria |
|
||||
|----|--------|----------|------------|-------------|---------------|
|
||||
| AUTH-PACKS-41-001 | DONE (2025-11-04) | Authority Core & Security Guild | AUTH-AOC-19-001 | Define CLI SSO profiles and pack scopes (`Packs.Read`, `Packs.Write`, `Packs.Run`, `Packs.Approve`), update discovery metadata, offline defaults, and issuer templates. | Scopes available; metadata updated; tests ensure enforcement; offline kit templates refreshed. |
|
||||
> 2025-11-02: Added Pack scope policies, Authority role defaults, and CLI profile guidance covering operator/publisher/approver flows.
|
||||
> 2025-11-02: Shared OpenSSL 1.1 shim feeds Authority & Signals Mongo2Go harnesses so pack scope coverage keeps running on OpenSSL 3 hosts (AUTH-PACKS-41-001).
|
||||
> 2025-11-04: Discovery metadata/OpenAPI advertise packs scopes, configs/offline kit templates bundle new roles, and Authority tests re-run to validate tenant gating for `packs.*`.
|
||||
| AUTH-PACKS-43-001 | BLOCKED (2025-10-27) | Authority Core & Security Guild | AUTH-PACKS-41-001, TASKRUN-42-001, ORCH-SVC-42-101 | Enforce pack signing policies, approval RBAC checks, CLI CI token scopes, and audit logging for approvals. | Signing policies enforced; approvals require correct roles; CI token scope tests pass; audit logs recorded. |
|
||||
> Blocked: Task Runner approval APIs (`ORCH-SVC-42-101`, `TASKRUN-42-001`) still outstanding. Pack scope catalog (AUTH-PACKS-41-001) landed 2025-11-04; resume once execution/approval contracts are published.
|
||||
|
||||
## Authority-Backed Scopes & Tenancy (Epic 14)
|
||||
| ID | Status | Owner(s) | Depends on | Description | Exit Criteria |
|
||||
|----|--------|----------|------------|-------------|---------------|
|
||||
> 2025-10-28: Tidied advisory raw idempotency migration to avoid LINQ-on-`BsonValue` (explicit array copy) while continuing duplicate guardrail validation; scoped scanner/policy token call sites updated to honor new metadata parameter.
|
||||
| AUTH-TEN-49-001 | DONE (2025-11-04) | Authority Core & Security Guild | AUTH-TEN-47-001 | Implement service accounts & delegation tokens (`act` chain), per-tenant quotas, audit stream of auth decisions, and revocation APIs. | Service tokens minted with scopes/TTL; delegation logged; quotas configurable; audit stream live; docs updated. |
|
||||
> 2025-11-02: Authority bootstrap test harness now seeds service accounts via AuthorityDelegation options; `/internal/service-accounts` endpoints validated with targeted vstest run.
|
||||
> 2025-11-02: Added Mongo service-account store, seeded options/collection initializers, token persistence metadata (`tokenKind`, `serviceAccountId`, `actorChain`), and docs/config samples. Introduced quota checks + tests covering service account issuance and persistence.
|
||||
> 2025-11-02: Documented bootstrap service-account admin APIs in `docs/11_AUTHORITY.md`, noting API key requirements and stable upsert behaviour.
|
||||
> 2025-11-03: Seeded explicit enabled service-account fixtures for integration tests and reran `StellaOps.Authority.Tests` to greenlight `/internal/service-accounts` listing + revocation scenarios.
|
||||
> 2025-11-04: Confirmed service-account docs/config examples, quota tuning, and audit stream wiring; Authority suite re-executed to cover issuance/listing/revocation flows.
|
||||
|
||||
## Observability & Forensics (Epic 15)
|
||||
|
||||
| ID | Status | Owner(s) | Depends on | Description | Exit Criteria |
|
||||
|----|--------|----------|------------|-------------|---------------|
|
||||
| AUTH-OBS-50-001 | DONE (2025-11-02) | Authority Core & Security Guild | AUTH-AOC-19-001 | Introduce scopes `obs:read`, `timeline:read`, `timeline:write`, `evidence:create`, `evidence:read`, `evidence:hold`, `attest:read`, and `obs:incident` (all tenant-scoped). Update discovery metadata, offline defaults, and scope grammar docs. | Scopes exposed via metadata; issuer templates updated; offline kit seeded; integration tests cover new scopes. |
|
||||
| AUTH-OBS-52-001 | DONE (2025-11-02) | Authority Core & Security Guild | AUTH-OBS-50-001, TIMELINE-OBS-52-003, EVID-OBS-53-003 | Configure resource server policies for Timeline Indexer, Evidence Locker, Exporter, and Observability APIs enforcing new scopes + tenant claims. Emit audit events including scope usage and trace IDs. | Policies deployed; unauthorized access blocked; audit logs prove scope usage; contract tests updated. |
|
||||
| AUTH-OBS-55-001 | DONE (2025-11-02) | Authority Core & Security Guild, Ops Guild | AUTH-OBS-50-001, WEB-OBS-55-001 | Harden incident mode authorization: require `obs:incident` scope + fresh auth, log activation reason, and expose verification endpoint for auditors. Update docs/runbooks. | Incident activate/deactivate requires scope; audit entries logged; docs updated with imposed rule reminder. |
|
||||
|
||||
## Air-Gapped Mode (Epic 16)
|
||||
|
||||
| ID | Status | Owner(s) | Depends on | Description | Exit Criteria |
|
||||
|----|--------|----------|------------|-------------|---------------|
|
||||
| AUTH-AIRGAP-56-001 | DONE (2025-11-04) | Authority Core & Security Guild | AIRGAP-CTL-56-001 | Provision new scopes (`airgap:seal`, `airgap:import`, `airgap:status:read`) in configuration metadata, offline kit defaults, and issuer templates. | Scopes exposed in discovery docs; offline kit updated; integration tests cover issuance. |
|
||||
| AUTH-AIRGAP-56-002 | DONE (2025-11-04) | Authority Core & Security Guild | AUTH-AIRGAP-56-001, AIRGAP-IMP-58-001 | Audit import actions with actor, tenant, bundle ID, and trace ID; expose `/authority/audit/airgap` endpoint. | Audit records persisted; endpoint paginates results; tests cover RBAC + filtering. |
|
||||
> 2025-11-04: Airgap scope constants are wired through discovery metadata, `etc/authority.yaml.sample`, and offline kit docs; scope issuance tests executed via `dotnet test`.
|
||||
> 2025-11-04: `/authority/audit/airgap` API persists tenant-scoped audit entries with pagination and authorization guards validated by the Authority integration suite (187 tests).
|
||||
| AUTH-AIRGAP-57-001 | BLOCKED (2025-11-01) | Authority Core & Security Guild, DevOps Guild | AUTH-AIRGAP-56-001, DEVOPS-AIRGAP-57-002 | Enforce sealed-mode CI gating by refusing token issuance when declared sealed install lacks sealing confirmation. | Awaiting clarified sealed-confirmation contract and configuration structure before implementation. |
|
||||
> 2025-11-01: AUTH-AIRGAP-57-001 blocked pending guidance on sealed-confirmation contract and configuration expectations before gating changes (Authority Core & Security Guild, DevOps Guild).
|
||||
|
||||
## SDKs & OpenAPI (Epic 17)
|
||||
| ID | Status | Owner(s) | Depends on | Description | Exit Criteria |
|
||||
|----|--------|----------|------------|-------------|---------------|
|
||||
> 2025-10-28: Auth OpenAPI authored at `src/Api/StellaOps.Api.OpenApi/authority/openapi.yaml` covering `/token`, `/introspect`, `/revoke`, `/jwks`, scope catalog, and error envelopes; parsed via PyYAML sanity check and referenced in Epic 17 docs.
|
||||
> 2025-10-28: Added `/.well-known/openapi` endpoint wiring cached spec metadata, YAML/JSON negotiation, HTTP cache headers, and tests verifying ETag + Accept handling. Authority spec (`src/Api/StellaOps.Api.OpenApi/authority/openapi.yaml`) now includes grant/scope extensions.
|
||||
| AUTH-OAS-62-001 | DONE (2025-11-02) | Authority Core & Security Guild, SDK Generator Guild | AUTH-OAS-61-001, SDKGEN-63-001 | Provide SDK helpers for OAuth2/PAT flows, tenancy override header; add integration tests. | SDKs expose auth helpers; tests cover token issuance; docs updated. |
|
||||
> 2025-11-02: `AddStellaOpsApiAuthentication` shipped (OAuth2 + PAT), tenant header injection added, and client tests updated for caching behaviour.
|
||||
| AUTH-OAS-63-001 | DONE (2025-11-02) | Authority Core & Security Guild, API Governance Guild | APIGOV-63-001 | Emit deprecation headers and notifications for legacy auth endpoints. | Headers emitted; notifications verified; migration guide published. |
|
||||
> 2025-11-02: AUTH-OAS-63-001 completed — legacy OAuth shims emit Deprecation/Sunset/Warning headers, audit events captured, and migration guide published (Authority Core & Security Guild, API Governance Guild).
|
||||
# Authority Host Task Board — Epic 1: Aggregation-Only Contract
|
||||
| ID | Status | Owner(s) | Depends on | Description | Exit Criteria |
|
||||
|----|--------|----------|------------|-------------|---------------|
|
||||
> 2025-10-26: Rate limiter metadata/audit records now include tenants, password grant scopes/tenants enforced, token persistence + tests updated. Docs refresh tracked via AUTH-AOC-19-003.
|
||||
> 2025-10-27: Client credential ingestion scopes now require tenant assignment; access token validation backfills tenants and rejects cross-tenant mismatches with tests.
|
||||
> 2025-10-27: `dotnet test` blocked — Concelier build fails (`AdvisoryObservationQueryService` returns `ImmutableHashSet<string?>`), preventing Authority test suite run; waiting on Concelier fix before rerun.
|
||||
> 2025-10-26: Docs updated (`docs/11_AUTHORITY.md`, Concelier audit runbook, `docs/security/authority-scopes.md`); sample config highlights tenant-aware clients. Release notes + smoke verification pending (blocked on Concelier/Excititor smoke updates).
|
||||
> 2025-10-27: Scope catalogue aligned with `advisory:ingest/advisory:read/vex:ingest/vex:read`, `aoc:verify` pairing documented, console/CLI references refreshed, and `etc/authority.yaml.sample` updated to require read scopes for verification clients.
|
||||
> 2025-10-31: Client credentials and password grants now reject advisory/vex read or signals scopes without `aoc:verify`, enforce tenant assignment for `aoc:verify`, tag violations via `authority.aoc_scope_violation`, extend tests, and refresh scope catalogue docs/sample roles.
|
||||
|
||||
## Link-Not-Merge v1
|
||||
|
||||
| ID | Status | Owner(s) | Depends on | Description | Exit Criteria |
|
||||
|----|--------|----------|------------|-------------|---------------|
|
||||
> 2025-10-29: Rejected legacy `concelier.merge` scope during client credential validation, removed it from known scope catalog, blocked discovery/issuance, added regression tests, and refreshed scope documentation.
|
||||
|
||||
## Policy Engine v2
|
||||
|
||||
| ID | Status | Owner(s) | Depends on | Description | Exit Criteria |
|
||||
|----|--------|----------|------------|-------------|---------------|
|
||||
> 2025-10-26: Restricted `effective:write` to Policy Engine service identities with tenant requirement, registered full scope set, and tightened resource server default scope enforcement (unit tests pass).
|
||||
> 2025-10-26: Authority docs now detail policy scopes/service identity guardrails with checklist; `authority.yaml.sample` includes `properties.serviceIdentity` example.
|
||||
|
||||
## Graph Explorer v1
|
||||
|
||||
| ID | Status | Owner(s) | Depends on | Description | Exit Criteria |
|
||||
|----|--------|----------|------------|-------------|---------------|
|
||||
|
||||
## Policy Engine + Editor v1
|
||||
|
||||
| ID | Status | Owner(s) | Depends on | Description | Exit Criteria |
|
||||
|----|--------|----------|------------|-------------|---------------|
|
||||
| AUTH-POLICY-23-002 | BLOCKED (2025-10-29) | Authority Core & Security Guild | AUTH-POLICY-23-001 | Implement optional two-person rule for activation: require two distinct `policy:activate` approvals when configured; emit audit logs. | Activation endpoint enforces rule; audit logs contain approver IDs; tests cover 2-person path. |
|
||||
> Blocked: Policy Engine/Studio have not yet exposed activation workflow endpoints or approval payloads needed to enforce dual-control (`WEB-POLICY-23-002`, `POLICY-ENGINE-23-002`). Revisit once activation contract lands.
|
||||
| AUTH-POLICY-23-003 | BLOCKED (2025-10-29) | Authority Core & Docs Guild | AUTH-POLICY-23-001 | Update documentation and sample configs for policy roles, approval workflow, and signing requirements. | Docs updated with reviewer checklist; configuration examples validated. |
|
||||
> Blocked pending AUTH-POLICY-23-002 dual-approval implementation so docs can capture final activation behaviour.
|
||||
> 2025-10-27: Added `policy-cli` defaults to Authority config/secrets, refreshed CLI/CI documentation with the new scope bundle, recorded release migration guidance, and introduced `scripts/verify-policy-scopes.py` to guard against regressions.
|
||||
|
||||
## Graph & Vuln Explorer v1
|
||||
|
||||
| ID | Status | Owner(s) | Depends on | Description | Exit Criteria |
|
||||
|----|--------|----------|------------|-------------|---------------|
|
||||
> 2025-10-27: Paused work after exploratory spike (scope enforcement still outstanding); no functional changes merged.
|
||||
|
||||
## Orchestrator Dashboard
|
||||
|
||||
| ID | Status | Owner(s) | Depends on | Description | Exit Criteria |
|
||||
|----|--------|----------|------------|-------------|---------------|
|
||||
> 2025-10-31: Picked up during Console/Orchestrator alignment; focusing on scope catalog + tenant enforcement first.
|
||||
> 2025-10-31: `orch:read` added to scope catalogue and Authority runtime, Console defaults include the scope, `Orch.Viewer` role documented, and client-credential tests enforce tenant requirements.
|
||||
> 2025-10-27: Added `orch:operate` scope, enforced `operator_reason`/`operator_ticket` on token issuance, updated Authority configs/docs, and captured audit metadata for control actions.
|
||||
> 2025-10-28: Policy gateway + scanner now pass the expanded token client signature (`null` metadata by default), test stubs capture the optional parameters, and Policy Gateway/Scanner suites are green after fixing the Concelier storage build break.
|
||||
> 2025-10-28: Authority password-grant tests now hit the new constructors but still need updates to drop obsolete `IOptions` arguments before the suite can pass.
|
||||
| AUTH-ORCH-34-001 | DONE (2025-11-02) | Authority Core & Security Guild | AUTH-ORCH-33-001 | Introduce `Orch.Admin` role with quota/backfill scopes, enforce audit reason on quota changes, and update offline defaults/docs. | Admin role available; quotas/backfills require scope + reason; tests confirm tenant isolation; documentation updated. |
|
||||
> 2025-11-02: `orch:backfill` scope added with mandatory `backfill_reason`/`backfill_ticket`, client-credential validation and resource authorization paths emit audit fields, CLI picks up new configuration/env vars, and Authority docs/config samples updated for `Orch.Admin`.
|
||||
|
||||
## StellaOps Console (Sprint 23)
|
||||
|
||||
| ID | Status | Owner(s) | Depends on | Description | Exit Criteria |
|
||||
|----|--------|----------|------------|-------------|---------------|
|
||||
> 2025-10-29: Authorization code flow enabled with PKCE requirement, console client seeded in `authority.yaml.sample`, discovery docs updated, and console runbook guidance added.
|
||||
> 2025-10-31: Added `/console/tenants`, `/console/profile`, `/console/token/introspect` endpoints with tenant header filter, scope enforcement (`ui.read`, `authority:tenants.read`), and structured audit events. Console test harness covers success/mismatch cases.
|
||||
> 2025-10-28: `docs/security/console-security.md` drafted with PKCE + DPoP (120 s OpTok, 300 s fresh-auth) and scope table. Authority Core to confirm `/fresh-auth` semantics, token lifetimes, and scope bundles align before closing task.
|
||||
> 2025-10-31: Security guide expanded for `/console` endpoints & orchestrator scope, sample YAML annotated, ops runbook updated, and release note `docs/updates/2025-10-31-console-security-refresh.md` published.
|
||||
> 2025-10-31: Default access-token lifetime reduced to 120 s, console tests updated with dual auth schemes, docs/config/ops notes refreshed, release note logged.
|
||||
|
||||
## Policy Studio (Sprint 27)
|
||||
|
||||
| ID | Status | Owner(s) | Depends on | Description | Exit Criteria |
|
||||
|----|--------|----------|------------|-------------|---------------|
|
||||
> 2025-10-31: Added Policy Studio scope family (`policy:author/review/operate/audit`), updated OpenAPI + discovery headers, enforced tenant requirements in grant handlers, seeded new roles in Authority config/offline kit docs, and refreshed CLI/Console documentation + tests to validate the new catalogue.
|
||||
| AUTH-POLICY-27-002 | DONE (2025-11-02) | Authority Core & Security Guild | AUTH-POLICY-27-001, REGISTRY-API-27-007 | Provide attestation signing service bindings (OIDC token exchange, cosign integration) and enforce publish/promote scope checks, fresh-auth requirements, and audit logging. | Publish/promote requests require fresh auth + correct scopes; attestations signed with validated identity; audit logs enriched with digest + tenant; integration tests pass. |
|
||||
> Docs dependency: `DOCS-POLICY-27-009` awaiting signing guidance from this work.
|
||||
> 2025-11-02: Added `policy:publish`/`policy:promote` scopes with interactive-only enforcement, metadata parameters (`policy_reason`, `policy_ticket`, `policy_digest`), fresh-auth token validation, audit augmentations, and updated config/docs references.
|
||||
| AUTH-POLICY-27-003 | DONE (2025-11-04) | Authority Core & Docs Guild | AUTH-POLICY-27-001, AUTH-POLICY-27-002 | Update Authority configuration/docs for Policy Studio roles, signing policies, approval workflows, and CLI integration; include compliance checklist. | Docs merged; samples validated; governance checklist appended; release notes updated. |
|
||||
> 2025-11-04: Policy Studio roles/scopes documented across `docs/11_AUTHORITY.md`, sample configs, and OpenAPI; compliance checklist appended and Authority tests rerun to validate fresh-auth + scope enforcement.
|
||||
|
||||
## Exceptions v1
|
||||
|
||||
| ID | Status | Owner(s) | Depends on | Description | Exit Criteria |
|
||||
|----|--------|----------|------------|-------------|---------------|
|
||||
> 2025-10-29: Added exception scopes + routing template options, enforced MFA requirement in password grant handlers, updated configuration samples.
|
||||
> 2025-10-31: Authority scopes/routing docs updated (`docs/security/authority-scopes.md`, `docs/11_AUTHORITY.md`, `docs/policy/exception-effects.md`), monitoring guide covers new MFA audit events, and `etc/authority.yaml.sample` now demonstrates exception clients/templates.
|
||||
|
||||
## Reachability v1
|
||||
|
||||
| ID | Status | Owner(s) | Depends on | Description | Exit Criteria |
|
||||
|----|--------|----------|------------|-------------|---------------|
|
||||
> 2025-10-29: Signals scopes added with tenant + aoc:verify enforcement; sensors guided via SignalsUploader template; tests cover gating.
|
||||
|
||||
## Vulnerability Explorer (Sprint 29)
|
||||
|
||||
| ID | Status | Owner(s) | Depends on | Description | Exit Criteria |
|
||||
|----|--------|----------|------------|-------------|---------------|
|
||||
| AUTH-VULN-29-001 | DONE (2025-11-03) | Authority Core & Security Guild | AUTH-POLICY-27-001 | Define Vuln Explorer scopes/roles (`vuln:view`, `vuln:investigate`, `vuln:operate`, `vuln:audit`) with ABAC attributes (env, owner, business_tier) and update discovery metadata/offline kit defaults. | Roles/scopes published; issuer templates updated; integration tests cover ABAC filters; docs refreshed. |
|
||||
| AUTH-VULN-29-002 | DONE (2025-11-03) | Authority Core & Security Guild | AUTH-VULN-29-001, LEDGER-29-002 | Enforce CSRF/anti-forgery tokens for workflow actions, sign attachment tokens, and record audit logs with ledger event hashes. | Workflow calls require valid tokens; audit logs include ledger references; security tests cover token expiry/abuse. |
|
||||
| AUTH-VULN-29-003 | DONE (2025-11-04) | Authority Core & Docs Guild | AUTH-VULN-29-001..002 | Update security docs/config samples for Vuln Explorer roles, ABAC policies, attachment signing, and ledger verification guidance. | Docs merged with compliance checklist; configuration examples validated; release notes updated. |
|
||||
> 2025-11-03: Vuln workflow CSRF + attachment token services live with audit enrichment and negative-path tests. Awaiting completion of full Authority suite run after repository-wide build finishes.
|
||||
> 2025-11-04: Verified Vuln Explorer RBAC/ABAC coverage in Authority docs/security guides, attachment token guidance, and offline samples; Authority tests rerun confirming ledger-token + anti-forgery behaviours.
|
||||
|
||||
## Advisory AI (Sprint 31)
|
||||
|
||||
| ID | Status | Owner(s) | Depends on | Description | Exit Criteria |
|
||||
|----|--------|----------|------------|-------------|---------------|
|
||||
| AUTH-AIAI-31-001 | DONE (2025-11-01) | Authority Core & Security Guild | AUTH-VULN-29-001 | Define Advisory AI scopes (`advisory-ai:view`, `advisory-ai:operate`, `advisory-ai:admin`) and remote inference toggles; update discovery metadata/offline defaults. | Scopes/flags published; integration tests cover RBAC + opt-in settings; docs updated. |
|
||||
| AUTH-AIAI-31-002 | DONE (2025-11-01) | Authority Core & Security Guild | AUTH-AIAI-31-001, AIAI-31-006 | Enforce anonymized prompt logging, tenant consent for remote inference, and audit logging of assistant tasks. | Logging/audit flows verified; privacy review passed; docs updated. |
|
||||
|
||||
## Export Center
|
||||
| ID | Status | Owner(s) | Depends on | Description | Exit Criteria |
|
||||
|----|--------|----------|------------|-------------|---------------|
|
||||
|
||||
## Notifications Studio
|
||||
| ID | Status | Owner(s) | Depends on | Description | Exit Criteria |
|
||||
|----|--------|----------|------------|-------------|---------------|
|
||||
| AUTH-NOTIFY-38-001 | DONE (2025-11-01) | Authority Core & Security Guild | — | Define `Notify.Viewer`, `Notify.Operator`, `Notify.Admin` scopes/roles, update discovery metadata, offline defaults, and issuer templates. | Scopes available; metadata updated; tests ensure enforcement; offline kit defaults refreshed. |
|
||||
| AUTH-NOTIFY-40-001 | DONE (2025-11-02) | Authority Core & Security Guild | AUTH-NOTIFY-38-001, WEB-NOTIFY-40-001 | Implement signed ack token key rotation, webhook allowlists, admin-only escalation settings, and audit logging of ack actions. | Ack tokens signed/rotated; webhook allowlists enforced; admin enforcement validated; audit logs capture ack/resolution. |
|
||||
> 2025-11-02: `/notify/ack-tokens/rotate` exposed (notify.admin), emits `notify.ack.key_rotated|notify.ack.key_rotation_failed`, and DSSE rotation tests cover allowlist + scope enforcement.
|
||||
| AUTH-NOTIFY-42-001 | DONE (2025-11-02) | Authority Core & Security Guild | AUTH-NOTIFY-40-001 | Investigate ack token rotation 500 errors (test `Rotate_ReturnsBadRequest_WhenKeyIdMissing_AndAuditsFailure` was still failing). Capture logs, identify root cause, and patch handler. | Failure mode understood; fix merged; regression test passes. |
|
||||
> 2025-11-02: Aliased `StellaOpsBearer` to the test auth handler, corrected bootstrap `/notifications/ack-tokens/rotate` defaults, and validated `Rotate_ReturnsBadRequest_WhenKeyIdMissing_AndAuditsFailure` via targeted `dotnet test`.
|
||||
|
||||
|
||||
## CLI Parity & Task Packs
|
||||
| ID | Status | Owner(s) | Depends on | Description | Exit Criteria |
|
||||
|----|--------|----------|------------|-------------|---------------|
|
||||
| AUTH-PACKS-41-001 | DONE (2025-11-04) | Authority Core & Security Guild | AUTH-AOC-19-001 | Define CLI SSO profiles and pack scopes (`Packs.Read`, `Packs.Write`, `Packs.Run`, `Packs.Approve`), update discovery metadata, offline defaults, and issuer templates. | Scopes available; metadata updated; tests ensure enforcement; offline kit templates refreshed. |
|
||||
> 2025-11-02: Added Pack scope policies, Authority role defaults, and CLI profile guidance covering operator/publisher/approver flows.
|
||||
> 2025-11-02: Shared OpenSSL 1.1 shim feeds Authority & Signals Mongo2Go harnesses so pack scope coverage keeps running on OpenSSL 3 hosts (AUTH-PACKS-41-001).
|
||||
> 2025-11-04: Discovery metadata/OpenAPI advertise packs scopes, configs/offline kit templates bundle new roles, and Authority tests re-run to validate tenant gating for `packs.*`.
|
||||
| AUTH-PACKS-43-001 | BLOCKED (2025-10-27) | Authority Core & Security Guild | AUTH-PACKS-41-001, TASKRUN-42-001, ORCH-SVC-42-101 | Enforce pack signing policies, approval RBAC checks, CLI CI token scopes, and audit logging for approvals. | Signing policies enforced; approvals require correct roles; CI token scope tests pass; audit logs recorded. |
|
||||
> Blocked: Task Runner approval APIs (`ORCH-SVC-42-101`, `TASKRUN-42-001`) still outstanding. Pack scope catalog (AUTH-PACKS-41-001) landed 2025-11-04; resume once execution/approval contracts are published.
|
||||
|
||||
## Authority-Backed Scopes & Tenancy (Epic 14)
|
||||
| ID | Status | Owner(s) | Depends on | Description | Exit Criteria |
|
||||
|----|--------|----------|------------|-------------|---------------|
|
||||
> 2025-10-28: Tidied advisory raw idempotency migration to avoid LINQ-on-`BsonValue` (explicit array copy) while continuing duplicate guardrail validation; scoped scanner/policy token call sites updated to honor new metadata parameter.
|
||||
| AUTH-TEN-49-001 | DONE (2025-11-04) | Authority Core & Security Guild | AUTH-TEN-47-001 | Implement service accounts & delegation tokens (`act` chain), per-tenant quotas, audit stream of auth decisions, and revocation APIs. | Service tokens minted with scopes/TTL; delegation logged; quotas configurable; audit stream live; docs updated. |
|
||||
> 2025-11-02: Authority bootstrap test harness now seeds service accounts via AuthorityDelegation options; `/internal/service-accounts` endpoints validated with targeted vstest run.
|
||||
> 2025-11-02: Added Mongo service-account store, seeded options/collection initializers, token persistence metadata (`tokenKind`, `serviceAccountId`, `actorChain`), and docs/config samples. Introduced quota checks + tests covering service account issuance and persistence.
|
||||
> 2025-11-02: Documented bootstrap service-account admin APIs in `docs/11_AUTHORITY.md`, noting API key requirements and stable upsert behaviour.
|
||||
> 2025-11-03: Seeded explicit enabled service-account fixtures for integration tests and reran `StellaOps.Authority.Tests` to greenlight `/internal/service-accounts` listing + revocation scenarios.
|
||||
> 2025-11-04: Confirmed service-account docs/config examples, quota tuning, and audit stream wiring; Authority suite re-executed to cover issuance/listing/revocation flows.
|
||||
|
||||
## Observability & Forensics (Epic 15)
|
||||
|
||||
| ID | Status | Owner(s) | Depends on | Description | Exit Criteria |
|
||||
|----|--------|----------|------------|-------------|---------------|
|
||||
| AUTH-OBS-50-001 | DONE (2025-11-02) | Authority Core & Security Guild | AUTH-AOC-19-001 | Introduce scopes `obs:read`, `timeline:read`, `timeline:write`, `evidence:create`, `evidence:read`, `evidence:hold`, `attest:read`, and `obs:incident` (all tenant-scoped). Update discovery metadata, offline defaults, and scope grammar docs. | Scopes exposed via metadata; issuer templates updated; offline kit seeded; integration tests cover new scopes. |
|
||||
| AUTH-OBS-52-001 | DONE (2025-11-02) | Authority Core & Security Guild | AUTH-OBS-50-001, TIMELINE-OBS-52-003, EVID-OBS-53-003 | Configure resource server policies for Timeline Indexer, Evidence Locker, Exporter, and Observability APIs enforcing new scopes + tenant claims. Emit audit events including scope usage and trace IDs. | Policies deployed; unauthorized access blocked; audit logs prove scope usage; contract tests updated. |
|
||||
| AUTH-OBS-55-001 | DONE (2025-11-02) | Authority Core & Security Guild, Ops Guild | AUTH-OBS-50-001, WEB-OBS-55-001 | Harden incident mode authorization: require `obs:incident` scope + fresh auth, log activation reason, and expose verification endpoint for auditors. Update docs/runbooks. | Incident activate/deactivate requires scope; audit entries logged; docs updated with imposed rule reminder. |
|
||||
|
||||
## Air-Gapped Mode (Epic 16)
|
||||
|
||||
| ID | Status | Owner(s) | Depends on | Description | Exit Criteria |
|
||||
|----|--------|----------|------------|-------------|---------------|
|
||||
| AUTH-AIRGAP-56-001 | DONE (2025-11-04) | Authority Core & Security Guild | AIRGAP-CTL-56-001 | Provision new scopes (`airgap:seal`, `airgap:import`, `airgap:status:read`) in configuration metadata, offline kit defaults, and issuer templates. | Scopes exposed in discovery docs; offline kit updated; integration tests cover issuance. |
|
||||
| AUTH-AIRGAP-56-002 | DONE (2025-11-04) | Authority Core & Security Guild | AUTH-AIRGAP-56-001, AIRGAP-IMP-58-001 | Audit import actions with actor, tenant, bundle ID, and trace ID; expose `/authority/audit/airgap` endpoint. | Audit records persisted; endpoint paginates results; tests cover RBAC + filtering. |
|
||||
> 2025-11-04: Airgap scope constants are wired through discovery metadata, `etc/authority.yaml.sample`, and offline kit docs; scope issuance tests executed via `dotnet test`.
|
||||
> 2025-11-04: `/authority/audit/airgap` API persists tenant-scoped audit entries with pagination and authorization guards validated by the Authority integration suite (187 tests).
|
||||
| AUTH-AIRGAP-57-001 | BLOCKED (2025-11-01) | Authority Core & Security Guild, DevOps Guild | AUTH-AIRGAP-56-001, DEVOPS-AIRGAP-57-002 | Enforce sealed-mode CI gating by refusing token issuance when a declared sealed install lacks sealing confirmation. | Token issuance refused for sealed installs lacking sealing confirmation; gating behaviour covered by tests; docs updated. |
|
||||
> 2025-11-01: AUTH-AIRGAP-57-001 blocked pending guidance on sealed-confirmation contract and configuration expectations before gating changes (Authority Core & Security Guild, DevOps Guild).
|
||||
|
||||
## SDKs & OpenAPI (Epic 17)
|
||||
| ID | Status | Owner(s) | Depends on | Description | Exit Criteria |
|
||||
|----|--------|----------|------------|-------------|---------------|
|
||||
> 2025-10-28: Auth OpenAPI authored at `src/Api/StellaOps.Api.OpenApi/authority/openapi.yaml` covering `/token`, `/introspect`, `/revoke`, `/jwks`, scope catalog, and error envelopes; parsed via PyYAML sanity check and referenced in Epic 17 docs.
|
||||
> 2025-10-28: Added `/.well-known/openapi` endpoint wiring cached spec metadata, YAML/JSON negotiation, HTTP cache headers, and tests verifying ETag + Accept handling. Authority spec (`src/Api/StellaOps.Api.OpenApi/authority/openapi.yaml`) now includes grant/scope extensions.
|
||||
| AUTH-OAS-62-001 | DONE (2025-11-02) | Authority Core & Security Guild, SDK Generator Guild | AUTH-OAS-61-001, SDKGEN-63-001 | Provide SDK helpers for OAuth2/PAT flows, tenancy override header; add integration tests. | SDKs expose auth helpers; tests cover token issuance; docs updated. |
|
||||
> 2025-11-02: `AddStellaOpsApiAuthentication` shipped (OAuth2 + PAT), tenant header injection added, and client tests updated for caching behaviour.
|
||||
| AUTH-OAS-63-001 | DONE (2025-11-02) | Authority Core & Security Guild, API Governance Guild | APIGOV-63-001 | Emit deprecation headers and notifications for legacy auth endpoints. | Headers emitted; notifications verified; migration guide published. |
|
||||
> 2025-11-02: AUTH-OAS-63-001 completed — legacy OAuth shims emit Deprecation/Sunset/Warning headers, audit events captured, and migration guide published (Authority Core & Security Guild, API Governance Guild).
|
||||
|
||||
@@ -1,4 +1,4 @@
# TASKS
| Task | Owner(s) | Depends on | Notes |
|---|---|---|---|
|FEEDCONN-SHARED-STATE-003 Source state seeding helper|Tools Guild, BE-Conn-MSRC|Tools|**DOING (2025-10-19)** – Provide a reusable CLI/utility to seed `pendingDocuments`/`pendingMappings` for connectors (MSRC backfills require scripted CVRF + detail injection). Coordinate with MSRC team for expected JSON schema and handoff once prototype lands. Prereqs confirmed none (2025-10-19).|
|FEEDCONN-SHARED-STATE-003 Source state seeding helper|Tools Guild, BE-Conn-MSRC|Tools|**DONE (2025-11-04)** – Shipped `src/Tools/SourceStateSeeder` CLI plus `SourceStateSeedProcessor` APIs for programmatic seeding, with Mongo fixtures and MSRC runbook updates. Tests: `dotnet test src/Concelier/__Tests/StellaOps.Concelier.Connector.Common.Tests/StellaOps.Concelier.Connector.Common.Tests.csproj --no-build` (requires `libcrypto.so.1.1` for Mongo2Go when running outside CI).|

@@ -1,4 +1,4 @@
# TASKS
| Task | Owner(s) | Depends on | Notes |
|---|---|---|---|
|Fixture validation sweep|QA|None|**DOING (2025-10-19)** – Prereqs confirmed none; continuing RHSA fixture regeneration and diff review alongside mapper provenance updates.<br>2025-10-29: Added `scripts/update-redhat-fixtures.sh` to regenerate golden snapshots with `UPDATE_GOLDENS=1`; run it before reviews to capture CSAF contract deltas.|
|Fixture validation sweep|QA|None|**DONE (2025-11-04)** – Regenerated RHSA golden fixtures with `scripts/update-redhat-fixtures.sh` (exports `UPDATE_GOLDENS=1`) and revalidated connector snapshots via `dotnet test src/Concelier/__Tests/StellaOps.Concelier.Connector.Distro.RedHat.Tests/StellaOps.Concelier.Connector.Distro.RedHat.Tests.csproj --no-restore`.|

@@ -1,7 +1,7 @@
# TASKS
| Task | Owner(s) | Depends on | Notes |
|---|---|---|---|
|Link-Not-Merge version provenance coordination|BE-Merge|CONCELIER-LNM-21-001|**DOING** – Coordinate remaining connectors (`Acsc`, `Cccs`, `CertBund`, `CertCc`, `Cve`, `Ghsa`, `Ics.Cisa`, `Kisa`, `Ru.Bdu`, `Ru.Nkcki`, `Vndr.Apple`, `Vndr.Cisco`, `Vndr.Msrc`) so they emit `advisory_observations.affected.versions[]` entries with provenance tags and deterministic comparison keys. Track rollout status in `docs/dev/normalized-rule-recipes.md` (now updated for Link-Not-Merge) and retire the legacy merge counters as coverage transitions to linkset validation metrics.<br>2025-10-29: Added new guidance in the doc for recording observation version metadata and logging gaps via `LinksetVersionCoverage` warnings to replace prior `concelier.merge.normalized_rules*` alerts.|
|Link-Not-Merge version provenance coordination|BE-Merge|CONCELIER-LNM-21-001|**DONE (2025-11-04)** – Coordinated connector rollout: updated `docs/dev/normalized-rule-recipes.md` with a per-connector status table + follow-up IDs, enabled `Normalized version rules missing` diagnostics in `AdvisoryPrecedenceMerger`, and confirmed Linkset validation metrics reflect remaining upstream gaps (ACSC/CCCS/CERTBUND/Cisco/RU-BDU awaiting structured ranges).|
|FEEDMERGE-COORD-02-901 Connector deadline check-ins|BE-Merge|FEEDMERGE-COORD-02-900|**TODO (due 2025-10-21)** – Confirm Cccs/Cisco version-provenance updates land, capture `LinksetVersionCoverage` dashboard snapshots (expect zero missing-range warnings), and update coordination docs with the results.<br>2025-10-29: Observation metrics now surface `version_entries_total`/`missing_version_entries_total`; include screenshots for both when closing this task.|
|FEEDMERGE-COORD-02-902 ICS-CISA version comparison support|BE-Merge, Models|FEEDMERGE-COORD-02-900|**TODO (due 2025-10-23)** – Review ICS-CISA sample advisories, validate reuse of existing comparison helpers, and pre-stage Models ticket template only if a new firmware comparator is required. Document the outcome and observation coverage logs in coordination docs + tracker files.<br>2025-10-29: `docs/dev/normalized-rule-recipes.md` (§2–§3) now covers observation entries; attach decision summary + log sample when handing off to Models.|
|FEEDMERGE-COORD-02-903 KISA firmware scheme review|BE-Merge, Models|FEEDMERGE-COORD-02-900|**TODO (due 2025-10-24)** – Pair with KISA team on proposed firmware comparison helper (`kisa.build` or variant), ensure observation mapper alignment, and open Models ticket only if a new comparator is required. Log the final helper signature and observation coverage metrics in coordination docs + tracker files.|

@@ -120,7 +120,11 @@ builder.Services.AddSingleton<ILedgerEventRepository, PostgresLedgerEventReposit
builder.Services.AddSingleton<IMerkleAnchorScheduler, PostgresMerkleAnchorScheduler>();
builder.Services.AddSingleton<ILedgerEventStream, PostgresLedgerEventStream>();
builder.Services.AddSingleton<IFindingProjectionRepository, PostgresFindingProjectionRepository>();
builder.Services.AddSingleton<IPolicyEvaluationService, InlinePolicyEvaluationService>();
builder.Services.AddHttpClient("ledger-policy-engine");
builder.Services.AddSingleton<InlinePolicyEvaluationService>();
builder.Services.AddSingleton<PolicyEvaluationCache>();
builder.Services.AddSingleton<PolicyEngineEvaluationService>();
builder.Services.AddSingleton<IPolicyEvaluationService>(sp => sp.GetRequiredService<PolicyEngineEvaluationService>());
builder.Services.AddSingleton<ILedgerEventWriteService, LedgerEventWriteService>();
builder.Services.AddHostedService<LedgerMerkleAnchorWorker>();
builder.Services.AddHostedService<LedgerProjectionWorker>();

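The hunk above keeps the inline evaluator registered as a concrete type while `IPolicyEvaluationService` now resolves through `PolicyEngineEvaluationService` (with the default MS DI container, the last registration for an interface wins when a single instance is requested). A minimal sketch of that resolution pattern, using toy `IEvaluator` types rather than the actual Ledger services:

```csharp
// Sketch only: illustrates the fallback-plus-decorator registration shape, not the Ledger host code.
using System;
using Microsoft.Extensions.DependencyInjection;

public interface IEvaluator { string Name { get; } }

public sealed class InlineEvaluator : IEvaluator { public string Name => "inline"; }

public sealed class RemoteEvaluator : IEvaluator
{
    public RemoteEvaluator(InlineEvaluator fallback) => Fallback = fallback;
    public InlineEvaluator Fallback { get; }           // concrete fallback stays reachable
    public string Name => "remote";
}

public static class Demo
{
    public static void Main()
    {
        var services = new ServiceCollection();
        services.AddSingleton<InlineEvaluator>();       // fallback registered as itself
        services.AddSingleton<RemoteEvaluator>();       // engine-backed evaluator
        services.AddSingleton<IEvaluator>(sp => sp.GetRequiredService<RemoteEvaluator>());

        using var provider = services.BuildServiceProvider();
        Console.WriteLine(provider.GetRequiredService<IEvaluator>().Name); // prints "remote"
    }
}
```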
@@ -1,3 +1,4 @@
using System.Text.Json;
using System.Text.Json.Nodes;
using Microsoft.Extensions.Logging;
using StellaOps.Findings.Ledger.Domain;

@@ -0,0 +1,217 @@
using System.Net.Http;
using System.Net.Http.Json;
using System.Security.Cryptography;
using System.Text.Json;
using System.Text.Json.Nodes;
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Options;
using StellaOps.Findings.Ledger.Domain;
using StellaOps.Findings.Ledger.Options;

namespace StellaOps.Findings.Ledger.Infrastructure.Policy;

internal sealed class PolicyEngineEvaluationService : IPolicyEvaluationService
{
    private static readonly JsonSerializerOptions SerializerOptions = new(JsonSerializerDefaults.Web)
    {
        DefaultIgnoreCondition = System.Text.Json.Serialization.JsonIgnoreCondition.WhenWritingNull
    };

    private readonly HttpClient? _httpClient;
    private readonly InlinePolicyEvaluationService _fallback;
    private readonly PolicyEvaluationCache _cache;
    private readonly LedgerServiceOptions.PolicyEngineOptions _options;
    private readonly ILogger<PolicyEngineEvaluationService> _logger;

    public PolicyEngineEvaluationService(
        IHttpClientFactory httpClientFactory,
        InlinePolicyEvaluationService fallback,
        PolicyEvaluationCache cache,
        IOptions<LedgerServiceOptions> options,
        ILogger<PolicyEngineEvaluationService> logger)
    {
        ArgumentNullException.ThrowIfNull(httpClientFactory);
        _fallback = fallback ?? throw new ArgumentNullException(nameof(fallback));
        _cache = cache ?? throw new ArgumentNullException(nameof(cache));
        _logger = logger ?? throw new ArgumentNullException(nameof(logger));

        _options = (options ?? throw new ArgumentNullException(nameof(options))).Value.PolicyEngine;

        // Remote evaluation is only enabled when a base address is configured; otherwise the inline evaluator handles everything.
        if (_options.BaseAddress is not null)
        {
            var client = httpClientFactory.CreateClient("ledger-policy-engine");
            client.BaseAddress = _options.BaseAddress;
            client.Timeout = _options.RequestTimeout;
            _httpClient = client;
        }
    }

    public async Task<PolicyEvaluationResult> EvaluateAsync(
        LedgerEventRecord record,
        FindingProjection? existingProjection,
        CancellationToken cancellationToken)
    {
        if (record is null)
        {
            throw new ArgumentNullException(nameof(record));
        }

        if (_httpClient is null)
        {
            return await _fallback.EvaluateAsync(record, existingProjection, cancellationToken).ConfigureAwait(false);
        }

        var cacheKey = CreateCacheKey(record, existingProjection);
        if (_cache.TryGet(cacheKey, out var cachedResult))
        {
            return cachedResult;
        }

        try
        {
            var requestBody = CreateRequest(record, existingProjection);
            using var request = new HttpRequestMessage(HttpMethod.Post, "policy/eval/batch")
            {
                Content = JsonContent.Create(requestBody, options: SerializerOptions)
            };

            request.Headers.TryAddWithoutValidation(_options.TenantHeaderName, record.TenantId);

            using var response = await _httpClient.SendAsync(request, cancellationToken).ConfigureAwait(false);

            if (!response.IsSuccessStatusCode)
            {
                _logger.LogWarning(
                    "Policy engine evaluation request failed with status {StatusCode}. Falling back to inline evaluator.",
                    response.StatusCode);
                return await _fallback.EvaluateAsync(record, existingProjection, cancellationToken).ConfigureAwait(false);
            }

            using var stream = await response.Content.ReadAsStreamAsync(cancellationToken).ConfigureAwait(false);
            using var document = await JsonDocument.ParseAsync(stream, cancellationToken: cancellationToken).ConfigureAwait(false);

            var result = ParseResponse(document.RootElement, record);
            _cache.Set(cacheKey, result);
            return result;
        }
        catch (OperationCanceledException)
        {
            throw;
        }
        catch (Exception ex)
        {
            _logger.LogError(ex, "Policy engine evaluation failed; falling back to inline evaluation.");
            return await _fallback.EvaluateAsync(record, existingProjection, cancellationToken).ConfigureAwait(false);
        }
    }

    private static PolicyEvaluationCacheKey CreateCacheKey(LedgerEventRecord record, FindingProjection? existingProjection)
    {
        using var sha = SHA256.Create();
        var eventBytes = JsonSerializer.SerializeToUtf8Bytes(record.EventBody, SerializerOptions);
        var hashBytes = sha.ComputeHash(eventBytes);
        var projectionHash = existingProjection?.CycleHash;
        return new PolicyEvaluationCacheKey(record.TenantId, record.PolicyVersion, record.EventId, projectionHash ?? Convert.ToHexString(hashBytes));
    }

    private static JsonObject CreateRequest(LedgerEventRecord record, FindingProjection? existingProjection)
    {
        var batchItem = new JsonObject
        {
            ["findingId"] = record.FindingId,
            ["eventId"] = record.EventId.ToString(),
            ["event"] = record.EventBody.DeepClone()
        };

        if (existingProjection is not null)
        {
            batchItem["currentProjection"] = new JsonObject
            {
                ["status"] = existingProjection.Status,
                ["severity"] = existingProjection.Severity,
                ["labels"] = existingProjection.Labels.DeepClone(),
                ["explainRef"] = existingProjection.ExplainRef,
                ["rationale"] = existingProjection.PolicyRationale.DeepClone()
            };
        }

        var request = new JsonObject
        {
            ["tenantId"] = record.TenantId,
            ["policyVersion"] = record.PolicyVersion,
            ["items"] = new JsonArray { batchItem }
        };

        return request;
    }

    private static PolicyEvaluationResult ParseResponse(JsonElement response, LedgerEventRecord record)
    {
        if (!response.TryGetProperty("items", out var itemsElement) || itemsElement.ValueKind != JsonValueKind.Array)
        {
            throw new InvalidOperationException("Policy engine response missing 'items' array.");
        }

        foreach (var item in itemsElement.EnumerateArray())
        {
            var findingId = item.GetPropertyOrDefault("findingId")?.GetString();
            if (!string.Equals(findingId, record.FindingId, StringComparison.Ordinal))
            {
                continue;
            }

            var status = item.GetPropertyOrDefault("status")?.GetString();
            decimal? severity = null;
            var severityElement = item.GetPropertyOrDefault("severity");
            if (severityElement.HasValue && severityElement.Value.ValueKind == JsonValueKind.Number && severityElement.Value.TryGetDecimal(out var decimalSeverity))
            {
                severity = decimalSeverity;
            }

            var labelsNode = new JsonObject();
            var labelsElement = item.GetPropertyOrDefault("labels");
            if (labelsElement.HasValue && labelsElement.Value.ValueKind == JsonValueKind.Object)
            {
                labelsNode = (JsonObject)labelsElement.Value.ToJsonNode()!;
            }
            var explainRef = item.GetPropertyOrDefault("explainRef")?.GetString();

            JsonArray rationale;
            var rationaleElement = item.GetPropertyOrDefault("rationale");
            if (!rationaleElement.HasValue || rationaleElement.Value.ValueKind != JsonValueKind.Array)
            {
                rationale = new JsonArray();
                if (!string.IsNullOrWhiteSpace(explainRef))
                {
                    rationale.Add(explainRef);
                }
            }
            else
            {
                rationale = (JsonArray)rationaleElement.Value.ToJsonNode()!;
            }

            return new PolicyEvaluationResult(status, severity, labelsNode, explainRef, rationale);
        }

        throw new InvalidOperationException("Policy engine response did not include evaluation for requested finding.");
    }
}

internal static class JsonElementExtensions
{
    public static JsonElement? GetPropertyOrDefault(this JsonElement element, string propertyName)
    {
        if (element.ValueKind != JsonValueKind.Object)
        {
            return null;
        }

        return element.TryGetProperty(propertyName, out var value) ? value : null;
    }

    public static JsonNode? ToJsonNode(this JsonElement element)
    {
        return JsonNode.Parse(element.GetRawText());
    }
}

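`CreateCacheKey` keys evaluations by tenant, policy version, and event ID, and falls back to a SHA-256 hex digest of the serialized event body when the existing projection carries no `CycleHash`. A self-contained sketch of that fallback hashing step (the payload and field names below are illustrative only, not a real ledger event):

```csharp
using System;
using System.Security.Cryptography;
using System.Text.Json;
using System.Text.Json.Nodes;

// Illustrative event body; real ledger events carry the full event envelope.
var eventBody = new JsonObject
{
    ["findingId"] = "finding-123",
    ["status"] = "affected"
};

var serializerOptions = new JsonSerializerOptions(JsonSerializerDefaults.Web);
byte[] eventBytes = JsonSerializer.SerializeToUtf8Bytes(eventBody, serializerOptions);

// Same shape as the service: a hex-encoded SHA-256 stands in when no projection CycleHash exists.
string bodyHash = Convert.ToHexString(SHA256.HashData(eventBytes));
Console.WriteLine(bodyHash);
```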
@@ -0,0 +1,95 @@
using System.Text.Json.Nodes;
using Microsoft.Extensions.Caching.Memory;
using Microsoft.Extensions.Logging;
using StellaOps.Findings.Ledger.Domain;
using StellaOps.Findings.Ledger.Options;

namespace StellaOps.Findings.Ledger.Infrastructure.Policy;

internal sealed record PolicyEvaluationCacheKey(string TenantId, string PolicyVersion, Guid EventId, string? ProjectionHash);

internal sealed class PolicyEvaluationCache : IDisposable
{
    private readonly IMemoryCache _cache;
    private readonly ILogger<PolicyEvaluationCache> _logger;
    private bool _disposed;

    public PolicyEvaluationCache(
        LedgerServiceOptions.PolicyEngineOptions options,
        ILogger<PolicyEvaluationCache> logger)
    {
        ArgumentNullException.ThrowIfNull(options);
        _logger = logger ?? throw new ArgumentNullException(nameof(logger));

        _cache = new MemoryCache(new MemoryCacheOptions
        {
            SizeLimit = options.Cache.SizeLimit
        });

        EntryLifetime = options.Cache.EntryLifetime;
    }

    public TimeSpan EntryLifetime { get; }

    public bool TryGet(PolicyEvaluationCacheKey key, out PolicyEvaluationResult result)
    {
        ArgumentNullException.ThrowIfNull(key);

        if (_cache.TryGetValue(key, out PolicyEvaluationResult? cached) && cached is not null)
        {
            _logger.LogTrace("Policy evaluation cache hit for tenant {Tenant} finding {Finding} policy {Policy}", key.TenantId, key.EventId, key.PolicyVersion);
            result = Clone(cached);
            return true;
        }

        result = null!;
        return false;
    }

    public void Set(PolicyEvaluationCacheKey key, PolicyEvaluationResult value)
    {
        ArgumentNullException.ThrowIfNull(key);
        ArgumentNullException.ThrowIfNull(value);

        var entryOptions = new MemoryCacheEntryOptions()
            .SetSize(1)
            .SetAbsoluteExpiration(EntryLifetime);

        _cache.Set(key, Clone(value), entryOptions);
    }

    private static PolicyEvaluationResult Clone(PolicyEvaluationResult result)
    {
        var labelsClone = result.Labels is null ? new JsonObject() : (JsonObject)result.Labels.DeepClone();
        var rationaleClone = result.Rationale is null ? new JsonArray() : CloneArray(result.Rationale);

        return new PolicyEvaluationResult(
            result.Status,
            result.Severity,
            labelsClone,
            result.ExplainRef,
            rationaleClone);
    }

    private static JsonArray CloneArray(JsonArray source)
    {
        var clone = new JsonArray();
        foreach (var item in source)
        {
            clone.Add(item?.DeepClone());
        }

        return clone;
    }

    public void Dispose()
    {
        if (_disposed)
        {
            return;
        }

        _cache.Dispose();
        _disposed = true;
    }
}
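For orientation, a minimal sketch of how the projector is expected to consult this cache before calling the Policy Engine. The record field names and the remote-call helper below are hypothetical; only the cache key record and the TryGet/Set API come from this change.

// Hypothetical call site (field and helper names are illustrative).
var key = new PolicyEvaluationCacheKey(
    record.TenantId,
    "policy/v1",                      // policy version associated with the evaluation
    record.EventId,
    existingProjection?.CycleHash);

if (!cache.TryGet(key, out var evaluation))
{
    evaluation = await EvaluateRemotelyAsync(record, cancellationToken); // hypothetical remote call
    cache.Set(key, evaluation);
}

Note that TryGet and Set both clone the stored result, so a caller may mutate the returned Labels/Rationale nodes without corrupting the cached copy.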
@@ -12,6 +12,8 @@ public sealed class LedgerServiceOptions

    public ProjectionOptions Projection { get; init; } = new();

    public PolicyEngineOptions PolicyEngine { get; init; } = new();

    public void Validate()
    {
        if (string.IsNullOrWhiteSpace(Database.ConnectionString))
@@ -43,6 +45,8 @@ public sealed class LedgerServiceOptions
        {
            throw new InvalidOperationException("Projection idle delay must be greater than zero.");
        }

        PolicyEngine.Validate();
    }

    public sealed class DatabaseOptions
@@ -90,4 +94,53 @@ public sealed class LedgerServiceOptions

        public TimeSpan IdleDelay { get; set; } = DefaultIdleDelay;
    }

    public sealed class PolicyEngineOptions
    {
        private const int DefaultCacheSizeLimit = 2048;
        private static readonly TimeSpan DefaultCacheEntryLifetime = TimeSpan.FromMinutes(30);
        private static readonly TimeSpan DefaultRequestTimeout = TimeSpan.FromSeconds(10);

        public Uri? BaseAddress { get; set; }

        public string TenantHeaderName { get; set; } = "X-Stella-Tenant";

        public TimeSpan RequestTimeout { get; set; } = DefaultRequestTimeout;

        public PolicyEngineCacheOptions Cache { get; init; } = new();

        internal void Validate()
        {
            if (BaseAddress is not null && !BaseAddress.IsAbsoluteUri)
            {
                throw new InvalidOperationException("Policy engine base address must be an absolute URI.");
            }

            if (Cache.SizeLimit <= 0)
            {
                throw new InvalidOperationException("Policy engine cache size limit must be greater than zero.");
            }

            if (Cache.EntryLifetime <= TimeSpan.Zero)
            {
                throw new InvalidOperationException("Policy engine cache entry lifetime must be greater than zero.");
            }

            if (RequestTimeout <= TimeSpan.Zero)
            {
                throw new InvalidOperationException("Policy engine request timeout must be greater than zero.");
            }
        }
    }

    public sealed class PolicyEngineCacheOptions
    {
        private const int DefaultCacheSizeLimit = 2048;
        private static readonly TimeSpan DefaultCacheEntryLifetime = TimeSpan.FromMinutes(30);

        public int SizeLimit { get; set; } = DefaultCacheSizeLimit;

        public TimeSpan EntryLifetime { get; set; } = DefaultCacheEntryLifetime;
    }
}
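As a rough illustration of how these options are expected to be populated, a hedged construction sketch follows; the base address value is a placeholder and the section path used for binding is not shown in this commit.

// Hypothetical wiring sketch; values shown mirror the defaults above.
var ledgerOptions = new LedgerServiceOptions
{
    PolicyEngine = new LedgerServiceOptions.PolicyEngineOptions
    {
        BaseAddress = new Uri("https://policy-engine.internal/"), // Validate() requires an absolute URI
        TenantHeaderName = "X-Stella-Tenant",
        RequestTimeout = TimeSpan.FromSeconds(10),
        Cache = new LedgerServiceOptions.PolicyEngineCacheOptions
        {
            SizeLimit = 2048,
            EntryLifetime = TimeSpan.FromMinutes(30)
        }
    }
};
// LedgerServiceOptions.Validate() cascades into PolicyEngine.Validate(), rejecting relative
// base addresses, non-positive cache sizes/lifetimes, and non-positive request timeouts.
// Leaving BaseAddress unset keeps the projector on the inline evaluator (see the tests below).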
@@ -0,0 +1,3 @@
using System.Runtime.CompilerServices;

[assembly: InternalsVisibleTo("StellaOps.Findings.Ledger.Tests")]
@@ -14,6 +14,8 @@
    <PackageReference Include="Microsoft.Extensions.Logging.Abstractions" Version="10.0.0-rc.2.25502.107" />
    <PackageReference Include="Microsoft.Extensions.Hosting.Abstractions" Version="10.0.0-rc.2.25502.107" />
    <PackageReference Include="Microsoft.Extensions.Options" Version="10.0.0-rc.2.25502.107" />
    <PackageReference Include="Microsoft.Extensions.Caching.Memory" Version="10.0.0-rc.2.25502.107" />
    <PackageReference Include="Microsoft.Extensions.Http" Version="10.0.0-rc.2.25502.107" />
    <PackageReference Include="Npgsql" Version="7.0.7" />
  </ItemGroup>

@@ -4,7 +4,7 @@
|
||||
| LEDGER-29-001 | DONE (2025-11-03) | Findings Ledger Guild | AUTH-POLICY-27-001 | Design ledger & projection schemas (tables/indexes), canonical JSON format, hashing strategy, and migrations. Publish schema doc + fixtures.<br>2025-11-03: Initial PostgreSQL migration added with partitions/enums, fixtures seeded with canonical hashes, schema doc aligned. | Schemas committed; migrations generated; hashing documented; fixtures seeded for CI. |
|
||||
| LEDGER-29-002 | DONE (2025-11-03) | Findings Ledger Guild | LEDGER-29-001 | Implement ledger write API (`POST /vuln/ledger/events`) with validation, idempotency, hash chaining, and Merkle root computation job.<br>2025-11-03: Minimal web service scaffolded with canonical hashing, in-memory repository, Merkle scheduler stub, request/response contracts, and unit tests for hashing + conflict flows. | Events persisted with chained hashes; Merkle job emits anchors; unit/integration tests cover happy/pathological cases. |
|
||||
| LEDGER-29-003 | DONE (2025-11-03) | Findings Ledger Guild, Scheduler Guild | LEDGER-29-001 | Build projector worker that derives `findings_projection` rows from ledger events + policy determinations; ensure idempotent replay keyed by `(tenant,finding_id,policy_version)`. | Postgres-backed projector worker and reducers landed with replay checkpointing, fixtures, and tests. |
|
||||
| LEDGER-29-004 | DOING (2025-11-03) | Findings Ledger Guild, Policy Guild | LEDGER-29-003, POLICY-ENGINE-27-001 | Integrate Policy Engine batch evaluation (baseline + simulate) with projector; cache rationale references.<br>2025-11-04: Projection reducer now consumes policy evaluation output with rationale arrays; Postgres migration + fixtures/tests updated, awaiting Policy Engine API wiring for batch fetch. | Projector fetches determinations efficiently; rationale stored for UI; regression tests cover version switches. |
|
||||
| LEDGER-29-004 | DONE (2025-11-04) | Findings Ledger Guild, Policy Guild | LEDGER-29-003, POLICY-ENGINE-27-001 | Integrate Policy Engine batch evaluation (baseline + simulate) with projector; cache rationale references.<br>2025-11-04: Remote evaluation service wired via typed HttpClient, cache, and fallback inline evaluator; `/api/policy/eval/batch` documented; `policy_rationale` persisted with deterministic hashing; ledger tests `dotnet test src/Findings/__Tests/StellaOps.Findings.Ledger.Tests/StellaOps.Findings.Ledger.Tests.csproj --no-restore` green. | Projector fetches determinations efficiently; rationale stored for UI; regression tests cover version switches. |
|
||||
| LEDGER-29-005 | TODO | Findings Ledger Guild | LEDGER-29-003 | Implement workflow mutation handlers (assign, comment, accept-risk, target-fix, verify-fix, reopen) producing ledger events with validation and attachments metadata. | API endpoints enforce business rules; attachments metadata stored; tests cover state machine transitions. |
|
||||
| LEDGER-29-006 | TODO | Findings Ledger Guild, Security Guild | LEDGER-29-002 | Integrate attachment encryption (KMS envelope), signed URL issuance, CSRF protection hooks for Console. | Attachments encrypted and accessible via signed URLs; security tests verify expiry + scope. |
|
||||
| LEDGER-29-007 | TODO | Findings Ledger Guild, Observability Guild | LEDGER-29-002..005 | Instrument metrics (`ledger_write_latency`, `projection_lag_seconds`, `ledger_events_total`), structured logs, and Merkle anchoring alerts; publish dashboards. | Metrics/traces emitted; dashboards live; alert thresholds documented. |
|
||||
|
||||
@@ -65,6 +65,7 @@ public sealed class LedgerProjectionReducerTests
            new JsonObject(),
            Guid.NewGuid(),
            null,
            new JsonArray(),
            DateTimeOffset.UtcNow,
            string.Empty);
        var existingHash = ProjectionHashing.ComputeCycleHash(existing);
@@ -112,6 +113,7 @@ public sealed class LedgerProjectionReducerTests
            labels,
            Guid.NewGuid(),
            null,
            new JsonArray(),
            DateTimeOffset.UtcNow,
            string.Empty);
        existing = existing with { CycleHash = ProjectionHashing.ComputeCycleHash(existing) };

@@ -0,0 +1,186 @@
using System.Net;
using System.Net.Http;
using System.Text.Json.Nodes;
using Microsoft.Extensions.Logging.Abstractions;
using Microsoft.Extensions.Options;
using StellaOps.Findings.Ledger.Domain;
using StellaOps.Findings.Ledger.Infrastructure.Policy;
using StellaOps.Findings.Ledger.Options;
using Xunit;

namespace StellaOps.Findings.Ledger.Tests;

public sealed class PolicyEngineEvaluationServiceTests
{
    private const string TenantId = "tenant-1";
    private static readonly DateTimeOffset Now = DateTimeOffset.UtcNow;

    [Fact]
    public async Task EvaluateAsync_UsesPolicyEngineAndCachesResult()
    {
        var handler = new StubHttpHandler(_ => new HttpResponseMessage(HttpStatusCode.OK)
        {
            Content = new StringContent(
                """
                {
                  "items": [
                    {
                      "findingId": "finding-1",
                      "status": "affected",
                      "severity": 7.5,
                      "labels": { "exposure": "runtime" },
                      "explainRef": "policy://explain/123",
                      "rationale": ["policy://explain/123"]
                    }
                  ]
                }
                """,
                System.Text.Encoding.UTF8,
                "application/json")
        });

        var factory = new TestHttpClientFactory(handler);
        var options = CreateOptions(new Uri("https://policy.example/"));
        using var cache = new PolicyEvaluationCache(options.PolicyEngine, NullLogger<PolicyEvaluationCache>.Instance);
        var inline = new InlinePolicyEvaluationService(NullLogger<InlinePolicyEvaluationService>.Instance);
        var service = new PolicyEngineEvaluationService(factory, inline, cache, Microsoft.Extensions.Options.Options.Create(options), NullLogger<PolicyEngineEvaluationService>.Instance);

        var record = CreateRecord();

        var first = await service.EvaluateAsync(record, existingProjection: null, CancellationToken.None);
        var second = await service.EvaluateAsync(record, existingProjection: null, CancellationToken.None);

        Assert.Equal("affected", first.Status);
        Assert.Equal(7.5m, first.Severity);
        Assert.Equal("policy://explain/123", first.ExplainRef);
        Assert.Equal("runtime", first.Labels?["exposure"]?.GetValue<string>());
        Assert.Equal(1, handler.CallCount); // cached second call
        Assert.Equal("affected", second.Status);
        Assert.Equal("policy://explain/123", second.Rationale[0]?.GetValue<string>());
    }

    [Fact]
    public async Task EvaluateAsync_FallsBackToInlineWhenRequestFails()
    {
        var handler = new StubHttpHandler(_ => new HttpResponseMessage(HttpStatusCode.InternalServerError));
        var factory = new TestHttpClientFactory(handler);
        var options = CreateOptions(new Uri("https://policy.example/"));
        using var cache = new PolicyEvaluationCache(options.PolicyEngine, NullLogger<PolicyEvaluationCache>.Instance);
        var inline = new InlinePolicyEvaluationService(NullLogger<InlinePolicyEvaluationService>.Instance);
        var service = new PolicyEngineEvaluationService(factory, inline, cache, Microsoft.Extensions.Options.Options.Create(options), NullLogger<PolicyEngineEvaluationService>.Instance);

        var record = CreateRecord(payloadStatus: "investigating", payloadSeverity: 4.2m);

        var result = await service.EvaluateAsync(record, existingProjection: null, CancellationToken.None);

        Assert.Equal("investigating", result.Status);
        Assert.Equal(4.2m, result.Severity);
        Assert.Equal(1, handler.CallCount);
    }

    [Fact]
    public async Task EvaluateAsync_UsesInlineWhenNoBaseAddressConfigured()
    {
        var handler = new StubHttpHandler(_ => throw new InvalidOperationException("Handler should not be invoked."));
        var factory = new TestHttpClientFactory(handler);
        var options = CreateOptions(baseAddress: null);
        using var cache = new PolicyEvaluationCache(options.PolicyEngine, NullLogger<PolicyEvaluationCache>.Instance);
        var inline = new InlinePolicyEvaluationService(NullLogger<InlinePolicyEvaluationService>.Instance);
        var service = new PolicyEngineEvaluationService(factory, inline, cache, Microsoft.Extensions.Options.Options.Create(options), NullLogger<PolicyEngineEvaluationService>.Instance);

        var record = CreateRecord(payloadStatus: "accepted_risk", payloadSeverity: 1.0m);

        var result = await service.EvaluateAsync(record, existingProjection: null, CancellationToken.None);

        Assert.Equal("accepted_risk", result.Status);
        Assert.Equal(1.0m, result.Severity);
        Assert.Equal(0, handler.CallCount);
    }

    private static LedgerServiceOptions CreateOptions(Uri? baseAddress)
    {
        return new LedgerServiceOptions
        {
            PolicyEngine = new LedgerServiceOptions.PolicyEngineOptions
            {
                BaseAddress = baseAddress,
                Cache = new LedgerServiceOptions.PolicyEngineCacheOptions
                {
                    SizeLimit = 16,
                    EntryLifetime = TimeSpan.FromMinutes(5)
                }
            }
        };
    }

    private static LedgerEventRecord CreateRecord(string payloadStatus = "affected", decimal? payloadSeverity = 5.0m)
    {
        var payload = new JsonObject
        {
            ["status"] = payloadStatus,
            ["severity"] = payloadSeverity,
            ["labels"] = new JsonObject { ["source"] = "policy" }
        };

        var envelope = new JsonObject
        {
            ["event"] = new JsonObject
            {
                ["payload"] = payload
            }
        };

        return new LedgerEventRecord(
            TenantId,
            Guid.NewGuid(),
            1,
            Guid.NewGuid(),
            LedgerEventConstants.EventFindingStatusChanged,
            "policy/v1",
            "finding-1",
            "artifact-1",
            null,
            "actor",
            "service",
            Now,
            Now,
            envelope,
            "hash",
            "prev",
            "leaf",
            envelope.ToJsonString());
    }

    private sealed class StubHttpHandler : HttpMessageHandler
    {
        private readonly Func<HttpRequestMessage, HttpResponseMessage> _handler;

        public StubHttpHandler(Func<HttpRequestMessage, HttpResponseMessage> handler)
        {
            _handler = handler ?? throw new ArgumentNullException(nameof(handler));
        }

        public int CallCount { get; private set; }

        protected override Task<HttpResponseMessage> SendAsync(HttpRequestMessage request, CancellationToken cancellationToken)
        {
            CallCount++;
            return Task.FromResult(_handler(request));
        }
    }

    private sealed class TestHttpClientFactory : IHttpClientFactory
    {
        private readonly HttpMessageHandler _handler;

        public TestHttpClientFactory(HttpMessageHandler handler)
        {
            _handler = handler;
        }

        public HttpClient CreateClient(string name)
        {
            return new HttpClient(_handler, disposeHandler: false);
        }
    }
}
@@ -1,12 +1,12 @@
|
||||
# Issuer Directory Task Board — Epic 7
|
||||
| ID | Status | Owner(s) | Depends on | Description | Exit Criteria |
|
||||
|----|--------|----------|------------|-------------|---------------|
|
||||
| ISSUER-30-001 | DONE (2025-11-01) | Issuer Directory Guild | AUTH-VULN-29-001 | Implement issuer CRUD API with RBAC, audit logging, and tenant scoping; seed CSAF publisher metadata. | APIs deployed; audit logs capture actor/reason; seed data imported; tests cover RBAC. |
|
||||
| ISSUER-30-002 | DONE (2025-11-01) | Issuer Directory Guild, Security Guild | ISSUER-30-001 | Implement key management endpoints (add/rotate/revoke keys), enforce expiry, validate formats (Ed25519, X.509, DSSE). | Keys stored securely; expiry enforced; validation tests cover key types; docs updated. |
|
||||
| ISSUER-30-003 | DONE (2025-11-04) | Issuer Directory Guild, Policy Guild | ISSUER-30-001 | Provide trust weight APIs and tenant overrides with validation (+/- bounds) and audit trails. | Trust overrides persisted; policy integration confirmed; tests cover overrides. |
|
||||
> 2025-11-04: `/issuer-directory/issuers/{id}/trust` endpoints deliver bounded overrides with audit logging, Mongo indexes seeded for uniqueness, config/docs updated, and core tests executed (`dotnet test`).
|
||||
| ISSUER-30-004 | DONE (2025-11-01) | Issuer Directory Guild, VEX Lens Guild | ISSUER-30-001..003 | Integrate with VEX Lens and Excitor signature verification (client SDK, caching, retries). | Lens/Excitor resolve issuer metadata via SDK; integration tests cover network failures. |
|
||||
| ISSUER-30-005 | DONE (2025-11-01) | Issuer Directory Guild, Observability Guild | ISSUER-30-001..004 | Instrument metrics/logs (issuer changes, key rotation, verification failures) and dashboards/alerts. | Telemetry live; alerts configured; docs updated. |
|
||||
| ISSUER-30-006 | DONE (2025-11-02) | Issuer Directory Guild, DevOps Guild | ISSUER-30-001..005 | Provide deployment manifests, backup/restore, secure secret storage, and offline kit instructions. | Deployment docs merged; smoke deploy validated; backup tested; offline kit updated. |
|
||||
|
||||
> 2025-11-01: Excititor worker now queries Issuer Directory via during attestation verification, caching active key metadata and trust weights for tenant/global scopes.
@@ -1,18 +1,18 @@
|
||||
{
|
||||
"schemaVersion": "1.0",
|
||||
"id": "stellaops.notify.connector.email",
|
||||
"displayName": "StellaOps Email Notify Connector",
|
||||
"version": "0.1.0-alpha",
|
||||
"requiresRestart": true,
|
||||
"entryPoint": {
|
||||
"type": "dotnet",
|
||||
"assembly": "StellaOps.Notify.Connectors.Email.dll"
|
||||
},
|
||||
"capabilities": [
|
||||
"notify-connector",
|
||||
"email"
|
||||
],
|
||||
"metadata": {
|
||||
"org.stellaops.notify.channel.type": "email"
|
||||
}
|
||||
}
@@ -1,19 +1,19 @@
|
||||
{
|
||||
"schemaVersion": "1.0",
|
||||
"id": "stellaops.notify.connector.slack",
|
||||
"displayName": "StellaOps Slack Notify Connector",
|
||||
"version": "0.1.0-alpha",
|
||||
"requiresRestart": true,
|
||||
"entryPoint": {
|
||||
"type": "dotnet",
|
||||
"assembly": "StellaOps.Notify.Connectors.Slack.dll"
|
||||
},
|
||||
"capabilities": [
|
||||
"notify-connector",
|
||||
"slack"
|
||||
],
|
||||
"metadata": {
|
||||
"org.stellaops.notify.channel.type": "slack",
|
||||
"org.stellaops.notify.connector.requiredScopes": "chat:write,chat:write.public"
|
||||
}
|
||||
}
@@ -1,19 +1,19 @@
|
||||
{
|
||||
"schemaVersion": "1.0",
|
||||
"id": "stellaops.notify.connector.teams",
|
||||
"displayName": "StellaOps Teams Notify Connector",
|
||||
"version": "0.1.0-alpha",
|
||||
"requiresRestart": true,
|
||||
"entryPoint": {
|
||||
"type": "dotnet",
|
||||
"assembly": "StellaOps.Notify.Connectors.Teams.dll"
|
||||
},
|
||||
"capabilities": [
|
||||
"notify-connector",
|
||||
"teams"
|
||||
],
|
||||
"metadata": {
|
||||
"org.stellaops.notify.channel.type": "teams",
|
||||
"org.stellaops.notify.connector.cardVersion": "1.5"
|
||||
}
|
||||
}
@@ -1,18 +1,18 @@
|
||||
{
|
||||
"schemaVersion": "1.0",
|
||||
"id": "stellaops.notify.connector.webhook",
|
||||
"displayName": "StellaOps Webhook Notify Connector",
|
||||
"version": "0.1.0-alpha",
|
||||
"requiresRestart": true,
|
||||
"entryPoint": {
|
||||
"type": "dotnet",
|
||||
"assembly": "StellaOps.Notify.Connectors.Webhook.dll"
|
||||
},
|
||||
"capabilities": [
|
||||
"notify-connector",
|
||||
"webhook"
|
||||
],
|
||||
"metadata": {
|
||||
"org.stellaops.notify.channel.type": "webhook"
|
||||
}
|
||||
}
@@ -55,9 +55,13 @@ public sealed record ReportDocumentDto
|
||||
[JsonPropertyOrder(6)]
|
||||
public IReadOnlyList<PolicyPreviewVerdictDto> Verdicts { get; init; } = Array.Empty<PolicyPreviewVerdictDto>();
|
||||
|
||||
[JsonPropertyName("issues")]
|
||||
[JsonPropertyOrder(7)]
|
||||
public IReadOnlyList<PolicyPreviewIssueDto> Issues { get; init; } = Array.Empty<PolicyPreviewIssueDto>();
|
||||
[JsonPropertyName("issues")]
|
||||
[JsonPropertyOrder(7)]
|
||||
public IReadOnlyList<PolicyPreviewIssueDto> Issues { get; init; } = Array.Empty<PolicyPreviewIssueDto>();
|
||||
|
||||
[JsonPropertyName("surface")]
|
||||
[JsonPropertyOrder(8)]
|
||||
public SurfacePointersDto? Surface { get; init; }
|
||||
}
|
||||
|
||||
public sealed record ReportPolicyDto
|
||||
|
||||
@@ -1,13 +1,14 @@
|
||||
namespace StellaOps.Scanner.WebService.Contracts;
|
||||
|
||||
public sealed record ScanStatusResponse(
|
||||
string ScanId,
|
||||
string Status,
|
||||
ScanStatusTarget Image,
|
||||
DateTimeOffset CreatedAt,
|
||||
DateTimeOffset UpdatedAt,
|
||||
string? FailureReason);
|
||||
|
||||
public sealed record ScanStatusTarget(
|
||||
string? Reference,
|
||||
string? Digest);
|
||||
namespace StellaOps.Scanner.WebService.Contracts;
|
||||
|
||||
public sealed record ScanStatusResponse(
|
||||
string ScanId,
|
||||
string Status,
|
||||
ScanStatusTarget Image,
|
||||
DateTimeOffset CreatedAt,
|
||||
DateTimeOffset UpdatedAt,
|
||||
string? FailureReason,
|
||||
SurfacePointersDto? Surface);
|
||||
|
||||
public sealed record ScanStatusTarget(
|
||||
string? Reference,
|
||||
string? Digest);
|
||||
|
||||
@@ -0,0 +1,87 @@
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.Text.Json.Serialization;
|
||||
|
||||
namespace StellaOps.Scanner.WebService.Contracts;
|
||||
|
||||
public sealed record SurfacePointersDto
|
||||
{
|
||||
[JsonPropertyName("tenant")]
|
||||
[JsonPropertyOrder(0)]
|
||||
public string Tenant { get; init; } = string.Empty;
|
||||
|
||||
[JsonPropertyName("generatedAt")]
|
||||
[JsonPropertyOrder(1)]
|
||||
public DateTimeOffset GeneratedAt { get; init; }
|
||||
= DateTimeOffset.UtcNow;
|
||||
|
||||
[JsonPropertyName("manifestDigest")]
|
||||
[JsonPropertyOrder(2)]
|
||||
public string ManifestDigest { get; init; } = string.Empty;
|
||||
|
||||
[JsonPropertyName("manifestUri")]
|
||||
[JsonPropertyOrder(3)]
|
||||
public string? ManifestUri { get; init; }
|
||||
= null;
|
||||
|
||||
[JsonPropertyName("manifest")]
|
||||
[JsonPropertyOrder(4)]
|
||||
public SurfaceManifestDocument Manifest { get; init; } = new();
|
||||
}
|
||||
|
||||
public sealed record SurfaceManifestDocument
|
||||
{
|
||||
[JsonPropertyName("schema")]
|
||||
[JsonPropertyOrder(0)]
|
||||
public string Schema { get; init; } = "stellaops.surface.manifest@1";
|
||||
|
||||
[JsonPropertyName("tenant")]
|
||||
[JsonPropertyOrder(1)]
|
||||
public string Tenant { get; init; } = string.Empty;
|
||||
|
||||
[JsonPropertyName("imageDigest")]
|
||||
[JsonPropertyOrder(2)]
|
||||
public string ImageDigest { get; init; } = string.Empty;
|
||||
|
||||
[JsonPropertyName("generatedAt")]
|
||||
[JsonPropertyOrder(3)]
|
||||
public DateTimeOffset GeneratedAt { get; init; } = DateTimeOffset.UtcNow;
|
||||
|
||||
[JsonPropertyName("artifacts")]
|
||||
[JsonPropertyOrder(4)]
|
||||
public IReadOnlyList<SurfaceManifestArtifact> Artifacts { get; init; } = Array.Empty<SurfaceManifestArtifact>();
|
||||
}
|
||||
|
||||
public sealed record SurfaceManifestArtifact
|
||||
{
|
||||
[JsonPropertyName("kind")]
|
||||
[JsonPropertyOrder(0)]
|
||||
public string Kind { get; init; } = string.Empty;
|
||||
|
||||
[JsonPropertyName("uri")]
|
||||
[JsonPropertyOrder(1)]
|
||||
public string Uri { get; init; } = string.Empty;
|
||||
|
||||
[JsonPropertyName("digest")]
|
||||
[JsonPropertyOrder(2)]
|
||||
public string Digest { get; init; } = string.Empty;
|
||||
|
||||
[JsonPropertyName("mediaType")]
|
||||
[JsonPropertyOrder(3)]
|
||||
public string MediaType { get; init; } = string.Empty;
|
||||
|
||||
[JsonPropertyName("format")]
|
||||
[JsonPropertyOrder(4)]
|
||||
public string Format { get; init; } = string.Empty;
|
||||
|
||||
[JsonPropertyName("sizeBytes")]
|
||||
[JsonPropertyOrder(5)]
|
||||
public long SizeBytes { get; init; }
|
||||
= 0;
|
||||
|
||||
[JsonPropertyName("view")]
|
||||
[JsonPropertyOrder(6)]
|
||||
[JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
|
||||
public string? View { get; init; }
|
||||
= null;
|
||||
}
|
||||
@@ -1,12 +1,16 @@
|
||||
using System.Diagnostics;
|
||||
using System.Collections.Generic;
|
||||
using System.Diagnostics;
|
||||
using System.Text;
|
||||
using System.Text.Json;
|
||||
using Microsoft.AspNetCore.Builder;
|
||||
using Microsoft.AspNetCore.Http;
|
||||
using Microsoft.AspNetCore.Routing;
|
||||
using Microsoft.Extensions.Options;
|
||||
using StellaOps.Scanner.WebService.Diagnostics;
|
||||
using StellaOps.Scanner.WebService.Options;
|
||||
using Microsoft.Extensions.Logging;
|
||||
using Microsoft.Extensions.Options;
|
||||
using StellaOps.Scanner.WebService.Diagnostics;
|
||||
using StellaOps.Scanner.WebService.Options;
|
||||
using StellaOps.Scanner.Surface.Env;
|
||||
using StellaOps.Scanner.Surface.Validation;
|
||||
|
||||
namespace StellaOps.Scanner.WebService.Endpoints;
|
||||
|
||||
@@ -56,27 +60,69 @@ internal static class HealthEndpoints
|
||||
return Json(document, StatusCodes.Status200OK);
|
||||
}
|
||||
|
||||
private static async Task<IResult> HandleReady(
|
||||
ServiceStatus status,
|
||||
HttpContext context,
|
||||
CancellationToken cancellationToken)
|
||||
{
|
||||
ApplyNoCache(context.Response);
|
||||
|
||||
await Task.CompletedTask;
|
||||
|
||||
status.RecordReadyCheck(success: true, latency: TimeSpan.Zero, error: null);
|
||||
var snapshot = status.CreateSnapshot();
|
||||
var ready = snapshot.Ready;
|
||||
|
||||
var document = new ReadyDocument(
|
||||
Status: ready.IsReady ? "ready" : "unready",
|
||||
CheckedAt: ready.CheckedAt,
|
||||
LatencyMs: ready.Latency?.TotalMilliseconds,
|
||||
Error: ready.Error);
|
||||
|
||||
return Json(document, StatusCodes.Status200OK);
|
||||
}
|
||||
private static async Task<IResult> HandleReady(
|
||||
ServiceStatus status,
|
||||
ISurfaceValidatorRunner validatorRunner,
|
||||
ISurfaceEnvironment surfaceEnvironment,
|
||||
ILoggerFactory loggerFactory,
|
||||
HttpContext context,
|
||||
CancellationToken cancellationToken)
|
||||
{
|
||||
ApplyNoCache(context.Response);
|
||||
|
||||
ArgumentNullException.ThrowIfNull(loggerFactory);
|
||||
|
||||
var logger = loggerFactory.CreateLogger("Scanner.WebService.Health");
|
||||
var stopwatch = Stopwatch.StartNew();
|
||||
var success = true;
|
||||
string? error = null;
|
||||
|
||||
try
|
||||
{
|
||||
var validationContext = SurfaceValidationContext.Create(
|
||||
context.RequestServices,
|
||||
"Scanner.WebService.ReadyCheck",
|
||||
surfaceEnvironment.Settings,
|
||||
properties: new Dictionary<string, object?>
|
||||
{
|
||||
["path"] = context.Request.Path.ToString()
|
||||
});
|
||||
|
||||
await validatorRunner.EnsureAsync(validationContext, cancellationToken).ConfigureAwait(false);
|
||||
}
|
||||
catch (OperationCanceledException) when (cancellationToken.IsCancellationRequested)
|
||||
{
|
||||
throw;
|
||||
}
|
||||
catch (SurfaceValidationException ex)
|
||||
{
|
||||
success = false;
|
||||
error = ex.Message;
|
||||
}
|
||||
catch (Exception ex)
|
||||
{
|
||||
success = false;
|
||||
error = ex.Message;
|
||||
logger.LogError(ex, "Surface validation failed during ready check.");
|
||||
}
|
||||
finally
|
||||
{
|
||||
stopwatch.Stop();
|
||||
}
|
||||
|
||||
status.RecordReadyCheck(success, stopwatch.Elapsed, error);
|
||||
var snapshot = status.CreateSnapshot();
|
||||
var ready = snapshot.Ready;
|
||||
|
||||
var document = new ReadyDocument(
|
||||
Status: ready.IsReady ? "ready" : "unready",
|
||||
CheckedAt: ready.CheckedAt,
|
||||
LatencyMs: ready.Latency?.TotalMilliseconds,
|
||||
Error: ready.Error);
|
||||
|
||||
var statusCode = success ? StatusCodes.Status200OK : StatusCodes.Status503ServiceUnavailable;
|
||||
return Json(document, statusCode);
|
||||
}
|
||||
|
||||
private static void ApplyNoCache(HttpResponse response)
|
||||
{
|
||||
|
||||
@@ -1,17 +1,18 @@
|
||||
using System.Collections.Generic;
|
||||
using System.Linq;
|
||||
using System.Security.Cryptography;
|
||||
using System.Text;
|
||||
using System.Text.Json;
|
||||
using System.Text.Json.Serialization;
|
||||
using Microsoft.AspNetCore.Http;
|
||||
using Microsoft.AspNetCore.Routing;
|
||||
using StellaOps.Policy;
|
||||
using StellaOps.Scanner.WebService.Constants;
|
||||
using StellaOps.Scanner.WebService.Contracts;
|
||||
using StellaOps.Scanner.WebService.Infrastructure;
|
||||
using StellaOps.Scanner.WebService.Security;
|
||||
using StellaOps.Scanner.WebService.Services;
|
||||
using System.Security.Cryptography;
|
||||
using System.Text;
|
||||
using System.Text.Json;
|
||||
using System.Text.Json.Serialization;
|
||||
using Microsoft.AspNetCore.Http;
|
||||
using Microsoft.AspNetCore.Routing;
|
||||
using Microsoft.Extensions.Logging;
|
||||
using StellaOps.Policy;
|
||||
using StellaOps.Scanner.WebService.Constants;
|
||||
using StellaOps.Scanner.WebService.Contracts;
|
||||
using StellaOps.Scanner.WebService.Infrastructure;
|
||||
using StellaOps.Scanner.WebService.Security;
|
||||
using StellaOps.Scanner.WebService.Services;
|
||||
|
||||
namespace StellaOps.Scanner.WebService.Endpoints;
|
||||
|
||||
@@ -49,25 +50,30 @@ internal static class ReportEndpoints
|
||||
});
|
||||
}
|
||||
|
||||
private static async Task<IResult> HandleCreateReportAsync(
|
||||
ReportRequestDto request,
|
||||
PolicyPreviewService previewService,
|
||||
IReportSigner signer,
|
||||
TimeProvider timeProvider,
|
||||
IReportEventDispatcher eventDispatcher,
|
||||
HttpContext context,
|
||||
CancellationToken cancellationToken)
|
||||
{
|
||||
ArgumentNullException.ThrowIfNull(request);
|
||||
ArgumentNullException.ThrowIfNull(previewService);
|
||||
ArgumentNullException.ThrowIfNull(signer);
|
||||
ArgumentNullException.ThrowIfNull(timeProvider);
|
||||
ArgumentNullException.ThrowIfNull(eventDispatcher);
|
||||
|
||||
if (string.IsNullOrWhiteSpace(request.ImageDigest))
|
||||
{
|
||||
return ProblemResultFactory.Create(
|
||||
context,
|
||||
private static async Task<IResult> HandleCreateReportAsync(
|
||||
ReportRequestDto request,
|
||||
PolicyPreviewService previewService,
|
||||
IReportSigner signer,
|
||||
TimeProvider timeProvider,
|
||||
IReportEventDispatcher eventDispatcher,
|
||||
ISurfacePointerService surfacePointerService,
|
||||
ILoggerFactory loggerFactory,
|
||||
HttpContext context,
|
||||
CancellationToken cancellationToken)
|
||||
{
|
||||
ArgumentNullException.ThrowIfNull(request);
|
||||
ArgumentNullException.ThrowIfNull(previewService);
|
||||
ArgumentNullException.ThrowIfNull(signer);
|
||||
ArgumentNullException.ThrowIfNull(timeProvider);
|
||||
ArgumentNullException.ThrowIfNull(eventDispatcher);
|
||||
ArgumentNullException.ThrowIfNull(surfacePointerService);
|
||||
ArgumentNullException.ThrowIfNull(loggerFactory);
|
||||
var logger = loggerFactory.CreateLogger("Scanner.WebService.Reports");
|
||||
|
||||
if (string.IsNullOrWhiteSpace(request.ImageDigest))
|
||||
{
|
||||
return ProblemResultFactory.Create(
|
||||
context,
|
||||
ProblemTypes.Validation,
|
||||
"Invalid report request",
|
||||
StatusCodes.Status400BadRequest,
|
||||
@@ -127,26 +133,46 @@ internal static class ReportEndpoints
|
||||
.ToArray();
|
||||
|
||||
var issuesDto = preview.Issues.Select(PolicyDtoMapper.ToIssueDto).ToArray();
|
||||
var summary = BuildSummary(projectedVerdicts);
|
||||
var verdict = ComputeVerdict(projectedVerdicts);
|
||||
var reportId = CreateReportId(request.ImageDigest!, preview.PolicyDigest);
|
||||
var generatedAt = timeProvider.GetUtcNow();
|
||||
|
||||
var document = new ReportDocumentDto
|
||||
{
|
||||
ReportId = reportId,
|
||||
ImageDigest = request.ImageDigest!,
|
||||
GeneratedAt = generatedAt,
|
||||
Verdict = verdict,
|
||||
Policy = new ReportPolicyDto
|
||||
{
|
||||
RevisionId = preview.RevisionId,
|
||||
Digest = preview.PolicyDigest
|
||||
},
|
||||
Summary = summary,
|
||||
Verdicts = projectedVerdicts,
|
||||
Issues = issuesDto
|
||||
};
|
||||
var summary = BuildSummary(projectedVerdicts);
|
||||
var verdict = ComputeVerdict(projectedVerdicts);
|
||||
var reportId = CreateReportId(request.ImageDigest!, preview.PolicyDigest);
|
||||
var generatedAt = timeProvider.GetUtcNow();
|
||||
SurfacePointersDto? surfacePointers = null;
|
||||
|
||||
try
|
||||
{
|
||||
surfacePointers = await surfacePointerService
|
||||
.TryBuildAsync(request.ImageDigest!, context.RequestAborted)
|
||||
.ConfigureAwait(false);
|
||||
}
|
||||
catch (OperationCanceledException) when (context.RequestAborted.IsCancellationRequested)
|
||||
{
|
||||
throw;
|
||||
}
|
||||
catch (Exception ex)
|
||||
{
|
||||
if (!context.RequestAborted.IsCancellationRequested)
|
||||
{
|
||||
logger.LogDebug(ex, "Failed to build surface pointers for digest {Digest}.", request.ImageDigest);
|
||||
}
|
||||
}
|
||||
|
||||
var document = new ReportDocumentDto
|
||||
{
|
||||
ReportId = reportId,
|
||||
ImageDigest = request.ImageDigest!,
|
||||
GeneratedAt = generatedAt,
|
||||
Verdict = verdict,
|
||||
Policy = new ReportPolicyDto
|
||||
{
|
||||
RevisionId = preview.RevisionId,
|
||||
Digest = preview.PolicyDigest
|
||||
},
|
||||
Summary = summary,
|
||||
Verdicts = projectedVerdicts,
|
||||
Issues = issuesDto,
|
||||
Surface = surfacePointers
|
||||
};
|
||||
|
||||
var payloadBytes = JsonSerializer.SerializeToUtf8Bytes(document, SerializerOptions);
|
||||
var signature = signer.Sign(payloadBytes);
|
||||
@@ -169,11 +195,11 @@ internal static class ReportEndpoints
|
||||
};
|
||||
}
|
||||
|
||||
var response = new ReportResponseDto
|
||||
{
|
||||
Report = document,
|
||||
Dsse = envelope
|
||||
};
|
||||
var response = new ReportResponseDto
|
||||
{
|
||||
Report = document,
|
||||
Dsse = envelope
|
||||
};
|
||||
|
||||
await eventDispatcher
|
||||
.PublishAsync(request, preview, document, envelope, context, cancellationToken)
|
||||
|
||||
@@ -140,10 +140,12 @@ internal static class ScanEndpoints
|
||||
private static async Task<IResult> HandleStatusAsync(
|
||||
string scanId,
|
||||
IScanCoordinator coordinator,
|
||||
ISurfacePointerService surfacePointerService,
|
||||
HttpContext context,
|
||||
CancellationToken cancellationToken)
|
||||
{
|
||||
ArgumentNullException.ThrowIfNull(coordinator);
|
||||
ArgumentNullException.ThrowIfNull(surfacePointerService);
|
||||
|
||||
if (!ScanId.TryParse(scanId, out var parsed))
|
||||
{
|
||||
@@ -163,7 +165,23 @@ internal static class ScanEndpoints
|
||||
ProblemTypes.NotFound,
|
||||
"Scan not found",
|
||||
StatusCodes.Status404NotFound,
|
||||
detail: "Requested scan could not be located.");
|
||||
detail: "Requested scan could not be located.");
|
||||
}
|
||||
|
||||
SurfacePointersDto? surfacePointers = null;
|
||||
var digest = snapshot.Target.Digest;
|
||||
if (!string.IsNullOrWhiteSpace(digest))
|
||||
{
|
||||
try
|
||||
{
|
||||
surfacePointers = await surfacePointerService
|
||||
.TryBuildAsync(digest!, context.RequestAborted)
|
||||
.ConfigureAwait(false);
|
||||
}
|
||||
catch (OperationCanceledException) when (context.RequestAborted.IsCancellationRequested)
|
||||
{
|
||||
throw;
|
||||
}
|
||||
}
|
||||
|
||||
var response = new ScanStatusResponse(
|
||||
@@ -172,7 +190,8 @@ internal static class ScanEndpoints
|
||||
Image: new ScanStatusTarget(snapshot.Target.Reference, snapshot.Target.Digest),
|
||||
CreatedAt: snapshot.CreatedAt,
|
||||
UpdatedAt: snapshot.UpdatedAt,
|
||||
FailureReason: snapshot.FailureReason);
|
||||
FailureReason: snapshot.FailureReason,
|
||||
Surface: surfacePointers);
|
||||
|
||||
return Json(response, StatusCodes.Status200OK);
|
||||
}
|
||||
|
||||
@@ -1,5 +1,6 @@
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using StellaOps.Scanner.Storage;
|
||||
|
||||
namespace StellaOps.Scanner.WebService.Options;
|
||||
|
||||
@@ -129,6 +130,8 @@ public sealed class ScannerWebServiceOptions
|
||||
|
||||
public int ObjectLockRetentionDays { get; set; } = 30;
|
||||
|
||||
public string RootPrefix { get; set; } = ScannerStorageDefaults.DefaultRootPrefix;
|
||||
|
||||
public string? ApiKey { get; set; }
|
||||
|
||||
public string ApiKeyHeader { get; set; } = string.Empty;
|
||||
|
||||
@@ -19,6 +19,10 @@ using StellaOps.Cryptography.DependencyInjection;
|
||||
using StellaOps.Cryptography.Plugin.BouncyCastle;
|
||||
using StellaOps.Policy;
|
||||
using StellaOps.Scanner.Cache;
|
||||
using StellaOps.Scanner.Surface.Env;
|
||||
using StellaOps.Scanner.Surface.FS;
|
||||
using StellaOps.Scanner.Surface.Secrets;
|
||||
using StellaOps.Scanner.Surface.Validation;
|
||||
using StellaOps.Scanner.WebService.Diagnostics;
|
||||
using StellaOps.Scanner.WebService.Endpoints;
|
||||
using StellaOps.Scanner.WebService.Extensions;
|
||||
@@ -80,11 +84,20 @@ builder.Services.AddSingleton<IScanProgressReader>(sp => sp.GetRequiredService<S
|
||||
builder.Services.AddSingleton<IScanCoordinator, InMemoryScanCoordinator>();
|
||||
builder.Services.AddSingleton<IPolicySnapshotRepository, InMemoryPolicySnapshotRepository>();
|
||||
builder.Services.AddSingleton<IPolicyAuditRepository, InMemoryPolicyAuditRepository>();
|
||||
builder.Services.AddSingleton<PolicySnapshotStore>();
|
||||
builder.Services.AddSingleton<PolicyPreviewService>();
|
||||
builder.Services.AddStellaOpsCrypto();
|
||||
builder.Services.AddBouncyCastleEd25519Provider();
|
||||
builder.Services.AddSingleton<PolicySnapshotStore>();
|
||||
builder.Services.AddSingleton<PolicyPreviewService>();
|
||||
builder.Services.AddStellaOpsCrypto();
|
||||
builder.Services.AddBouncyCastleEd25519Provider();
|
||||
builder.Services.AddSingleton<IReportSigner, ReportSigner>();
|
||||
builder.Services.AddSurfaceEnvironment(options =>
|
||||
{
|
||||
options.ComponentName = "Scanner.WebService";
|
||||
options.AddPrefix("SCANNER");
|
||||
});
|
||||
builder.Services.AddSurfaceValidation();
|
||||
builder.Services.AddSurfaceFileCache();
|
||||
builder.Services.AddSurfaceSecrets();
|
||||
builder.Services.AddSingleton<ISurfacePointerService, SurfacePointerService>();
|
||||
builder.Services.AddSingleton<IRedisConnectionFactory, RedisConnectionFactory>();
|
||||
if (bootstrapOptions.Events is { Enabled: true } eventsOptions
|
||||
&& string.Equals(eventsOptions.Driver, "redis", StringComparison.OrdinalIgnoreCase))
|
||||
@@ -119,6 +132,11 @@ builder.Services.AddScannerStorage(storageOptions =>
|
||||
storageOptions.ObjectStore.BucketName = bootstrapOptions.ArtifactStore.Bucket;
|
||||
}
|
||||
|
||||
if (!string.IsNullOrWhiteSpace(bootstrapOptions.ArtifactStore.RootPrefix))
|
||||
{
|
||||
storageOptions.ObjectStore.RootPrefix = bootstrapOptions.ArtifactStore.RootPrefix;
|
||||
}
|
||||
|
||||
var artifactDriver = bootstrapOptions.ArtifactStore.Driver?.Trim() ?? string.Empty;
|
||||
if (string.Equals(artifactDriver, ScannerStorageDefaults.ObjectStoreProviders.RustFs, StringComparison.OrdinalIgnoreCase))
|
||||
{
|
||||
|
||||
@@ -207,10 +207,7 @@ internal static class OrchestratorEventSerializer
            return;
        }

        info.PolymorphismOptions ??= new JsonPolymorphismOptions
        {
            UnknownDerivedTypeHandling = JsonUnknownDerivedTypeHandling.Fail
        };
        info.PolymorphismOptions ??= new JsonPolymorphismOptions();

        AddDerivedType(info.PolymorphismOptions, typeof(ReportReadyEventPayload));
        AddDerivedType(info.PolymorphismOptions, typeof(ScanCompletedEventPayload));

@@ -0,0 +1,279 @@
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.Collections.Immutable;
|
||||
using System.Linq;
|
||||
using System.Security.Cryptography;
|
||||
using System.Text.Json;
|
||||
using System.Text.Json.Serialization;
|
||||
using Microsoft.Extensions.Logging;
|
||||
using Microsoft.Extensions.Options;
|
||||
using StellaOps.Scanner.Storage;
|
||||
using StellaOps.Scanner.Storage.Catalog;
|
||||
using StellaOps.Scanner.Storage.ObjectStore;
|
||||
using StellaOps.Scanner.Storage.Repositories;
|
||||
using StellaOps.Scanner.Surface.Env;
|
||||
using StellaOps.Scanner.WebService.Contracts;
|
||||
using StellaOps.Scanner.WebService.Options;
|
||||
|
||||
namespace StellaOps.Scanner.WebService.Services;
|
||||
|
||||
internal interface ISurfacePointerService
|
||||
{
|
||||
Task<SurfacePointersDto?> TryBuildAsync(string imageDigest, CancellationToken cancellationToken);
|
||||
}
|
||||
|
||||
internal sealed class SurfacePointerService : ISurfacePointerService
|
||||
{
|
||||
private static readonly JsonSerializerOptions ManifestSerializerOptions = new(JsonSerializerDefaults.Web)
|
||||
{
|
||||
DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingNull,
|
||||
WriteIndented = false
|
||||
};
|
||||
|
||||
private readonly LinkRepository _linkRepository;
|
||||
private readonly ArtifactRepository _artifactRepository;
|
||||
private readonly IOptionsMonitor<ScannerWebServiceOptions> _optionsMonitor;
|
||||
private readonly ISurfaceEnvironment _surfaceEnvironment;
|
||||
private readonly TimeProvider _timeProvider;
|
||||
private readonly ILogger<SurfacePointerService> _logger;
|
||||
|
||||
public SurfacePointerService(
|
||||
LinkRepository linkRepository,
|
||||
ArtifactRepository artifactRepository,
|
||||
IOptionsMonitor<ScannerWebServiceOptions> optionsMonitor,
|
||||
ISurfaceEnvironment surfaceEnvironment,
|
||||
TimeProvider timeProvider,
|
||||
ILogger<SurfacePointerService> logger)
|
||||
{
|
||||
_linkRepository = linkRepository ?? throw new ArgumentNullException(nameof(linkRepository));
|
||||
_artifactRepository = artifactRepository ?? throw new ArgumentNullException(nameof(artifactRepository));
|
||||
_optionsMonitor = optionsMonitor ?? throw new ArgumentNullException(nameof(optionsMonitor));
|
||||
_surfaceEnvironment = surfaceEnvironment ?? throw new ArgumentNullException(nameof(surfaceEnvironment));
|
||||
_timeProvider = timeProvider ?? throw new ArgumentNullException(nameof(timeProvider));
|
||||
_logger = logger ?? throw new ArgumentNullException(nameof(logger));
|
||||
}
|
||||
|
||||
public async Task<SurfacePointersDto?> TryBuildAsync(string imageDigest, CancellationToken cancellationToken)
|
||||
{
|
||||
if (string.IsNullOrWhiteSpace(imageDigest))
|
||||
{
|
||||
return null;
|
||||
}
|
||||
|
||||
var normalizedDigest = imageDigest.Trim();
|
||||
|
||||
List<LinkDocument> links;
|
||||
try
|
||||
{
|
||||
links = await _linkRepository.ListBySourceAsync(LinkSourceType.Image, normalizedDigest, cancellationToken)
|
||||
.ConfigureAwait(false);
|
||||
}
|
||||
catch (Exception ex)
|
||||
{
|
||||
_logger.LogWarning(ex, "Failed to load link documents for digest {Digest}.", normalizedDigest);
|
||||
return null;
|
||||
}
|
||||
|
||||
if (links.Count == 0)
|
||||
{
|
||||
return null;
|
||||
}
|
||||
|
||||
var options = _optionsMonitor.CurrentValue ?? new ScannerWebServiceOptions();
|
||||
var artifactStore = options.ArtifactStore ?? new ScannerWebServiceOptions.ArtifactStoreOptions();
|
||||
var bucket = ResolveBucket(artifactStore);
|
||||
var rootPrefix = artifactStore.RootPrefix ?? ScannerStorageDefaults.DefaultRootPrefix;
|
||||
var tenant = _surfaceEnvironment.Settings.Tenant;
|
||||
var generatedAt = _timeProvider.GetUtcNow();
|
||||
|
||||
var artifacts = ImmutableArray.CreateBuilder<SurfaceManifestArtifact>();
|
||||
|
||||
foreach (var link in links)
|
||||
{
|
||||
cancellationToken.ThrowIfCancellationRequested();
|
||||
|
||||
ArtifactDocument? artifactDocument;
|
||||
try
|
||||
{
|
||||
artifactDocument = await _artifactRepository.GetAsync(link.ArtifactId, cancellationToken).ConfigureAwait(false);
|
||||
}
|
||||
catch (Exception ex)
|
||||
{
|
||||
_logger.LogWarning(ex, "Failed to load artifact document {ArtifactId}.", link.ArtifactId);
|
||||
continue;
|
||||
}
|
||||
|
||||
if (artifactDocument is null)
|
||||
{
|
||||
continue;
|
||||
}
|
||||
|
||||
var objectKey = ArtifactObjectKeyBuilder.Build(
|
||||
artifactDocument.Type,
|
||||
artifactDocument.Format,
|
||||
artifactDocument.BytesSha256,
|
||||
rootPrefix);
|
||||
var uri = BuildCasUri(bucket, objectKey);
|
||||
var (kind, view) = MapKindAndView(artifactDocument);
|
||||
var format = MapFormat(artifactDocument.Format);
|
||||
var artifact = new SurfaceManifestArtifact
|
||||
{
|
||||
Kind = kind,
|
||||
Uri = uri,
|
||||
Digest = artifactDocument.BytesSha256,
|
||||
MediaType = artifactDocument.MediaType,
|
||||
Format = format,
|
||||
SizeBytes = artifactDocument.SizeBytes,
|
||||
View = view
|
||||
};
|
||||
artifacts.Add(artifact);
|
||||
}
|
||||
|
||||
if (artifacts.Count == 0)
|
||||
{
|
||||
return null;
|
||||
}
|
||||
|
||||
var orderedArtifacts = artifacts.OrderBy(a => a.Kind, StringComparer.Ordinal)
|
||||
.ThenBy(a => a.Format, StringComparer.Ordinal)
|
||||
.ThenBy(a => a.Digest, StringComparer.Ordinal)
|
||||
.ToImmutableArray();
|
||||
|
||||
var manifest = new SurfaceManifestDocument
|
||||
{
|
||||
Tenant = tenant,
|
||||
ImageDigest = normalizedDigest,
|
||||
GeneratedAt = generatedAt,
|
||||
Artifacts = orderedArtifacts
|
||||
};
|
||||
|
||||
var manifestJson = JsonSerializer.SerializeToUtf8Bytes(manifest, ManifestSerializerOptions);
|
||||
var manifestDigest = ComputeDigest(manifestJson);
|
||||
var manifestUri = BuildManifestUri(bucket, rootPrefix, tenant, manifestDigest);
|
||||
|
||||
return new SurfacePointersDto
|
||||
{
|
||||
Tenant = tenant,
|
||||
GeneratedAt = generatedAt,
|
||||
ManifestDigest = manifestDigest,
|
||||
ManifestUri = manifestUri,
|
||||
Manifest = manifest with { GeneratedAt = generatedAt }
|
||||
};
|
||||
}
|
||||
|
||||
private static string ResolveBucket(ScannerWebServiceOptions.ArtifactStoreOptions artifactStore)
|
||||
{
|
||||
if (!string.IsNullOrWhiteSpace(artifactStore.Bucket))
|
||||
{
|
||||
return artifactStore.Bucket.Trim();
|
||||
}
|
||||
|
||||
return ScannerStorageDefaults.DefaultBucketName;
|
||||
}
|
||||
|
||||
private static string MapFormat(ArtifactDocumentFormat format)
|
||||
=> format switch
|
||||
{
|
||||
ArtifactDocumentFormat.CycloneDxJson => "cdx-json",
|
||||
ArtifactDocumentFormat.CycloneDxProtobuf => "cdx-protobuf",
|
||||
ArtifactDocumentFormat.SpdxJson => "spdx-json",
|
||||
ArtifactDocumentFormat.BomIndex => "bom-index",
|
||||
ArtifactDocumentFormat.DsseJson => "dsse-json",
|
||||
_ => format.ToString().ToLowerInvariant()
|
||||
};
|
||||
|
||||
private static (string Kind, string? View) MapKindAndView(ArtifactDocument document)
|
||||
{
|
||||
if (document.Type == ArtifactDocumentType.ImageBom)
|
||||
{
|
||||
var view = ResolveView(document.MediaType);
|
||||
var kind = string.Equals(view, "usage", StringComparison.OrdinalIgnoreCase)
|
||||
? "sbom-usage"
|
||||
: "sbom-inventory";
|
||||
return (kind, view);
|
||||
}
|
||||
|
||||
return document.Type switch
|
||||
{
|
||||
ArtifactDocumentType.LayerBom => ("layer-sbom", null),
|
||||
ArtifactDocumentType.Diff => ("diff", null),
|
||||
ArtifactDocumentType.Attestation => ("attestation", null),
|
||||
ArtifactDocumentType.Index => ("bom-index", null),
|
||||
_ => (document.Type.ToString().ToLowerInvariant(), null)
|
||||
};
|
||||
}
|
||||
|
||||
private static string? ResolveView(string mediaType)
|
||||
{
|
||||
if (string.IsNullOrWhiteSpace(mediaType))
|
||||
{
|
||||
return null;
|
||||
}
|
||||
|
||||
if (mediaType.Contains("view=usage", StringComparison.OrdinalIgnoreCase))
|
||||
{
|
||||
return "usage";
|
||||
}
|
||||
|
||||
if (mediaType.Contains("view=inventory", StringComparison.OrdinalIgnoreCase))
|
||||
{
|
||||
return "inventory";
|
||||
}
|
||||
|
||||
return null;
|
||||
}
|
||||
|
||||
private static string BuildCasUri(string bucket, string key)
|
||||
{
|
||||
var normalizedKey = string.IsNullOrWhiteSpace(key) ? string.Empty : key.Trim().TrimStart('/');
|
||||
return $"cas://{bucket}/{normalizedKey}";
|
||||
}
|
||||
|
||||
private static string BuildManifestUri(string bucket, string rootPrefix, string tenant, string manifestDigest)
|
||||
{
|
||||
var (algorithm, digestValue) = SplitDigest(manifestDigest);
|
||||
var prefix = string.IsNullOrWhiteSpace(rootPrefix)
|
||||
? "surface/manifests"
|
||||
: $"{TrimTrailingSlash(rootPrefix)}/surface/manifests";
|
||||
|
||||
var head = digestValue.Length >= 4
|
||||
? $"{digestValue[..2]}/{digestValue[2..4]}"
|
||||
: digestValue;
|
||||
|
||||
var key = $"{prefix}/{tenant}/{algorithm}/{head}/{digestValue}.json";
|
||||
return $"cas://{bucket}/{key}";
|
||||
}
|
||||
|
||||
private static (string Algorithm, string Digest) SplitDigest(string digest)
|
||||
{
|
||||
if (string.IsNullOrWhiteSpace(digest))
|
||||
{
|
||||
return ("sha256", digest ?? string.Empty);
|
||||
}
|
||||
|
||||
var parts = digest.Split(':', 2, StringSplitOptions.TrimEntries);
|
||||
if (parts.Length == 2)
|
||||
{
|
||||
return (parts[0], parts[1]);
|
||||
}
|
||||
|
||||
return ("sha256", digest);
|
||||
}
|
||||
|
||||
private static string TrimTrailingSlash(string value)
|
||||
=> string.IsNullOrWhiteSpace(value)
|
||||
? string.Empty
|
||||
: value.Trim().TrimEnd('/');
|
||||
|
||||
private static string ComputeDigest(ReadOnlySpan<byte> payload)
|
||||
{
|
||||
Span<byte> hash = stackalloc byte[32];
|
||||
if (!SHA256.TryHashData(payload, hash, out _))
|
||||
{
|
||||
using var sha = SHA256.Create();
|
||||
hash = sha.ComputeHash(payload.ToArray());
|
||||
}
|
||||
|
||||
return $"sha256:{Convert.ToHexString(hash).ToLowerInvariant()}";
|
||||
}
|
||||
}
|
||||
@@ -29,6 +29,10 @@
    <ProjectReference Include="../../Notify/__Libraries/StellaOps.Notify.Models/StellaOps.Notify.Models.csproj" />
    <ProjectReference Include="../__Libraries/StellaOps.Scanner.Cache/StellaOps.Scanner.Cache.csproj" />
    <ProjectReference Include="../__Libraries/StellaOps.Scanner.Storage/StellaOps.Scanner.Storage.csproj" />
    <ProjectReference Include="../__Libraries/StellaOps.Scanner.Surface.Env/StellaOps.Scanner.Surface.Env.csproj" />
    <ProjectReference Include="../__Libraries/StellaOps.Scanner.Surface.Validation/StellaOps.Scanner.Surface.Validation.csproj" />
    <ProjectReference Include="../__Libraries/StellaOps.Scanner.Surface.FS/StellaOps.Scanner.Surface.FS.csproj" />
    <ProjectReference Include="../__Libraries/StellaOps.Scanner.Surface.Secrets/StellaOps.Scanner.Surface.Secrets.csproj" />
    <ProjectReference Include="../../Zastava/__Libraries/StellaOps.Zastava.Core/StellaOps.Zastava.Core.csproj" />
  </ItemGroup>
</Project>
</Project>

@@ -3,7 +3,7 @@
|
||||
| ID | Status | Owner(s) | Depends on | Description | Exit Criteria |
|
||||
|----|--------|----------|------------|-------------|---------------|
|
||||
| SCAN-REPLAY-186-001 | TODO | Scanner WebService Guild | REPLAY-CORE-185-001 | Implement scan `record` mode producing replay manifests/bundles, capture policy/feed/tool hashes, and update `docs/modules/scanner/architecture.md` referencing `docs/replay/DETERMINISTIC_REPLAY.md` Section 6. | API/worker integration tests cover record mode; docs merged; replay artifacts stored per spec. |
|
||||
| SCANNER-SURFACE-02 | DOING (2025-11-02) | Scanner WebService Guild | SURFACE-FS-02 | Publish Surface.FS pointers (CAS URIs, manifests) via scan/report APIs and update attestation metadata.<br>2025-11-02: Scan/report API responses now include preview CAS URIs; attestation metadata draft published. | OpenAPI updated; clients regenerated; integration tests validate pointer presence and tenancy. |
|
||||
| SCANNER-SURFACE-02 | DONE (2025-11-05) | Scanner WebService Guild | SURFACE-FS-02 | Publish Surface.FS pointers (CAS URIs, manifests) via scan/report APIs and update attestation metadata.<br>2025-11-05: Surface pointers projected through scan/report endpoints, orchestrator samples + DSSE fixtures refreshed with manifest block, readiness tests updated to use validator stub. | OpenAPI updated; clients regenerated; integration tests validate pointer presence and tenancy. |
|
||||
| SCANNER-ENV-02 | DOING (2025-11-02) | Scanner WebService Guild, Ops Guild | SURFACE-ENV-02 | Wire Surface.Env helpers into WebService hosting (cache roots, feature flags) and document configuration.<br>2025-11-02: Cache root resolution switched to helper; feature flag bindings updated; Helm/Compose updates pending review. | Service uses helper; env table documented; helm/compose templates updated. |
|
||||
| SCANNER-SECRETS-02 | DOING (2025-11-02) | Scanner WebService Guild, Security Guild | SURFACE-SECRETS-02 | Replace ad-hoc secret wiring with Surface.Secrets for report/export operations (registry and CAS tokens).<br>2025-11-02: Export/report flows now depend on Surface.Secrets stub; integration tests in progress. | Secrets fetched through shared provider; unit/integration tests cover rotation + failure cases. |
|
||||
| SCANNER-EVENTS-16-301 | BLOCKED (2025-10-26) | Scanner WebService Guild | ORCH-SVC-38-101, NOTIFY-SVC-38-001 | Emit orchestrator-compatible envelopes (`scanner.event.*`) and update integration tests to verify Notifier ingestion (no Redis queue coupling). | Tests assert envelope schema + orchestrator publish; Notifier consumer harness passes; docs updated with new event contract. Blocked by .NET 10 preview OpenAPI/Auth dependency drift preventing `dotnet test` completion. |
|
||||
|
||||
@@ -0,0 +1,75 @@
using System;
using StellaOps.Scanner.Storage.Catalog;

namespace StellaOps.Scanner.Storage.ObjectStore;

/// <summary>
/// Builds deterministic object keys for scanner artefacts stored in the backing object store.
/// </summary>
public static class ArtifactObjectKeyBuilder
{
/// <summary>
/// Builds an object key for the provided artefact metadata.
/// </summary>
/// <param name="type">Artefact type.</param>
/// <param name="format">Artefact format.</param>
/// <param name="digest">Content digest (with or without algorithm prefix).</param>
/// <param name="rootPrefix">Optional root prefix to prepend; when null or whitespace no prefix is added.</param>
/// <returns>Deterministic storage key.</returns>
public static string Build(
ArtifactDocumentType type,
ArtifactDocumentFormat format,
string digest,
string? rootPrefix = null)
{
ArgumentException.ThrowIfNullOrWhiteSpace(digest);

var normalizedDigest = NormalizeDigest(digest);
var digestValue = ExtractDigest(normalizedDigest);

var prefix = type switch
{
ArtifactDocumentType.LayerBom => ScannerStorageDefaults.ObjectPrefixes.Layers,
ArtifactDocumentType.ImageBom => ScannerStorageDefaults.ObjectPrefixes.Images,
ArtifactDocumentType.Index => ScannerStorageDefaults.ObjectPrefixes.Indexes,
ArtifactDocumentType.Attestation => ScannerStorageDefaults.ObjectPrefixes.Attestations,
ArtifactDocumentType.Diff => "diffs",
_ => ScannerStorageDefaults.ObjectPrefixes.Images,
};

var extension = format switch
{
ArtifactDocumentFormat.CycloneDxJson => "sbom.cdx.json",
ArtifactDocumentFormat.CycloneDxProtobuf => "sbom.cdx.pb",
ArtifactDocumentFormat.SpdxJson => "sbom.spdx.json",
ArtifactDocumentFormat.BomIndex => "bom-index.bin",
ArtifactDocumentFormat.DsseJson => "artifact.dsse.json",
_ => "artifact.bin",
};

var key = $"{prefix}/{digestValue}/{extension}";

if (string.IsNullOrWhiteSpace(rootPrefix))
{
return key;
}

return $"{TrimTrailingSlash(rootPrefix)}/{key}";
}

private static string NormalizeDigest(string digest)
=> digest.Contains(':', StringComparison.Ordinal)
? digest.Trim()
: $"sha256:{digest.Trim()}";

private static string ExtractDigest(string normalizedDigest)
{
var parts = normalizedDigest.Split(':', 2, StringSplitOptions.TrimEntries);
return parts.Length == 2 ? parts[1] : normalizedDigest;
}

private static string TrimTrailingSlash(string value)
=> string.IsNullOrWhiteSpace(value)
? string.Empty
: value.Trim().TrimEnd('/');
}
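
For orientation, a minimal usage sketch of the builder above. It assumes `ScannerStorageDefaults.ObjectPrefixes.Images` resolves to `images` and that the object store is configured with root prefix `scanner`; both assumptions are taken from the expected CAS URI asserted by the scan endpoint test elsewhere in this commit, not from the defaults class itself.

// Illustrative call only; the "scanner" root prefix and the "images" prefix value are assumptions.
var digest = "sha256:0123456789abcdef0123456789abcdef0123456789abcdef0123456789abcdef";
var key = ArtifactObjectKeyBuilder.Build(
ArtifactDocumentType.ImageBom,
ArtifactDocumentFormat.CycloneDxJson,
digest,
rootPrefix: "scanner");
// key == "scanner/images/0123456789abcdef0123456789abcdef0123456789abcdef0123456789abcdef/sbom.cdx.json"
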
@@ -50,8 +50,12 @@ public sealed class ArtifactStorageService
try
{
var normalizedDigest = $"sha256:{digestHex}";
var artifactId = CatalogIdFactory.CreateArtifactId(type, normalizedDigest);
var key = BuildObjectKey(type, format, normalizedDigest);
var artifactId = CatalogIdFactory.CreateArtifactId(type, normalizedDigest);
var key = ArtifactObjectKeyBuilder.Build(
type,
format,
normalizedDigest,
_options.ObjectStore.RootPrefix);
var descriptor = new ArtifactObjectDescriptor(
_options.ObjectStore.BucketName,
key,
@@ -137,45 +141,4 @@ public sealed class ArtifactStorageService
return (bufferStream, total, digestHex);
}

private string BuildObjectKey(ArtifactDocumentType type, ArtifactDocumentFormat format, string digest)
{
var normalizedDigest = digest.Split(':', 2, StringSplitOptions.TrimEntries)[^1];
var prefix = type switch
{
ArtifactDocumentType.LayerBom => ScannerStorageDefaults.ObjectPrefixes.Layers,
ArtifactDocumentType.ImageBom => ScannerStorageDefaults.ObjectPrefixes.Images,
ArtifactDocumentType.Diff => "diffs",
ArtifactDocumentType.Index => ScannerStorageDefaults.ObjectPrefixes.Indexes,
ArtifactDocumentType.Attestation => ScannerStorageDefaults.ObjectPrefixes.Attestations,
_ => ScannerStorageDefaults.ObjectPrefixes.Images,
};

var extension = format switch
{
ArtifactDocumentFormat.CycloneDxJson => "sbom.cdx.json",
ArtifactDocumentFormat.CycloneDxProtobuf => "sbom.cdx.pb",
ArtifactDocumentFormat.SpdxJson => "sbom.spdx.json",
ArtifactDocumentFormat.BomIndex => "bom-index.bin",
ArtifactDocumentFormat.DsseJson => "artifact.dsse.json",
_ => "artifact.bin",
};

var rootPrefix = _options.ObjectStore.RootPrefix;
if (string.IsNullOrWhiteSpace(rootPrefix))
{
return $"{prefix}/{normalizedDigest}/{extension}";
}

return $"{TrimTrailingSlash(rootPrefix)}/{prefix}/{normalizedDigest}/{extension}";
}

private static string TrimTrailingSlash(string prefix)
{
if (string.IsNullOrWhiteSpace(prefix))
{
return string.Empty;
}

return prefix.TrimEnd('/');
}
}
}

@@ -1,10 +1,13 @@
using System.Collections.Generic;
using Microsoft.AspNetCore.Hosting;
using Microsoft.AspNetCore.Mvc.Testing;
using Microsoft.AspNetCore.TestHost;
using Microsoft.Extensions.Configuration;
using Microsoft.Extensions.DependencyInjection;
using Mongo2Go;
using System.Collections.Generic;
using System.Threading.Tasks;
using Microsoft.AspNetCore.Hosting;
using Microsoft.AspNetCore.Mvc.Testing;
using Microsoft.AspNetCore.TestHost;
using Microsoft.Extensions.Configuration;
using Microsoft.Extensions.DependencyInjection;
using Microsoft.Extensions.DependencyInjection.Extensions;
using Mongo2Go;
using StellaOps.Scanner.Surface.Validation;

namespace StellaOps.Scanner.WebService.Tests;

@@ -56,14 +59,17 @@ internal sealed class ScannerApplicationFactory : WebApplicationFactory<Program>
Environment.SetEnvironmentVariable("SCANNER__AUTHORITY__CLIENTID", null);
Environment.SetEnvironmentVariable("SCANNER__AUTHORITY__CLIENTSECRET", null);
Environment.SetEnvironmentVariable("SCANNER__STORAGE__DSN", configuration["scanner:storage:dsn"]);
Environment.SetEnvironmentVariable("SCANNER__QUEUE__DSN", configuration["scanner:queue:dsn"]);
Environment.SetEnvironmentVariable("SCANNER__ARTIFACTSTORE__ENDPOINT", configuration["scanner:artifactStore:endpoint"]);
Environment.SetEnvironmentVariable("SCANNER__ARTIFACTSTORE__ACCESSKEY", configuration["scanner:artifactStore:accessKey"]);
Environment.SetEnvironmentVariable("SCANNER__ARTIFACTSTORE__SECRETKEY", configuration["scanner:artifactStore:secretKey"]);
if (configuration.TryGetValue("scanner:events:enabled", out var eventsEnabled))
{
Environment.SetEnvironmentVariable("SCANNER__EVENTS__ENABLED", eventsEnabled);
}
Environment.SetEnvironmentVariable("SCANNER__QUEUE__DSN", configuration["scanner:queue:dsn"]);
Environment.SetEnvironmentVariable("SCANNER__ARTIFACTSTORE__ENDPOINT", configuration["scanner:artifactStore:endpoint"]);
Environment.SetEnvironmentVariable("SCANNER__ARTIFACTSTORE__ACCESSKEY", configuration["scanner:artifactStore:accessKey"]);
Environment.SetEnvironmentVariable("SCANNER__ARTIFACTSTORE__SECRETKEY", configuration["scanner:artifactStore:secretKey"]);
Environment.SetEnvironmentVariable("SCANNER_SURFACE_FS_ENDPOINT", "https://surface.local");
Environment.SetEnvironmentVariable("SCANNER_SURFACE_FS_BUCKET", configuration["scanner:artifactStore:bucket"]);
Environment.SetEnvironmentVariable("SCANNER_SURFACE_PREFETCH_ENABLED", "false");
if (configuration.TryGetValue("scanner:events:enabled", out var eventsEnabled))
{
Environment.SetEnvironmentVariable("SCANNER__EVENTS__ENABLED", eventsEnabled);
}

if (configuration.TryGetValue("scanner:authority:enabled", out var authorityEnabled))
{
@@ -100,11 +106,13 @@ internal sealed class ScannerApplicationFactory : WebApplicationFactory<Program>
configBuilder.AddInMemoryCollection(configuration);
});

builder.ConfigureTestServices(services =>
{
configureServices?.Invoke(services);
});
}
builder.ConfigureTestServices(services =>
{
configureServices?.Invoke(services);
services.RemoveAll<ISurfaceValidatorRunner>();
services.AddSingleton<ISurfaceValidatorRunner, TestSurfaceValidatorRunner>();
});
}

protected override void Dispose(bool disposing)
{
@@ -163,6 +171,19 @@ internal sealed class ScannerApplicationFactory : WebApplicationFactory<Program>
current = parent.FullName;
}

return null;
}
}
return null;
}

private sealed class TestSurfaceValidatorRunner : ISurfaceValidatorRunner
{
public ValueTask<SurfaceValidationResult> RunAllAsync(
SurfaceValidationContext context,
CancellationToken cancellationToken = default)
=> ValueTask.FromResult(SurfaceValidationResult.Success());

public ValueTask EnsureAsync(
SurfaceValidationContext context,
CancellationToken cancellationToken = default)
=> ValueTask.CompletedTask;
}
}

@@ -54,10 +54,10 @@ public sealed class ScansEndpointsTests
Assert.Equal(payload.ScanId, status!.ScanId);
Assert.Equal("Pending", status.Status);
Assert.Equal("ghcr.io/demo/app:1.0.0", status.Image.Reference);
}

[Fact]
public async Task SubmitScanIsDeterministicForIdenticalPayloads()
}

[Fact]
public async Task SubmitScanIsDeterministicForIdenticalPayloads()
{
using var factory = new ScannerApplicationFactory();
using var client = factory.CreateClient();
@@ -81,11 +81,98 @@ public sealed class ScansEndpointsTests
|
||||
Assert.Equal(firstPayload!.ScanId, secondPayload!.ScanId);
|
||||
Assert.True(firstPayload.Created);
|
||||
Assert.False(secondPayload.Created);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task SubmitScanValidatesImageDescriptor()
|
||||
{
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task ScanStatusIncludesSurfacePointersWhenArtifactsExist()
|
||||
{
|
||||
const string digest = "sha256:0123456789abcdef0123456789abcdef0123456789abcdef0123456789abcdef";
|
||||
var digestValue = digest.Split(':', 2)[1];
|
||||
|
||||
using var factory = new ScannerApplicationFactory();
|
||||
|
||||
using (var scope = factory.Services.CreateScope())
|
||||
{
|
||||
var artifactRepository = scope.ServiceProvider.GetRequiredService<ArtifactRepository>();
|
||||
var linkRepository = scope.ServiceProvider.GetRequiredService<LinkRepository>();
|
||||
var artifactId = CatalogIdFactory.CreateArtifactId(ArtifactDocumentType.ImageBom, digest);
|
||||
|
||||
var artifact = new ArtifactDocument
|
||||
{
|
||||
Id = artifactId,
|
||||
Type = ArtifactDocumentType.ImageBom,
|
||||
Format = ArtifactDocumentFormat.CycloneDxJson,
|
||||
MediaType = "application/vnd.cyclonedx+json; version=1.6; view=inventory",
|
||||
BytesSha256 = digest,
|
||||
SizeBytes = 2048,
|
||||
Immutable = true,
|
||||
RefCount = 1,
|
||||
TtlClass = "default",
|
||||
CreatedAtUtc = DateTime.UtcNow,
|
||||
UpdatedAtUtc = DateTime.UtcNow
|
||||
};
|
||||
|
||||
await artifactRepository.UpsertAsync(artifact, CancellationToken.None).ConfigureAwait(false);
|
||||
|
||||
var link = new LinkDocument
|
||||
{
|
||||
Id = CatalogIdFactory.CreateLinkId(LinkSourceType.Image, digest, artifactId),
|
||||
FromType = LinkSourceType.Image,
|
||||
FromDigest = digest,
|
||||
ArtifactId = artifactId,
|
||||
CreatedAtUtc = DateTime.UtcNow
|
||||
};
|
||||
|
||||
await linkRepository.UpsertAsync(link, CancellationToken.None).ConfigureAwait(false);
|
||||
}
|
||||
|
||||
using var client = factory.CreateClient();
|
||||
|
||||
var submitRequest = new ScanSubmitRequest
|
||||
{
|
||||
Image = new ScanImageDescriptor
|
||||
{
|
||||
Digest = digest
|
||||
}
|
||||
};
|
||||
|
||||
var submitResponse = await client.PostAsJsonAsync("/api/v1/scans", submitRequest);
|
||||
submitResponse.EnsureSuccessStatusCode();
|
||||
|
||||
var submission = await submitResponse.Content.ReadFromJsonAsync<ScanSubmitResponse>();
|
||||
Assert.NotNull(submission);
|
||||
|
||||
var statusResponse = await client.GetAsync($"/api/v1/scans/{submission!.ScanId}");
|
||||
statusResponse.EnsureSuccessStatusCode();
|
||||
|
||||
var status = await statusResponse.Content.ReadFromJsonAsync<ScanStatusResponse>();
|
||||
Assert.NotNull(status);
|
||||
Assert.NotNull(status!.Surface);
|
||||
|
||||
var surface = status.Surface!;
|
||||
Assert.Equal("default", surface.Tenant);
|
||||
Assert.False(string.IsNullOrWhiteSpace(surface.ManifestDigest));
|
||||
Assert.NotNull(surface.ManifestUri);
|
||||
Assert.Contains("cas://scanner-artifacts/", surface.ManifestUri, StringComparison.Ordinal);
|
||||
|
||||
var manifest = surface.Manifest;
|
||||
Assert.Equal(digest, manifest.ImageDigest);
|
||||
Assert.Equal(surface.Tenant, manifest.Tenant);
|
||||
Assert.NotEqual(default, manifest.GeneratedAt);
|
||||
var manifestArtifact = Assert.Single(manifest.Artifacts);
|
||||
Assert.Equal("sbom-inventory", manifestArtifact.Kind);
|
||||
Assert.Equal("cdx-json", manifestArtifact.Format);
|
||||
Assert.Equal(digest, manifestArtifact.Digest);
|
||||
Assert.Equal("application/vnd.cyclonedx+json; version=1.6; view=inventory", manifestArtifact.MediaType);
|
||||
Assert.Equal("inventory", manifestArtifact.View);
|
||||
|
||||
var expectedUri = $"cas://scanner-artifacts/scanner/images/{digestValue}/sbom.cdx.json";
|
||||
Assert.Equal(expectedUri, manifestArtifact.Uri);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task SubmitScanValidatesImageDescriptor()
|
||||
{
|
||||
using var factory = new ScannerApplicationFactory();
|
||||
using var client = factory.CreateClient();
|
||||
|
||||
@@ -462,7 +549,7 @@ public sealed class ScansEndpointsTests
var storedResult = new EntryTraceResult(scanId, "sha256:test", generatedAt, graph, ndjson);

using var factory = new ScannerApplicationFactory(
configuration: null,
configureConfiguration: null,
services =>
{
services.AddSingleton<IEntryTraceResultStore>(new StubEntryTraceResultStore(storedResult));
@@ -485,7 +572,7 @@ public sealed class ScansEndpointsTests
public async Task GetEntryTraceReturnsNotFoundWhenMissing()
{
using var factory = new ScannerApplicationFactory(
configuration: null,
configureConfiguration: null,
services =>
{
services.AddSingleton<IEntryTraceResultStore>(new StubEntryTraceResultStore(null));

@@ -1,8 +1,8 @@
namespace StellaOps.Scheduler.WebService.GraphJobs;

internal readonly record struct GraphJobUpdateResult<TJob>(bool Updated, TJob Job) where TJob : class
{
public static GraphJobUpdateResult<TJob> UpdatedResult(TJob job) => new(true, job);

public static GraphJobUpdateResult<TJob> NotUpdated(TJob job) => new(false, job);
}

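A short consumption sketch for the result type above, mirroring how the Mongo graph-job store tests later in this commit use it; the `store`, `completedJob`, and `cancellationToken` names are illustrative, not taken from the source.

// Optimistic-concurrency pattern implied by the tests: Updated == false means the expected
// status no longer matched, and Job carries the currently persisted document.
var result = await store.UpdateAsync(completedJob, GraphJobStatus.Pending, cancellationToken);
if (!result.Updated)
{
// Another transition won the race; inspect result.Job.Status before retrying or giving up.
}
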
@@ -1,42 +1,42 @@
|
||||
# Scheduler WebService Task Board (Sprint 16)
|
||||
|
||||
| ID | Status | Owner(s) | Depends on | Description | Exit Criteria |
|
||||
|----|--------|----------|------------|-------------|---------------|
|
||||
|
||||
## Policy Engine v2 (Sprint 20)
|
||||
|
||||
| ID | Status | Owner(s) | Depends on | Description | Exit Criteria |
|
||||
|----|--------|----------|------------|-------------|---------------|
|
||||
> 2025-10-29: Added `/api/v1/scheduler/policy/runs` create/list/get endpoints with in-memory queue, scope/tenant enforcement, and contract docs (`docs/SCHED-WEB-20-001-POLICY-RUNS.md`). Tests cover happy path + auth failures.
|
||||
> 2025-10-26: Use canonical request/response samples from `samples/api/scheduler/policy-*.json`; serializer contract defined in `src/Scheduler/__Libraries/StellaOps.Scheduler.Models/docs/SCHED-MODELS-20-001-POLICY-RUNS.md`.
|
||||
| SCHED-WEB-20-002 | BLOCKED (waiting on SCHED-WORKER-20-301) | Scheduler WebService Guild | SCHED-WEB-20-001, SCHED-WORKER-20-301 | Provide simulation trigger endpoint returning diff preview metadata and job state for UI/CLI consumption. | Simulation endpoint returns deterministic diffs metadata; rate limits enforced; tests cover concurrency. |
|
||||
> 2025-10-29: WebService requires Worker policy job orchestration + Policy Engine diff callbacks (POLICY-ENGINE-20-003/006) to provide simulation previews. Awaiting completion of SCHED-WORKER-20-301 before wiring API.
|
||||
|
||||
## Graph Explorer v1 (Sprint 21)
|
||||
|
||||
| ID | Status | Owner(s) | Depends on | Description | Exit Criteria |
|
||||
|----|--------|----------|------------|-------------|---------------|
|
||||
| SCHED-WEB-21-004 | DONE (2025-11-04) | Scheduler WebService Guild, Scheduler Storage Guild | SCHED-WEB-21-001, SCHED-STORAGE-16-201 | Persist graph job lifecycle to Mongo storage and publish `scheduler.graph.job.completed@1` events + outbound webhook to Cartographer. | Storage repositories updated; events emitted; webhook payload documented; integration tests cover storage + event flow. **Note:** Events currently log JSON envelopes while the shared platform bus is provisioned. Cartographer webhook now posts JSON payloads when configured; replace inline logging with bus publisher once the shared event transport is online. |
|
||||
> 2025-10-30: Implemented Redis-backed publisher (`Scheduler:Events:GraphJobs`) emitting `scheduler.graph.job.completed@1` to configured stream with optional logging fallback; docs/configs to be validated with DevOps before closing.
|
||||
> 2025-11-04: Resumed SCHED-WEB-21-004 to finalize Mongo lifecycle persistence guards, graph completion events, and Cartographer webhook verification.
|
||||
> 2025-11-04: SCHED-WEB-21-004 completed – lifecycle stored in Mongo with optimistic concurrency, completion events/webhooks emitted once per transition, and result URI metadata refreshed idempotently with unit/integration coverage.
|
||||
|
||||
## StellaOps Console (Sprint 23)
|
||||
| ID | Status | Owner(s) | Depends on | Description | Exit Criteria |
|
||||
|----|--------|----------|------------|-------------|---------------|
|
||||
| SCHED-CONSOLE-23-001 | DONE (2025-11-03) | Scheduler WebService Guild, BE-Base Platform Guild | SCHED-WEB-16-103, SCHED-WEB-20-001 | Extend runs APIs with live progress SSE endpoints (`/console/runs/{id}/stream`), queue lag summaries, diff metadata fetch, retry/cancel hooks with RBAC enforcement, and deterministic pagination for history views consumed by Console. | SSE emits heartbeats/backoff headers, progress payload schema documented, unauthorized actions blocked in integration tests, metrics/logs expose queue lag + correlation IDs. |
|
||||
|
||||
## Policy Studio (Sprint 27)
|
||||
| ID | Status | Owner(s) | Depends on | Description | Exit Criteria |
|
||||
|----|--------|----------|------------|-------------|---------------|
|
||||
| SCHED-CONSOLE-27-001 | DONE (2025-11-03) | Scheduler WebService Guild, Policy Registry Guild | SCHED-WEB-16-103, REGISTRY-API-27-005 | Provide policy batch simulation orchestration endpoints (`/policies/simulations` POST/GET) exposing run creation, shard status, SSE progress, cancellation, and retries with RBAC enforcement. | API handles shard lifecycle with SSE heartbeats + retry headers; unauthorized requests rejected; integration tests cover submit/cancel/resume flows. |
|
||||
| SCHED-CONSOLE-27-002 | DOING (2025-11-03) | Scheduler WebService Guild, Observability Guild | SCHED-CONSOLE-27-001 | Emit telemetry endpoints/metrics (`policy_simulation_queue_depth`, `policy_simulation_latency`) and webhook callbacks for completion/failure consumed by Registry. | Metrics exposed via gateway, dashboards seeded, webhook contract documented, integration tests validate metrics emission. |
|
||||
|
||||
## Vulnerability Explorer (Sprint 29)
|
||||
| ID | Status | Owner(s) | Depends on | Description | Exit Criteria |
|
||||
|----|--------|----------|------------|-------------|---------------|
|
||||
| SCHED-VULN-29-001 | TODO | Scheduler WebService Guild, Findings Ledger Guild | SCHED-WEB-16-103, SBOM-VULN-29-001 | Expose resolver job APIs (`POST /vuln/resolver/jobs`, `GET /vuln/resolver/jobs/{id}`) to trigger candidate recomputation per artifact/policy change with RBAC and rate limits. | Resolver APIs documented; integration tests cover submit/status/cancel; unauthorized requests rejected. |
|
||||
| SCHED-VULN-29-002 | TODO | Scheduler WebService Guild, Observability Guild | SCHED-VULN-29-001 | Provide projector lag metrics endpoint and webhook notifications for backlog breaches consumed by DevOps dashboards. | Lag metrics exposed; webhook events triggered on thresholds; docs updated. |
|
||||
|
||||
## Notes
|
||||
- 2025-10-27: Minimal API host now wires Authority, health endpoints, and restart-only plug-in discovery per architecture §§1–2.
|
||||
|
||||
@@ -1,12 +0,0 @@

## Policy simulations

`/api/v1/scheduler/policies/simulations` orchestrates Policy Engine runs in `simulate` mode without mutating persisted findings.

- **Create** — `POST /api/v1/scheduler/policies/simulations` (scope `policy:simulate`) enqueues a simulation for `policyId`/`policyVersion`, respecting optional `metadata` and structured `inputs` (`sbomSet`, `advisoryCursor`, `vexCursor`, `captureExplain`). Returns `201 Created` with `simulation.runId` and status `queued`.
- **List/Get** — `GET /api/v1/scheduler/policies/simulations` and `/.../{simulationId}` expose `PolicyRunStatus` documents filtered to `mode=simulate`, including attempt counts, stats, and cancellation markers.
- **Cancel** — `POST /.../{simulationId}/cancel` records `cancellationRequested=true` (optional reason, timestamp) and immediately reflects the updated status; workers honour the flag on the next lease cycle.
- **Retry** — `POST /.../{simulationId}/retry` clones a terminal simulation (cancelled/failed/succeeded) into a fresh job preserving inputs/metadata. Non-terminal runs yield `409 Conflict`.
- **Stream** — `GET /.../{simulationId}/stream` emits SSE events (`initial`, `status`, `queueLag`, `heartbeat`, `completed`) with the latest `PolicyRunStatus`, enabling Console to render shard progress and cancellation state in real time.

Simulation APIs share the same deterministic pagination/metadata contracts as policy runs and surface queue depth snapshots via the existing scheduler queue metrics.
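
An illustrative request/response pair for the create endpoint described above; the field values are invented, and the exact payload shape should be checked against `samples/api/scheduler/policy-*.json`.

POST /api/v1/scheduler/policies/simulations
Content-Type: application/json

{
  "policyId": "policy-standard",
  "policyVersion": 4,
  "metadata": { "source": "console" },
  "inputs": { "sbomSet": ["sbom-alpha"], "captureExplain": true }
}

HTTP/1.1 201 Created
{
  "simulation": { "runId": "run-20251104-0001", "status": "queued" }
}
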
@@ -1,70 +1,70 @@
|
||||
using System.Threading;
|
||||
using System.Threading.Tasks;
|
||||
using StellaOps.Scheduler.Models;
|
||||
using StellaOps.Scheduler.Storage.Mongo.Repositories;
|
||||
using StellaOps.Scheduler.WebService.GraphJobs;
|
||||
using Xunit;
|
||||
|
||||
namespace StellaOps.Scheduler.Storage.Mongo.Tests.Integration;
|
||||
|
||||
public sealed class GraphJobStoreTests
|
||||
{
|
||||
private static readonly DateTimeOffset OccurredAt = new(2025, 11, 4, 10, 30, 0, TimeSpan.Zero);
|
||||
|
||||
[Fact]
|
||||
public async Task UpdateAsync_SucceedsWhenExpectedStatusMatches()
|
||||
{
|
||||
using var harness = new SchedulerMongoTestHarness();
|
||||
var repository = new GraphJobRepository(harness.Context);
|
||||
var store = new MongoGraphJobStore(repository);
|
||||
|
||||
var initial = CreateBuildJob();
|
||||
await store.AddAsync(initial, CancellationToken.None);
|
||||
|
||||
var running = GraphJobStateMachine.EnsureTransition(initial, GraphJobStatus.Running, OccurredAt, attempts: initial.Attempts);
|
||||
var completed = GraphJobStateMachine.EnsureTransition(running, GraphJobStatus.Completed, OccurredAt, attempts: running.Attempts + 1);
|
||||
|
||||
var updateResult = await store.UpdateAsync(completed, GraphJobStatus.Pending, CancellationToken.None);
|
||||
|
||||
Assert.True(updateResult.Updated);
|
||||
var persisted = await store.GetBuildJobAsync(initial.TenantId, initial.Id, CancellationToken.None);
|
||||
Assert.NotNull(persisted);
|
||||
Assert.Equal(GraphJobStatus.Completed, persisted!.Status);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task UpdateAsync_ReturnsExistingWhenExpectedStatusMismatch()
|
||||
{
|
||||
using var harness = new SchedulerMongoTestHarness();
|
||||
var repository = new GraphJobRepository(harness.Context);
|
||||
var store = new MongoGraphJobStore(repository);
|
||||
|
||||
var initial = CreateBuildJob();
|
||||
await store.AddAsync(initial, CancellationToken.None);
|
||||
|
||||
var running = GraphJobStateMachine.EnsureTransition(initial, GraphJobStatus.Running, OccurredAt, attempts: initial.Attempts);
|
||||
var completed = GraphJobStateMachine.EnsureTransition(running, GraphJobStatus.Completed, OccurredAt, attempts: running.Attempts + 1);
|
||||
|
||||
await store.UpdateAsync(completed, GraphJobStatus.Pending, CancellationToken.None);
|
||||
|
||||
var result = await store.UpdateAsync(completed, GraphJobStatus.Pending, CancellationToken.None);
|
||||
|
||||
Assert.False(result.Updated);
|
||||
Assert.Equal(GraphJobStatus.Completed, result.Job.Status);
|
||||
}
|
||||
|
||||
private static GraphBuildJob CreateBuildJob()
|
||||
{
|
||||
var digest = "sha256:" + new string('b', 64);
|
||||
return new GraphBuildJob(
|
||||
id: "gbj_store_test",
|
||||
tenantId: "tenant-store",
|
||||
sbomId: "sbom-alpha",
|
||||
sbomVersionId: "sbom-alpha-v1",
|
||||
sbomDigest: digest,
|
||||
status: GraphJobStatus.Pending,
|
||||
trigger: GraphBuildJobTrigger.SbomVersion,
|
||||
createdAt: OccurredAt,
|
||||
metadata: null);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,218 +1,218 @@
|
||||
using System.Collections.Generic;
|
||||
using System.Threading;
|
||||
using System.Threading.Tasks;
|
||||
using StellaOps.Scheduler.Models;
|
||||
using StellaOps.Scheduler.WebService.GraphJobs;
|
||||
using Xunit;
|
||||
|
||||
namespace StellaOps.Scheduler.WebService.Tests;
|
||||
|
||||
public sealed class GraphJobServiceTests
|
||||
{
|
||||
private static readonly DateTimeOffset FixedTime = new(2025, 11, 4, 12, 0, 0, TimeSpan.Zero);
|
||||
|
||||
[Fact]
|
||||
public async Task CompleteBuildJob_PersistsMetadataAndPublishesOnce()
|
||||
{
|
||||
var store = new TrackingGraphJobStore();
|
||||
var initial = CreateBuildJob();
|
||||
await store.AddAsync(initial, CancellationToken.None);
|
||||
|
||||
var clock = new FixedClock(FixedTime);
|
||||
var publisher = new RecordingPublisher();
|
||||
var webhook = new RecordingWebhookClient();
|
||||
var service = new GraphJobService(store, clock, publisher, webhook);
|
||||
|
||||
var request = new GraphJobCompletionRequest
|
||||
{
|
||||
JobId = initial.Id,
|
||||
JobType = GraphJobQueryType.Build,
|
||||
Status = GraphJobStatus.Completed,
|
||||
OccurredAt = FixedTime,
|
||||
GraphSnapshotId = "graph_snap_final ",
|
||||
ResultUri = "oras://cartographer/bundle ",
|
||||
CorrelationId = "corr-123 "
|
||||
};
|
||||
|
||||
var response = await service.CompleteJobAsync(initial.TenantId, request, CancellationToken.None);
|
||||
|
||||
Assert.Equal(GraphJobStatus.Completed, response.Status);
|
||||
Assert.Equal(1, store.BuildUpdateCount);
|
||||
Assert.Single(publisher.Notifications);
|
||||
Assert.Single(webhook.Notifications);
|
||||
|
||||
var stored = await store.GetBuildJobAsync(initial.TenantId, initial.Id, CancellationToken.None);
|
||||
Assert.NotNull(stored);
|
||||
Assert.Equal("graph_snap_final", stored!.GraphSnapshotId);
|
||||
Assert.Equal("corr-123", stored.CorrelationId);
|
||||
Assert.True(stored.Metadata.TryGetValue("resultUri", out var resultUri));
|
||||
Assert.Equal("oras://cartographer/bundle", resultUri);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task CompleteBuildJob_IsIdempotentWhenAlreadyCompleted()
|
||||
{
|
||||
var store = new TrackingGraphJobStore();
|
||||
var initial = CreateBuildJob();
|
||||
await store.AddAsync(initial, CancellationToken.None);
|
||||
|
||||
var clock = new FixedClock(FixedTime);
|
||||
var publisher = new RecordingPublisher();
|
||||
var webhook = new RecordingWebhookClient();
|
||||
var service = new GraphJobService(store, clock, publisher, webhook);
|
||||
|
||||
var request = new GraphJobCompletionRequest
|
||||
{
|
||||
JobId = initial.Id,
|
||||
JobType = GraphJobQueryType.Build,
|
||||
Status = GraphJobStatus.Completed,
|
||||
OccurredAt = FixedTime,
|
||||
GraphSnapshotId = "graph_snap_final",
|
||||
ResultUri = "oras://cartographer/bundle",
|
||||
CorrelationId = "corr-123"
|
||||
};
|
||||
|
||||
await service.CompleteJobAsync(initial.TenantId, request, CancellationToken.None);
|
||||
var updateCountAfterFirst = store.BuildUpdateCount;
|
||||
|
||||
var secondResponse = await service.CompleteJobAsync(initial.TenantId, request, CancellationToken.None);
|
||||
|
||||
Assert.Equal(GraphJobStatus.Completed, secondResponse.Status);
|
||||
Assert.Equal(updateCountAfterFirst, store.BuildUpdateCount);
|
||||
Assert.Single(publisher.Notifications);
|
||||
Assert.Single(webhook.Notifications);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task CompleteBuildJob_UpdatesResultUriWithoutReemittingEvent()
|
||||
{
|
||||
var store = new TrackingGraphJobStore();
|
||||
var initial = CreateBuildJob();
|
||||
await store.AddAsync(initial, CancellationToken.None);
|
||||
|
||||
var clock = new FixedClock(FixedTime);
|
||||
var publisher = new RecordingPublisher();
|
||||
var webhook = new RecordingWebhookClient();
|
||||
var service = new GraphJobService(store, clock, publisher, webhook);
|
||||
|
||||
var firstRequest = new GraphJobCompletionRequest
|
||||
{
|
||||
JobId = initial.Id,
|
||||
JobType = GraphJobQueryType.Build,
|
||||
Status = GraphJobStatus.Completed,
|
||||
OccurredAt = FixedTime,
|
||||
GraphSnapshotId = "graph_snap_final",
|
||||
ResultUri = null,
|
||||
CorrelationId = "corr-123"
|
||||
};
|
||||
|
||||
await service.CompleteJobAsync(initial.TenantId, firstRequest, CancellationToken.None);
|
||||
Assert.Equal(1, store.BuildUpdateCount);
|
||||
Assert.Single(publisher.Notifications);
|
||||
Assert.Single(webhook.Notifications);
|
||||
|
||||
var secondRequest = firstRequest with
|
||||
{
|
||||
ResultUri = "oras://cartographer/bundle-v2",
|
||||
OccurredAt = FixedTime.AddSeconds(30)
|
||||
};
|
||||
|
||||
var response = await service.CompleteJobAsync(initial.TenantId, secondRequest, CancellationToken.None);
|
||||
|
||||
Assert.Equal(GraphJobStatus.Completed, response.Status);
|
||||
Assert.Equal(2, store.BuildUpdateCount);
|
||||
Assert.Single(publisher.Notifications);
|
||||
Assert.Single(webhook.Notifications);
|
||||
|
||||
var stored = await store.GetBuildJobAsync(initial.TenantId, initial.Id, CancellationToken.None);
|
||||
Assert.NotNull(stored);
|
||||
Assert.True(stored!.Metadata.TryGetValue("resultUri", out var resultUri));
|
||||
Assert.Equal("oras://cartographer/bundle-v2", resultUri);
|
||||
}
|
||||
|
||||
private static GraphBuildJob CreateBuildJob()
|
||||
{
|
||||
var digest = "sha256:" + new string('a', 64);
|
||||
return new GraphBuildJob(
|
||||
id: "gbj_test",
|
||||
tenantId: "tenant-alpha",
|
||||
sbomId: "sbom-alpha",
|
||||
sbomVersionId: "sbom-alpha-v1",
|
||||
sbomDigest: digest,
|
||||
status: GraphJobStatus.Pending,
|
||||
trigger: GraphBuildJobTrigger.SbomVersion,
|
||||
createdAt: FixedTime,
|
||||
metadata: null);
|
||||
}
|
||||
|
||||
private sealed class TrackingGraphJobStore : IGraphJobStore
|
||||
{
|
||||
private readonly InMemoryGraphJobStore _inner = new();
|
||||
|
||||
public int BuildUpdateCount { get; private set; }
|
||||
|
||||
public int OverlayUpdateCount { get; private set; }
|
||||
|
||||
public ValueTask<GraphBuildJob> AddAsync(GraphBuildJob job, CancellationToken cancellationToken)
|
||||
=> _inner.AddAsync(job, cancellationToken);
|
||||
|
||||
public ValueTask<GraphOverlayJob> AddAsync(GraphOverlayJob job, CancellationToken cancellationToken)
|
||||
=> _inner.AddAsync(job, cancellationToken);
|
||||
|
||||
public ValueTask<GraphJobCollection> GetJobsAsync(string tenantId, GraphJobQuery query, CancellationToken cancellationToken)
|
||||
=> _inner.GetJobsAsync(tenantId, query, cancellationToken);
|
||||
|
||||
public ValueTask<GraphBuildJob?> GetBuildJobAsync(string tenantId, string jobId, CancellationToken cancellationToken)
|
||||
=> _inner.GetBuildJobAsync(tenantId, jobId, cancellationToken);
|
||||
|
||||
public ValueTask<GraphOverlayJob?> GetOverlayJobAsync(string tenantId, string jobId, CancellationToken cancellationToken)
|
||||
=> _inner.GetOverlayJobAsync(tenantId, jobId, cancellationToken);
|
||||
|
||||
public async ValueTask<GraphJobUpdateResult<GraphBuildJob>> UpdateAsync(GraphBuildJob job, GraphJobStatus expectedStatus, CancellationToken cancellationToken)
|
||||
{
|
||||
BuildUpdateCount++;
|
||||
return await _inner.UpdateAsync(job, expectedStatus, cancellationToken);
|
||||
}
|
||||
|
||||
public async ValueTask<GraphJobUpdateResult<GraphOverlayJob>> UpdateAsync(GraphOverlayJob job, GraphJobStatus expectedStatus, CancellationToken cancellationToken)
|
||||
{
|
||||
OverlayUpdateCount++;
|
||||
return await _inner.UpdateAsync(job, expectedStatus, cancellationToken);
|
||||
}
|
||||
|
||||
public ValueTask<IReadOnlyCollection<GraphOverlayJob>> GetOverlayJobsAsync(string tenantId, CancellationToken cancellationToken)
|
||||
=> _inner.GetOverlayJobsAsync(tenantId, cancellationToken);
|
||||
}
|
||||
|
||||
private sealed class RecordingPublisher : IGraphJobCompletionPublisher
|
||||
{
|
||||
public List<GraphJobCompletionNotification> Notifications { get; } = new();
|
||||
|
||||
public Task PublishAsync(GraphJobCompletionNotification notification, CancellationToken cancellationToken)
|
||||
{
|
||||
Notifications.Add(notification);
|
||||
return Task.CompletedTask;
|
||||
}
|
||||
}
|
||||
|
||||
private sealed class RecordingWebhookClient : ICartographerWebhookClient
|
||||
{
|
||||
public List<GraphJobCompletionNotification> Notifications { get; } = new();
|
||||
|
||||
public Task NotifyAsync(GraphJobCompletionNotification notification, CancellationToken cancellationToken)
|
||||
{
|
||||
Notifications.Add(notification);
|
||||
return Task.CompletedTask;
|
||||
}
|
||||
}
|
||||
|
||||
private sealed class FixedClock : ISystemClock
|
||||
{
|
||||
public FixedClock(DateTimeOffset utcNow)
|
||||
{
|
||||
UtcNow = utcNow;
|
||||
}
|
||||
|
||||
public DateTimeOffset UtcNow { get; set; }
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,13 +1,13 @@
using StellaOps.TaskRunner.Core.Planning;

namespace StellaOps.TaskRunner.Core.Execution;

public interface IPackRunStepExecutor
{
Task<PackRunStepExecutionResult> ExecuteAsync(
PackRunExecutionStep step,
IReadOnlyDictionary<string, TaskPackPlanParameterValue> parameters,
CancellationToken cancellationToken);
}

public sealed record PackRunStepExecutionResult(bool Succeeded, string? Error = null);

@@ -1,86 +1,86 @@
using System.Collections.ObjectModel;
using StellaOps.TaskRunner.Core.Planning;

namespace StellaOps.TaskRunner.Core.Execution;

public sealed class PackRunExecutionGraph
{
    public static readonly TaskPackPlanFailurePolicy DefaultFailurePolicy = new(1, 0, ContinueOnError: false);

    public PackRunExecutionGraph(IReadOnlyList<PackRunExecutionStep> steps, TaskPackPlanFailurePolicy? failurePolicy)
    {
        Steps = steps ?? throw new ArgumentNullException(nameof(steps));
        FailurePolicy = failurePolicy ?? DefaultFailurePolicy;
    }

    public IReadOnlyList<PackRunExecutionStep> Steps { get; }

    public TaskPackPlanFailurePolicy FailurePolicy { get; }
}

public enum PackRunStepKind
{
    Unknown = 0,
    Run,
    GateApproval,
    GatePolicy,
    Parallel,
    Map
}

public sealed class PackRunExecutionStep
{
    public PackRunExecutionStep(
        string id,
        string templateId,
        PackRunStepKind kind,
        bool enabled,
        string? uses,
        IReadOnlyDictionary<string, TaskPackPlanParameterValue> parameters,
        string? approvalId,
        string? gateMessage,
        int? maxParallel,
        bool continueOnError,
        IReadOnlyList<PackRunExecutionStep> children)
    {
        Id = string.IsNullOrWhiteSpace(id) ? throw new ArgumentException("Value cannot be null or whitespace.", nameof(id)) : id;
        TemplateId = string.IsNullOrWhiteSpace(templateId) ? throw new ArgumentException("Value cannot be null or whitespace.", nameof(templateId)) : templateId;
        Kind = kind;
        Enabled = enabled;
        Uses = uses;
        Parameters = parameters ?? throw new ArgumentNullException(nameof(parameters));
        ApprovalId = approvalId;
        GateMessage = gateMessage;
        MaxParallel = maxParallel;
        ContinueOnError = continueOnError;
        Children = children ?? throw new ArgumentNullException(nameof(children));
    }

    public string Id { get; }

    public string TemplateId { get; }

    public PackRunStepKind Kind { get; }

    public bool Enabled { get; }

    public string? Uses { get; }

    public IReadOnlyDictionary<string, TaskPackPlanParameterValue> Parameters { get; }

    public string? ApprovalId { get; }

    public string? GateMessage { get; }

    public int? MaxParallel { get; }

    public bool ContinueOnError { get; }

    public IReadOnlyList<PackRunExecutionStep> Children { get; }

    public static IReadOnlyDictionary<string, TaskPackPlanParameterValue> EmptyParameters { get; } =
        new ReadOnlyDictionary<string, TaskPackPlanParameterValue>(new Dictionary<string, TaskPackPlanParameterValue>(StringComparer.Ordinal));

    public static IReadOnlyList<PackRunExecutionStep> EmptyChildren { get; } =
        Array.Empty<PackRunExecutionStep>();
}

@@ -1,77 +1,77 @@
using System.Collections.ObjectModel;
using System.Text.Json.Nodes;
using StellaOps.TaskRunner.Core.Planning;

namespace StellaOps.TaskRunner.Core.Execution;

public sealed class PackRunExecutionGraphBuilder
{
    public PackRunExecutionGraph Build(TaskPackPlan plan)
    {
        ArgumentNullException.ThrowIfNull(plan);

        var steps = plan.Steps.Select(ConvertStep).ToList();
        var failurePolicy = plan.FailurePolicy;
        return new PackRunExecutionGraph(steps, failurePolicy);
    }

    private static PackRunExecutionStep ConvertStep(TaskPackPlanStep step)
    {
        var kind = DetermineKind(step.Type);
        var parameters = step.Parameters is null
            ? PackRunExecutionStep.EmptyParameters
            : new ReadOnlyDictionary<string, TaskPackPlanParameterValue>(
                new Dictionary<string, TaskPackPlanParameterValue>(step.Parameters, StringComparer.Ordinal));

        var children = step.Children is null
            ? PackRunExecutionStep.EmptyChildren
            : step.Children.Select(ConvertStep).ToList();

        var maxParallel = TryGetInt(parameters, "maxParallel");
        var continueOnError = TryGetBool(parameters, "continueOnError");

        return new PackRunExecutionStep(
            step.Id,
            step.TemplateId,
            kind,
            step.Enabled,
            step.Uses,
            parameters,
            step.ApprovalId,
            step.GateMessage,
            maxParallel,
            continueOnError,
            children);
    }

    private static PackRunStepKind DetermineKind(string? type)
        => type switch
        {
            "run" => PackRunStepKind.Run,
            "gate.approval" => PackRunStepKind.GateApproval,
            "gate.policy" => PackRunStepKind.GatePolicy,
            "parallel" => PackRunStepKind.Parallel,
            "map" => PackRunStepKind.Map,
            _ => PackRunStepKind.Unknown
        };

    private static int? TryGetInt(IReadOnlyDictionary<string, TaskPackPlanParameterValue> parameters, string key)
    {
        if (!parameters.TryGetValue(key, out var value) || value.Value is not JsonValue jsonValue)
        {
            return null;
        }

        return jsonValue.TryGetValue<int>(out var result) ? result : null;
    }

    private static bool TryGetBool(IReadOnlyDictionary<string, TaskPackPlanParameterValue> parameters, string key)
    {
        if (!parameters.TryGetValue(key, out var value) || value.Value is not JsonValue jsonValue)
        {
            return false;
        }

        return jsonValue.TryGetValue<bool>(out var result) && result;
    }
}

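Usage is a single call once a plan is available. A sketch only; `plan` stands in for a TaskPackPlan instance produced elsewhere and is not defined by this diff.

// Build the execution graph and inspect per-step scheduling hints derived from plan parameters.
var graph = new PackRunExecutionGraphBuilder().Build(plan);
foreach (var step in graph.Steps)
{
    Console.WriteLine($"{step.Id}: kind={step.Kind}, maxParallel={step.MaxParallel}, continueOnError={step.ContinueOnError}");
}
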
@@ -1,159 +1,159 @@
using System.Collections.ObjectModel;
using System.Linq;

namespace StellaOps.TaskRunner.Core.Execution;

public static class PackRunGateStateUpdater
{
    public static PackRunGateStateUpdateResult Apply(
        PackRunState state,
        PackRunExecutionGraph graph,
        PackRunApprovalCoordinator coordinator,
        DateTimeOffset timestamp)
    {
        ArgumentNullException.ThrowIfNull(state);
        ArgumentNullException.ThrowIfNull(graph);
        ArgumentNullException.ThrowIfNull(coordinator);

        var approvals = coordinator.GetApprovals()
            .SelectMany(approval => approval.StepIds.Select(stepId => (stepId, approval)))
            .GroupBy(tuple => tuple.stepId, StringComparer.Ordinal)
            .ToDictionary(
                group => group.Key,
                group => group.First().approval,
                StringComparer.Ordinal);

        var mutable = new Dictionary<string, PackRunStepStateRecord>(state.Steps, StringComparer.Ordinal);
        var changed = false;
        var hasBlockingFailure = false;

        foreach (var step in EnumerateSteps(graph.Steps))
        {
            if (!mutable.TryGetValue(step.Id, out var record))
            {
                continue;
            }

            switch (step.Kind)
            {
                case PackRunStepKind.GateApproval:
                    if (!approvals.TryGetValue(step.Id, out var approvalState))
                    {
                        continue;
                    }

                    switch (approvalState.Status)
                    {
                        case PackRunApprovalStatus.Pending:
                            break;

                        case PackRunApprovalStatus.Approved:
                            if (record.Status != PackRunStepExecutionStatus.Succeeded || record.StatusReason is not null)
                            {
                                mutable[step.Id] = record with
                                {
                                    Status = PackRunStepExecutionStatus.Succeeded,
                                    StatusReason = null,
                                    LastTransitionAt = timestamp,
                                    NextAttemptAt = null
                                };
                                changed = true;
                            }

                            break;

                        case PackRunApprovalStatus.Rejected:
                        case PackRunApprovalStatus.Expired:
                            var failureReason = BuildFailureReason(approvalState);
                            if (record.Status != PackRunStepExecutionStatus.Failed ||
                                !string.Equals(record.StatusReason, failureReason, StringComparison.Ordinal))
                            {
                                mutable[step.Id] = record with
                                {
                                    Status = PackRunStepExecutionStatus.Failed,
                                    StatusReason = failureReason,
                                    LastTransitionAt = timestamp,
                                    NextAttemptAt = null
                                };
                                changed = true;
                            }

                            hasBlockingFailure = true;
                            break;
                    }

                    break;

                case PackRunStepKind.GatePolicy:
                    if (record.Status == PackRunStepExecutionStatus.Pending &&
                        string.Equals(record.StatusReason, "requires-policy", StringComparison.Ordinal))
                    {
                        mutable[step.Id] = record with
                        {
                            Status = PackRunStepExecutionStatus.Succeeded,
                            StatusReason = null,
                            LastTransitionAt = timestamp,
                            NextAttemptAt = null
                        };
                        changed = true;
                    }

                    break;
            }
        }

        if (!changed)
        {
            return new PackRunGateStateUpdateResult(state, hasBlockingFailure);
        }

        var updatedState = state with
        {
            UpdatedAt = timestamp,
            Steps = new ReadOnlyDictionary<string, PackRunStepStateRecord>(mutable)
        };

        return new PackRunGateStateUpdateResult(updatedState, hasBlockingFailure);
    }

    private static IEnumerable<PackRunExecutionStep> EnumerateSteps(IReadOnlyList<PackRunExecutionStep> steps)
    {
        if (steps.Count == 0)
        {
            yield break;
        }

        foreach (var step in steps)
        {
            yield return step;

            if (step.Children.Count > 0)
            {
                foreach (var child in EnumerateSteps(step.Children))
                {
                    yield return child;
                }
            }
        }
    }

    private static string BuildFailureReason(PackRunApprovalState state)
    {
        var baseReason = state.Status switch
        {
            PackRunApprovalStatus.Rejected => "approval-rejected",
            PackRunApprovalStatus.Expired => "approval-expired",
            _ => "approval-invalid"
        };

        if (string.IsNullOrWhiteSpace(state.Summary))
        {
            return baseReason;
        }

        var summary = state.Summary.Trim();
        return $"{baseReason}:{summary}";
    }
}

public readonly record struct PackRunGateStateUpdateResult(PackRunState State, bool HasBlockingFailure);

@@ -1,50 +1,50 @@
using System.Collections.ObjectModel;
using StellaOps.TaskRunner.Core.Planning;

namespace StellaOps.TaskRunner.Core.Execution;

public sealed record PackRunState(
    string RunId,
    string PlanHash,
    TaskPackPlanFailurePolicy FailurePolicy,
    DateTimeOffset CreatedAt,
    DateTimeOffset UpdatedAt,
    IReadOnlyDictionary<string, PackRunStepStateRecord> Steps)
{
    public static PackRunState Create(
        string runId,
        string planHash,
        TaskPackPlanFailurePolicy failurePolicy,
        IReadOnlyDictionary<string, PackRunStepStateRecord> steps,
        DateTimeOffset timestamp)
        => new(
            runId,
            planHash,
            failurePolicy,
            timestamp,
            timestamp,
            new ReadOnlyDictionary<string, PackRunStepStateRecord>(new Dictionary<string, PackRunStepStateRecord>(steps, StringComparer.Ordinal)));
}

public sealed record PackRunStepStateRecord(
    string StepId,
    PackRunStepKind Kind,
    bool Enabled,
    bool ContinueOnError,
    int? MaxParallel,
    string? ApprovalId,
    string? GateMessage,
    PackRunStepExecutionStatus Status,
    int Attempts,
    DateTimeOffset? LastTransitionAt,
    DateTimeOffset? NextAttemptAt,
    string? StatusReason);

public interface IPackRunStateStore
{
    Task<PackRunState?> GetAsync(string runId, CancellationToken cancellationToken);

    Task SaveAsync(PackRunState state, CancellationToken cancellationToken);

    Task<IReadOnlyList<PackRunState>> ListAsync(CancellationToken cancellationToken);
}

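A state snapshot is normally seeded from the execution graph; the shape mirrors what the unit tests later in this commit construct. A sketch with illustrative identifiers:

var failurePolicy = new TaskPackPlanFailurePolicy(MaxAttempts: 3, BackoffSeconds: 30, ContinueOnError: false);
var steps = new Dictionary<string, PackRunStepStateRecord>(StringComparer.Ordinal)
{
    ["step-a"] = new PackRunStepStateRecord(
        StepId: "step-a",
        Kind: PackRunStepKind.Run,
        Enabled: true,
        ContinueOnError: false,
        MaxParallel: null,
        ApprovalId: null,
        GateMessage: null,
        Status: PackRunStepExecutionStatus.Pending,
        Attempts: 0,
        LastTransitionAt: null,
        NextAttemptAt: null,
        StatusReason: null)
};
var state = PackRunState.Create("run-42", "plan-hash", failurePolicy, steps, DateTimeOffset.UtcNow);
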
@@ -1,121 +1,121 @@
using StellaOps.TaskRunner.Core.Planning;

namespace StellaOps.TaskRunner.Core.Execution;

public static class PackRunStepStateMachine
{
    public static PackRunStepState Create(DateTimeOffset? createdAt = null)
        => new(PackRunStepExecutionStatus.Pending, Attempts: 0, createdAt, NextAttemptAt: null);

    public static PackRunStepState Start(PackRunStepState state, DateTimeOffset startedAt)
    {
        ArgumentNullException.ThrowIfNull(state);
        if (state.Status is not PackRunStepExecutionStatus.Pending)
        {
            throw new InvalidOperationException($"Cannot start step from status {state.Status}.");
        }

        return state with
        {
            Status = PackRunStepExecutionStatus.Running,
            LastTransitionAt = startedAt,
            NextAttemptAt = null
        };
    }

    public static PackRunStepState CompleteSuccess(PackRunStepState state, DateTimeOffset completedAt)
    {
        ArgumentNullException.ThrowIfNull(state);
        if (state.Status is not PackRunStepExecutionStatus.Running)
        {
            throw new InvalidOperationException($"Cannot complete step from status {state.Status}.");
        }

        return state with
        {
            Status = PackRunStepExecutionStatus.Succeeded,
            Attempts = state.Attempts + 1,
            LastTransitionAt = completedAt,
            NextAttemptAt = null
        };
    }

    public static PackRunStepFailureResult RegisterFailure(
        PackRunStepState state,
        DateTimeOffset failedAt,
        TaskPackPlanFailurePolicy failurePolicy)
    {
        ArgumentNullException.ThrowIfNull(state);
        ArgumentNullException.ThrowIfNull(failurePolicy);

        if (state.Status is not PackRunStepExecutionStatus.Running)
        {
            throw new InvalidOperationException($"Cannot register failure from status {state.Status}.");
        }

        var attempts = state.Attempts + 1;
        if (attempts < failurePolicy.MaxAttempts)
        {
            var backoff = TimeSpan.FromSeconds(Math.Max(0, failurePolicy.BackoffSeconds));
            var nextAttemptAt = failedAt + backoff;
            var nextState = state with
            {
                Status = PackRunStepExecutionStatus.Pending,
                Attempts = attempts,
                LastTransitionAt = failedAt,
                NextAttemptAt = nextAttemptAt
            };

            return new PackRunStepFailureResult(nextState, PackRunStepFailureOutcome.Retry);
        }

        var finalState = state with
        {
            Status = PackRunStepExecutionStatus.Failed,
            Attempts = attempts,
            LastTransitionAt = failedAt,
            NextAttemptAt = null
        };

        return new PackRunStepFailureResult(finalState, PackRunStepFailureOutcome.Abort);
    }

    public static PackRunStepState Skip(PackRunStepState state, DateTimeOffset skippedAt)
    {
        ArgumentNullException.ThrowIfNull(state);
        if (state.Status is not PackRunStepExecutionStatus.Pending)
        {
            throw new InvalidOperationException($"Cannot skip step from status {state.Status}.");
        }

        return state with
        {
            Status = PackRunStepExecutionStatus.Skipped,
            LastTransitionAt = skippedAt,
            NextAttemptAt = null
        };
    }
}

public sealed record PackRunStepState(
    PackRunStepExecutionStatus Status,
    int Attempts,
    DateTimeOffset? LastTransitionAt,
    DateTimeOffset? NextAttemptAt);

public enum PackRunStepExecutionStatus
{
    Pending = 0,
    Running,
    Succeeded,
    Failed,
    Skipped
}

public readonly record struct PackRunStepFailureResult(PackRunStepState State, PackRunStepFailureOutcome Outcome);

public enum PackRunStepFailureOutcome
{
    Retry = 0,
    Abort
}

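The intended transition flow, end to end, looks like the sketch below (illustrative only; the failure policy values are arbitrary):

// Pending -> Running -> retry-or-abort, driven entirely by the state machine helpers.
var failurePolicy = new TaskPackPlanFailurePolicy(MaxAttempts: 3, BackoffSeconds: 30, ContinueOnError: false);
var stepState = PackRunStepStateMachine.Create(DateTimeOffset.UtcNow);
stepState = PackRunStepStateMachine.Start(stepState, DateTimeOffset.UtcNow);
var failure = PackRunStepStateMachine.RegisterFailure(stepState, DateTimeOffset.UtcNow, failurePolicy);
if (failure.Outcome == PackRunStepFailureOutcome.Retry)
{
    // The step returned to Pending with NextAttemptAt pushed out by the policy's backoff.
    Console.WriteLine($"Retry scheduled for {failure.State.NextAttemptAt}");
}
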
@@ -1,78 +1,78 @@
using System.Collections.ObjectModel;
using StellaOps.TaskRunner.Core.Planning;

namespace StellaOps.TaskRunner.Core.Execution.Simulation;

public sealed class PackRunSimulationEngine
{
    private readonly PackRunExecutionGraphBuilder graphBuilder;

    public PackRunSimulationEngine()
    {
        graphBuilder = new PackRunExecutionGraphBuilder();
    }

    public PackRunSimulationResult Simulate(TaskPackPlan plan)
    {
        ArgumentNullException.ThrowIfNull(plan);

        var graph = graphBuilder.Build(plan);
        var steps = graph.Steps.Select(ConvertStep).ToList();
        var outputs = BuildOutputs(plan.Outputs);

        return new PackRunSimulationResult(steps, outputs, graph.FailurePolicy);
    }

    private static PackRunSimulationNode ConvertStep(PackRunExecutionStep step)
    {
        var status = DetermineStatus(step);
        var children = step.Children.Count == 0
            ? PackRunSimulationNode.Empty
            : new ReadOnlyCollection<PackRunSimulationNode>(step.Children.Select(ConvertStep).ToList());

        return new PackRunSimulationNode(
            step.Id,
            step.TemplateId,
            step.Kind,
            step.Enabled,
            step.Uses,
            step.ApprovalId,
            step.GateMessage,
            step.Parameters,
            step.MaxParallel,
            step.ContinueOnError,
            status,
            children);
    }

    private static PackRunSimulationStatus DetermineStatus(PackRunExecutionStep step)
    {
        if (!step.Enabled)
        {
            return PackRunSimulationStatus.Skipped;
        }

        return step.Kind switch
        {
            PackRunStepKind.GateApproval => PackRunSimulationStatus.RequiresApproval,
            PackRunStepKind.GatePolicy => PackRunSimulationStatus.RequiresPolicy,
            _ => PackRunSimulationStatus.Pending
        };
    }

    private static IReadOnlyList<PackRunSimulationOutput> BuildOutputs(IReadOnlyList<TaskPackPlanOutput> outputs)
    {
        if (outputs.Count == 0)
        {
            return PackRunSimulationOutput.Empty;
        }

        var list = new List<PackRunSimulationOutput>(outputs.Count);
        foreach (var output in outputs)
        {
            list.Add(new PackRunSimulationOutput(output.Name, output.Type, output.Path, output.Expression));
        }

        return new ReadOnlyCollection<PackRunSimulationOutput>(list);
    }
}

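Simulation is side-effect free; a caller only needs a resolved plan. A sketch, assuming `plan` is an existing TaskPackPlan:

var simulation = new PackRunSimulationEngine().Simulate(plan);
if (simulation.HasPendingApprovals)
{
    // Surface gate steps to the operator before a real run is scheduled.
    Console.WriteLine("Plan contains approval or policy gates.");
}
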
@@ -1,131 +1,131 @@
using System.Collections.ObjectModel;
using StellaOps.TaskRunner.Core.Planning;

namespace StellaOps.TaskRunner.Core.Execution.Simulation;

public sealed class PackRunSimulationResult
{
    public PackRunSimulationResult(
        IReadOnlyList<PackRunSimulationNode> steps,
        IReadOnlyList<PackRunSimulationOutput> outputs,
        TaskPackPlanFailurePolicy failurePolicy)
    {
        Steps = steps ?? throw new ArgumentNullException(nameof(steps));
        Outputs = outputs ?? throw new ArgumentNullException(nameof(outputs));
        FailurePolicy = failurePolicy ?? throw new ArgumentNullException(nameof(failurePolicy));
    }

    public IReadOnlyList<PackRunSimulationNode> Steps { get; }

    public IReadOnlyList<PackRunSimulationOutput> Outputs { get; }

    public TaskPackPlanFailurePolicy FailurePolicy { get; }

    public bool HasPendingApprovals => Steps.Any(ContainsApprovalRequirement);

    private static bool ContainsApprovalRequirement(PackRunSimulationNode node)
    {
        if (node.Status is PackRunSimulationStatus.RequiresApproval or PackRunSimulationStatus.RequiresPolicy)
        {
            return true;
        }

        return node.Children.Any(ContainsApprovalRequirement);
    }
}

public sealed class PackRunSimulationNode
{
    public PackRunSimulationNode(
        string id,
        string templateId,
        PackRunStepKind kind,
        bool enabled,
        string? uses,
        string? approvalId,
        string? gateMessage,
        IReadOnlyDictionary<string, TaskPackPlanParameterValue> parameters,
        int? maxParallel,
        bool continueOnError,
        PackRunSimulationStatus status,
        IReadOnlyList<PackRunSimulationNode> children)
    {
        Id = string.IsNullOrWhiteSpace(id) ? throw new ArgumentException("Value cannot be null or whitespace.", nameof(id)) : id;
        TemplateId = string.IsNullOrWhiteSpace(templateId) ? throw new ArgumentException("Value cannot be null or whitespace.", nameof(templateId)) : templateId;
        Kind = kind;
        Enabled = enabled;
        Uses = uses;
        ApprovalId = approvalId;
        GateMessage = gateMessage;
        Parameters = parameters ?? throw new ArgumentNullException(nameof(parameters));
        MaxParallel = maxParallel;
        ContinueOnError = continueOnError;
        Status = status;
        Children = children ?? throw new ArgumentNullException(nameof(children));
    }

    public string Id { get; }

    public string TemplateId { get; }

    public PackRunStepKind Kind { get; }

    public bool Enabled { get; }

    public string? Uses { get; }

    public string? ApprovalId { get; }

    public string? GateMessage { get; }

    public IReadOnlyDictionary<string, TaskPackPlanParameterValue> Parameters { get; }

    public int? MaxParallel { get; }

    public bool ContinueOnError { get; }

    public PackRunSimulationStatus Status { get; }

    public IReadOnlyList<PackRunSimulationNode> Children { get; }

    public static IReadOnlyList<PackRunSimulationNode> Empty { get; } =
        new ReadOnlyCollection<PackRunSimulationNode>(Array.Empty<PackRunSimulationNode>());
}

public enum PackRunSimulationStatus
{
    Pending = 0,
    Skipped,
    RequiresApproval,
    RequiresPolicy
}

public sealed class PackRunSimulationOutput
{
    public PackRunSimulationOutput(
        string name,
        string type,
        TaskPackPlanParameterValue? path,
        TaskPackPlanParameterValue? expression)
    {
        Name = string.IsNullOrWhiteSpace(name) ? throw new ArgumentException("Value cannot be null or whitespace.", nameof(name)) : name;
        Type = string.IsNullOrWhiteSpace(type) ? throw new ArgumentException("Value cannot be null or whitespace.", nameof(type)) : type;
        Path = path;
        Expression = expression;
    }

    public string Name { get; }

    public string Type { get; }

    public TaskPackPlanParameterValue? Path { get; }

    public TaskPackPlanParameterValue? Expression { get; }

    public bool RequiresRuntimeValue =>
        (Path?.RequiresRuntimeValue ?? false) ||
        (Expression?.RequiresRuntimeValue ?? false);

    public static IReadOnlyList<PackRunSimulationOutput> Empty { get; } =
        new ReadOnlyCollection<PackRunSimulationOutput>(Array.Empty<PackRunSimulationOutput>());
}

@@ -1,191 +1,191 @@
using System.Text.Json;
using StellaOps.TaskRunner.Core.Execution;
using StellaOps.TaskRunner.Core.Planning;

namespace StellaOps.TaskRunner.Infrastructure.Execution;

/// <summary>
/// File-system backed implementation of <see cref="IPackRunStateStore"/> intended for development and air-gapped smoke tests.
/// </summary>
public sealed class FilePackRunStateStore : IPackRunStateStore
{
    private static readonly JsonSerializerOptions SerializerOptions = new(JsonSerializerDefaults.Web)
    {
        WriteIndented = true
    };

    private readonly string rootPath;
    private readonly SemaphoreSlim mutex = new(1, 1);

    public FilePackRunStateStore(string rootPath)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(rootPath);

        this.rootPath = Path.GetFullPath(rootPath);
        Directory.CreateDirectory(this.rootPath);
    }

    public async Task<PackRunState?> GetAsync(string runId, CancellationToken cancellationToken)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(runId);

        var path = GetPath(runId);
        if (!File.Exists(path))
        {
            return null;
        }

        await using var stream = File.Open(path, FileMode.Open, FileAccess.Read, FileShare.Read);
        var document = await JsonSerializer.DeserializeAsync<StateDocument>(stream, SerializerOptions, cancellationToken)
            .ConfigureAwait(false);

        return document?.ToDomain();
    }

    public async Task SaveAsync(PackRunState state, CancellationToken cancellationToken)
    {
        ArgumentNullException.ThrowIfNull(state);

        var path = GetPath(state.RunId);
        var document = StateDocument.FromDomain(state);

        Directory.CreateDirectory(rootPath);

        await mutex.WaitAsync(cancellationToken).ConfigureAwait(false);
        try
        {
            await using var stream = File.Open(path, FileMode.Create, FileAccess.Write, FileShare.None);
            await JsonSerializer.SerializeAsync(stream, document, SerializerOptions, cancellationToken)
                .ConfigureAwait(false);
        }
        finally
        {
            mutex.Release();
        }
    }

    public async Task<IReadOnlyList<PackRunState>> ListAsync(CancellationToken cancellationToken)
    {
        if (!Directory.Exists(rootPath))
        {
            return Array.Empty<PackRunState>();
        }

        var states = new List<PackRunState>();

        var files = Directory.EnumerateFiles(rootPath, "*.json", SearchOption.TopDirectoryOnly)
            .OrderBy(file => file, StringComparer.Ordinal);

        foreach (var file in files)
        {
            cancellationToken.ThrowIfCancellationRequested();

            await using var stream = File.Open(file, FileMode.Open, FileAccess.Read, FileShare.Read);
            var document = await JsonSerializer.DeserializeAsync<StateDocument>(stream, SerializerOptions, cancellationToken)
                .ConfigureAwait(false);

            if (document is not null)
            {
                states.Add(document.ToDomain());
            }
        }

        return states;
    }

    private string GetPath(string runId)
    {
        var safeName = SanitizeFileName(runId);
        return Path.Combine(rootPath, $"{safeName}.json");
    }

    private static string SanitizeFileName(string value)
    {
        var result = value.Trim();
        foreach (var invalid in Path.GetInvalidFileNameChars())
        {
            result = result.Replace(invalid, '_');
        }

        return result;
    }

    private sealed record StateDocument(
        string RunId,
        string PlanHash,
        TaskPackPlanFailurePolicy FailurePolicy,
        DateTimeOffset CreatedAt,
        DateTimeOffset UpdatedAt,
        IReadOnlyList<StepDocument> Steps)
    {
        public static StateDocument FromDomain(PackRunState state)
        {
            var steps = state.Steps.Values
                .OrderBy(step => step.StepId, StringComparer.Ordinal)
                .Select(step => new StepDocument(
                    step.StepId,
                    step.Kind,
                    step.Enabled,
                    step.ContinueOnError,
                    step.MaxParallel,
                    step.ApprovalId,
                    step.GateMessage,
                    step.Status,
                    step.Attempts,
                    step.LastTransitionAt,
                    step.NextAttemptAt,
                    step.StatusReason))
                .ToList();

            return new StateDocument(
                state.RunId,
                state.PlanHash,
                state.FailurePolicy,
                state.CreatedAt,
                state.UpdatedAt,
                steps);
        }

        public PackRunState ToDomain()
        {
            var steps = Steps.ToDictionary(
                step => step.StepId,
                step => new PackRunStepStateRecord(
                    step.StepId,
                    step.Kind,
                    step.Enabled,
                    step.ContinueOnError,
                    step.MaxParallel,
                    step.ApprovalId,
                    step.GateMessage,
                    step.Status,
                    step.Attempts,
                    step.LastTransitionAt,
                    step.NextAttemptAt,
                    step.StatusReason),
                StringComparer.Ordinal);

            return new PackRunState(
                RunId,
                PlanHash,
                FailurePolicy,
                CreatedAt,
                UpdatedAt,
                steps);
        }
    }

    private sealed record StepDocument(
        string StepId,
        PackRunStepKind Kind,
        bool Enabled,
        bool ContinueOnError,
        int? MaxParallel,
        string? ApprovalId,
        string? GateMessage,
        PackRunStepExecutionStatus Status,
        int Attempts,
        DateTimeOffset? LastTransitionAt,
        DateTimeOffset? NextAttemptAt,
        string? StatusReason);
}

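The store round-trips state through one JSON file per run under a root directory, mirroring the tests below. A sketch; the path is illustrative and `state` is a PackRunState built elsewhere (for example as sketched earlier):

var store = new FilePackRunStateStore("/tmp/taskrunner-state");
await store.SaveAsync(state, CancellationToken.None);
var reloaded = await store.GetAsync(state.RunId, CancellationToken.None);
var all = await store.ListAsync(CancellationToken.None); // deterministic, ordered by file name
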
@@ -1,24 +1,24 @@
using System.Text.Json.Nodes;
using StellaOps.TaskRunner.Core.Execution;
using StellaOps.TaskRunner.Core.Planning;

namespace StellaOps.TaskRunner.Infrastructure.Execution;

public sealed class NoopPackRunStepExecutor : IPackRunStepExecutor
{
    public Task<PackRunStepExecutionResult> ExecuteAsync(
        PackRunExecutionStep step,
        IReadOnlyDictionary<string, TaskPackPlanParameterValue> parameters,
        CancellationToken cancellationToken)
    {
        if (parameters.TryGetValue("simulateFailure", out var value) &&
            value.Value is JsonValue jsonValue &&
            jsonValue.TryGetValue<bool>(out var failure) &&
            failure)
        {
            return Task.FromResult(new PackRunStepExecutionResult(false, "Simulated failure requested."));
        }

        return Task.FromResult(new PackRunStepExecutionResult(true));
    }
}

@@ -1,105 +1,105 @@
using StellaOps.TaskRunner.Core.Execution;
using StellaOps.TaskRunner.Core.Planning;
using StellaOps.TaskRunner.Infrastructure.Execution;

namespace StellaOps.TaskRunner.Tests;

public sealed class FilePackRunStateStoreTests
{
[Fact]
public async Task SaveAndGetAsync_RoundTripsState()
{
var directory = CreateTempDirectory();
try
{
var store = new FilePackRunStateStore(directory);
var original = CreateState("run:primary");

await store.SaveAsync(original, CancellationToken.None);

var reloaded = await store.GetAsync("run:primary", CancellationToken.None);
Assert.NotNull(reloaded);
Assert.Equal(original.RunId, reloaded!.RunId);
Assert.Equal(original.PlanHash, reloaded.PlanHash);
Assert.Equal(original.FailurePolicy, reloaded.FailurePolicy);
Assert.Equal(original.Steps.Count, reloaded.Steps.Count);
var step = Assert.Single(reloaded.Steps);
Assert.Equal("step-a", step.Key);
Assert.Equal(original.Steps["step-a"], step.Value);
}
finally
{
TryDelete(directory);
}
}

[Fact]
public async Task ListAsync_ReturnsStatesInDeterministicOrder()
{
var directory = CreateTempDirectory();
try
{
var store = new FilePackRunStateStore(directory);
var stateB = CreateState("run-b");
var stateA = CreateState("run-a");

await store.SaveAsync(stateB, CancellationToken.None);
await store.SaveAsync(stateA, CancellationToken.None);

var states = await store.ListAsync(CancellationToken.None);

Assert.Collection(states,
first => Assert.Equal("run-a", first.RunId),
second => Assert.Equal("run-b", second.RunId));
}
finally
{
TryDelete(directory);
}
}

private static PackRunState CreateState(string runId)
{
var failurePolicy = new TaskPackPlanFailurePolicy(MaxAttempts: 3, BackoffSeconds: 30, ContinueOnError: false);
var steps = new Dictionary<string, PackRunStepStateRecord>(StringComparer.Ordinal)
{
["step-a"] = new PackRunStepStateRecord(
StepId: "step-a",
Kind: PackRunStepKind.Run,
Enabled: true,
ContinueOnError: false,
MaxParallel: null,
ApprovalId: null,
GateMessage: null,
Status: PackRunStepExecutionStatus.Pending,
Attempts: 1,
LastTransitionAt: DateTimeOffset.UtcNow,
NextAttemptAt: null,
StatusReason: null)
};

return PackRunState.Create(runId, "hash-123", failurePolicy, steps, DateTimeOffset.UtcNow);
}

private static string CreateTempDirectory()
{
var path = Path.Combine(Path.GetTempPath(), "stellaops-taskrunner-tests", Guid.NewGuid().ToString("N"));
Directory.CreateDirectory(path);
return path;
}

private static void TryDelete(string directory)
{
try
{
if (Directory.Exists(directory))
{
Directory.Delete(directory, recursive: true);
}
}
catch
{
// Swallow cleanup errors to avoid masking test assertions.
}
}
}
@@ -1,68 +1,68 @@
using System.Text.Json.Nodes;
using StellaOps.TaskRunner.Core.Execution;
using StellaOps.TaskRunner.Core.Planning;

namespace StellaOps.TaskRunner.Tests;

public sealed class PackRunExecutionGraphBuilderTests
{
[Fact]
public void Build_GeneratesParallelMetadata()
{
var manifest = TestManifests.Load(TestManifests.Parallel);
var planner = new TaskPackPlanner();

var result = planner.Plan(manifest);
Assert.True(result.Success);
var plan = result.Plan!;

var builder = new PackRunExecutionGraphBuilder();
var graph = builder.Build(plan);

Assert.Equal(2, graph.FailurePolicy.MaxAttempts);
Assert.Equal(10, graph.FailurePolicy.BackoffSeconds);

var parallel = Assert.Single(graph.Steps);
Assert.Equal(PackRunStepKind.Parallel, parallel.Kind);
Assert.True(parallel.Enabled);
Assert.Equal(2, parallel.MaxParallel);
Assert.True(parallel.ContinueOnError);
Assert.Equal(2, parallel.Children.Count);
Assert.All(parallel.Children, child => Assert.Equal(PackRunStepKind.Run, child.Kind));
}

[Fact]
public void Build_PreservesMapIterationsAndDisabledSteps()
{
var planner = new TaskPackPlanner();
var builder = new PackRunExecutionGraphBuilder();

// Map iterations
var mapManifest = TestManifests.Load(TestManifests.Map);
var inputs = new Dictionary<string, JsonNode?>
{
["targets"] = new JsonArray("alpha", "beta", "gamma")
};

var mapPlan = planner.Plan(mapManifest, inputs).Plan!;
var mapGraph = builder.Build(mapPlan);

var mapStep = Assert.Single(mapGraph.Steps);
Assert.Equal(PackRunStepKind.Map, mapStep.Kind);
Assert.Equal(3, mapStep.Children.Count);
Assert.All(mapStep.Children, child => Assert.Equal(PackRunStepKind.Run, child.Kind));

// Disabled conditional step
var conditionalManifest = TestManifests.Load(TestManifests.Sample);
var conditionalInputs = new Dictionary<string, JsonNode?>
{
["dryRun"] = JsonValue.Create(true)
};

var conditionalPlan = planner.Plan(conditionalManifest, conditionalInputs).Plan!;
var conditionalGraph = builder.Build(conditionalPlan);

var applyStep = conditionalGraph.Steps.Single(step => step.Id == "apply-step");
Assert.False(applyStep.Enabled);
}
}
@@ -1,150 +1,150 @@
using System;
using System.Collections.Generic;
using StellaOps.TaskRunner.Core.Execution;
using StellaOps.TaskRunner.Core.Planning;

namespace StellaOps.TaskRunner.Tests;

public sealed class PackRunGateStateUpdaterTests
{
private static readonly DateTimeOffset RequestedAt = DateTimeOffset.UnixEpoch;
private static readonly DateTimeOffset UpdateTimestamp = DateTimeOffset.UnixEpoch.AddMinutes(5);

[Fact]
public void Apply_ApprovedGate_ClearsReasonAndSucceeds()
{
var plan = BuildApprovalPlan();
var graph = new PackRunExecutionGraphBuilder().Build(plan);
var state = CreateInitialState(plan, graph);
var coordinator = PackRunApprovalCoordinator.Create(plan, RequestedAt);
coordinator.Approve("security-review", "approver-1", UpdateTimestamp);

var result = PackRunGateStateUpdater.Apply(state, graph, coordinator, UpdateTimestamp);

Assert.False(result.HasBlockingFailure);
Assert.Equal(UpdateTimestamp, result.State.UpdatedAt);

var gate = result.State.Steps["approval"];
Assert.Equal(PackRunStepExecutionStatus.Succeeded, gate.Status);
Assert.Null(gate.StatusReason);
Assert.Equal(UpdateTimestamp, gate.LastTransitionAt);
}

[Fact]
public void Apply_RejectedGate_FlagsFailure()
{
var plan = BuildApprovalPlan();
var graph = new PackRunExecutionGraphBuilder().Build(plan);
var state = CreateInitialState(plan, graph);
var coordinator = PackRunApprovalCoordinator.Create(plan, RequestedAt);
coordinator.Reject("security-review", "approver-1", UpdateTimestamp, "not-safe");

var result = PackRunGateStateUpdater.Apply(state, graph, coordinator, UpdateTimestamp);

Assert.True(result.HasBlockingFailure);
Assert.Equal(UpdateTimestamp, result.State.UpdatedAt);

var gate = result.State.Steps["approval"];
Assert.Equal(PackRunStepExecutionStatus.Failed, gate.Status);
Assert.StartsWith("approval-rejected", gate.StatusReason, StringComparison.Ordinal);
Assert.Equal(UpdateTimestamp, gate.LastTransitionAt);
}

[Fact]
public void Apply_PolicyGate_ClearsPendingReason()
{
var plan = BuildPolicyPlan();
var graph = new PackRunExecutionGraphBuilder().Build(plan);
var state = CreateInitialState(plan, graph);
var coordinator = PackRunApprovalCoordinator.Create(plan, RequestedAt);

var result = PackRunGateStateUpdater.Apply(state, graph, coordinator, UpdateTimestamp);

Assert.False(result.HasBlockingFailure);

var gate = result.State.Steps["policy-check"];
Assert.Equal(PackRunStepExecutionStatus.Succeeded, gate.Status);
Assert.Null(gate.StatusReason);
Assert.Equal(UpdateTimestamp, gate.LastTransitionAt);

var prepare = result.State.Steps["prepare"];
Assert.Equal(PackRunStepExecutionStatus.Pending, prepare.Status);
Assert.Null(prepare.StatusReason);
}

private static TaskPackPlan BuildApprovalPlan()
{
var manifest = TestManifests.Load(TestManifests.Sample);
var planner = new TaskPackPlanner();
var inputs = new Dictionary<string, System.Text.Json.Nodes.JsonNode?>
{
["dryRun"] = System.Text.Json.Nodes.JsonValue.Create(false)
};

return planner.Plan(manifest, inputs).Plan!;
}

private static TaskPackPlan BuildPolicyPlan()
{
var manifest = TestManifests.Load(TestManifests.PolicyGate);
var planner = new TaskPackPlanner();
return planner.Plan(manifest).Plan!;
}

private static PackRunState CreateInitialState(TaskPackPlan plan, PackRunExecutionGraph graph)
{
var steps = new Dictionary<string, PackRunStepStateRecord>(StringComparer.Ordinal);

foreach (var step in EnumerateSteps(graph.Steps))
{
var status = PackRunStepExecutionStatus.Pending;
string? reason = null;

if (!step.Enabled)
{
status = PackRunStepExecutionStatus.Skipped;
reason = "disabled";
}
else if (step.Kind == PackRunStepKind.GateApproval)
{
reason = "requires-approval";
}
else if (step.Kind == PackRunStepKind.GatePolicy)
{
reason = "requires-policy";
}

steps[step.Id] = new PackRunStepStateRecord(
step.Id,
step.Kind,
step.Enabled,
step.ContinueOnError,
step.MaxParallel,
step.ApprovalId,
step.GateMessage,
status,
Attempts: 0,
LastTransitionAt: null,
NextAttemptAt: null,
StatusReason: reason);
}

return PackRunState.Create("run-1", plan.Hash, graph.FailurePolicy, steps, RequestedAt);
}

private static IEnumerable<PackRunExecutionStep> EnumerateSteps(IReadOnlyList<PackRunExecutionStep> steps)
{
foreach (var step in steps)
{
yield return step;

if (step.Children.Count > 0)
{
foreach (var child in EnumerateSteps(step.Children))
{
yield return child;
}
}
}
}
}
@@ -1,75 +1,75 @@
using System.Text.Json.Nodes;
using StellaOps.TaskRunner.Core.Execution;
using StellaOps.TaskRunner.Core.Execution.Simulation;
using StellaOps.TaskRunner.Core.Planning;

namespace StellaOps.TaskRunner.Tests;

public sealed class PackRunSimulationEngineTests
{
[Fact]
public void Simulate_IdentifiesGateStatuses()
{
var manifest = TestManifests.Load(TestManifests.PolicyGate);
var planner = new TaskPackPlanner();
var plan = planner.Plan(manifest).Plan!;

var engine = new PackRunSimulationEngine();
var result = engine.Simulate(plan);

var gate = result.Steps.Single(step => step.Kind == PackRunStepKind.GatePolicy);
Assert.Equal(PackRunSimulationStatus.RequiresPolicy, gate.Status);

var run = result.Steps.Single(step => step.Kind == PackRunStepKind.Run);
Assert.Equal(PackRunSimulationStatus.Pending, run.Status);
}

[Fact]
public void Simulate_MarksDisabledStepsAndOutputs()
{
var manifest = TestManifests.Load(TestManifests.Sample);
var planner = new TaskPackPlanner();
var inputs = new Dictionary<string, JsonNode?>
{
["dryRun"] = JsonValue.Create(true)
};

var plan = planner.Plan(manifest, inputs).Plan!;

var engine = new PackRunSimulationEngine();
var result = engine.Simulate(plan);

var applyStep = result.Steps.Single(step => step.Id == "apply-step");
Assert.Equal(PackRunSimulationStatus.Skipped, applyStep.Status);

Assert.Empty(result.Outputs);
Assert.Equal(PackRunExecutionGraph.DefaultFailurePolicy.MaxAttempts, result.FailurePolicy.MaxAttempts);
Assert.Equal(PackRunExecutionGraph.DefaultFailurePolicy.BackoffSeconds, result.FailurePolicy.BackoffSeconds);
}

[Fact]
public void Simulate_ProjectsOutputsAndRuntimeFlags()
{
var manifest = TestManifests.Load(TestManifests.Output);
var planner = new TaskPackPlanner();
var plan = planner.Plan(manifest).Plan!;

var engine = new PackRunSimulationEngine();
var result = engine.Simulate(plan);

var step = Assert.Single(result.Steps);
Assert.Equal(PackRunStepKind.Run, step.Kind);

Assert.Collection(result.Outputs,
bundle =>
{
Assert.Equal("bundlePath", bundle.Name);
Assert.False(bundle.RequiresRuntimeValue);
},
evidence =>
{
Assert.Equal("evidenceModel", evidence.Name);
Assert.True(evidence.RequiresRuntimeValue);
});
}
}
@@ -1,66 +1,66 @@
using StellaOps.TaskRunner.Core.Execution;
using StellaOps.TaskRunner.Core.Planning;

namespace StellaOps.TaskRunner.Tests;

public sealed class PackRunStepStateMachineTests
{
private static readonly TaskPackPlanFailurePolicy RetryTwicePolicy = new(MaxAttempts: 3, BackoffSeconds: 5, ContinueOnError: false);

[Fact]
public void Start_FromPending_SetsRunning()
{
var state = PackRunStepStateMachine.Create();
var started = PackRunStepStateMachine.Start(state, DateTimeOffset.UnixEpoch);

Assert.Equal(PackRunStepExecutionStatus.Running, started.Status);
Assert.Equal(0, started.Attempts);
}

[Fact]
public void CompleteSuccess_IncrementsAttempts()
{
var state = PackRunStepStateMachine.Create();
var running = PackRunStepStateMachine.Start(state, DateTimeOffset.UnixEpoch);
var completed = PackRunStepStateMachine.CompleteSuccess(running, DateTimeOffset.UnixEpoch.AddSeconds(1));

Assert.Equal(PackRunStepExecutionStatus.Succeeded, completed.Status);
Assert.Equal(1, completed.Attempts);
Assert.Null(completed.NextAttemptAt);
}

[Fact]
public void RegisterFailure_SchedulesRetryUntilMaxAttempts()
{
var state = PackRunStepStateMachine.Create();
var running = PackRunStepStateMachine.Start(state, DateTimeOffset.UnixEpoch);

var firstFailure = PackRunStepStateMachine.RegisterFailure(running, DateTimeOffset.UnixEpoch.AddSeconds(2), RetryTwicePolicy);
Assert.Equal(PackRunStepFailureOutcome.Retry, firstFailure.Outcome);
Assert.Equal(PackRunStepExecutionStatus.Pending, firstFailure.State.Status);
Assert.Equal(1, firstFailure.State.Attempts);
Assert.Equal(DateTimeOffset.UnixEpoch.AddSeconds(7), firstFailure.State.NextAttemptAt);

var restarted = PackRunStepStateMachine.Start(firstFailure.State, DateTimeOffset.UnixEpoch.AddSeconds(7));
var secondFailure = PackRunStepStateMachine.RegisterFailure(restarted, DateTimeOffset.UnixEpoch.AddSeconds(9), RetryTwicePolicy);
Assert.Equal(PackRunStepFailureOutcome.Retry, secondFailure.Outcome);
Assert.Equal(2, secondFailure.State.Attempts);

var finalStart = PackRunStepStateMachine.Start(secondFailure.State, DateTimeOffset.UnixEpoch.AddSeconds(9 + RetryTwicePolicy.BackoffSeconds));
var terminalFailure = PackRunStepStateMachine.RegisterFailure(finalStart, DateTimeOffset.UnixEpoch.AddSeconds(20), RetryTwicePolicy);
Assert.Equal(PackRunStepFailureOutcome.Abort, terminalFailure.Outcome);
Assert.Equal(PackRunStepExecutionStatus.Failed, terminalFailure.State.Status);
Assert.Equal(3, terminalFailure.State.Attempts);
Assert.Null(terminalFailure.State.NextAttemptAt);
}

[Fact]
public void Skip_FromPending_SetsSkipped()
{
var state = PackRunStepStateMachine.Create();
var skipped = PackRunStepStateMachine.Skip(state, DateTimeOffset.UnixEpoch.AddHours(1));

Assert.Equal(PackRunStepExecutionStatus.Skipped, skipped.Status);
Assert.Equal(0, skipped.Attempts);
}
}
@@ -1,242 +1,242 @@
using System.Text.Json.Nodes;
using Microsoft.AspNetCore.Mvc;
using Microsoft.Extensions.Options;
using StellaOps.TaskRunner.Core.Execution;
using StellaOps.TaskRunner.Core.Execution.Simulation;
using StellaOps.TaskRunner.Core.Planning;
using StellaOps.TaskRunner.Core.TaskPacks;
using StellaOps.TaskRunner.Infrastructure.Execution;
using StellaOps.TaskRunner.WebService;

var builder = WebApplication.CreateBuilder(args);

builder.Services.Configure<TaskRunnerServiceOptions>(builder.Configuration.GetSection("TaskRunner"));
builder.Services.AddSingleton<TaskPackManifestLoader>();
builder.Services.AddSingleton<TaskPackPlanner>();
builder.Services.AddSingleton<PackRunSimulationEngine>();
builder.Services.AddSingleton<PackRunExecutionGraphBuilder>();
builder.Services.AddSingleton<IPackRunStateStore>(sp =>
{
var options = sp.GetRequiredService<IOptions<TaskRunnerServiceOptions>>().Value;
return new FilePackRunStateStore(options.RunStatePath);
});
builder.Services.AddOpenApi();

var app = builder.Build();

if (app.Environment.IsDevelopment())
{
app.MapOpenApi();
}

app.MapPost("/v1/task-runner/simulations", async (
[FromBody] SimulationRequest request,
TaskPackManifestLoader loader,
TaskPackPlanner planner,
PackRunSimulationEngine simulationEngine,
CancellationToken cancellationToken) =>
{
if (string.IsNullOrWhiteSpace(request.Manifest))
{
return Results.BadRequest(new { error = "Manifest is required." });
}

TaskPackManifest manifest;
try
{
manifest = loader.Deserialize(request.Manifest);
}
catch (Exception ex)
{
return Results.BadRequest(new { error = "Invalid manifest", detail = ex.Message });
}

var inputs = ConvertInputs(request.Inputs);
var planResult = planner.Plan(manifest, inputs);
if (!planResult.Success || planResult.Plan is null)
{
return Results.BadRequest(new
{
errors = planResult.Errors.Select(error => new { error.Path, error.Message })
});
}

var plan = planResult.Plan;
var simulation = simulationEngine.Simulate(plan);
var response = SimulationMapper.ToResponse(plan, simulation);
return Results.Ok(response);
}).WithName("SimulateTaskPack");

app.MapGet("/v1/task-runner/runs/{runId}", async (
string runId,
IPackRunStateStore stateStore,
CancellationToken cancellationToken) =>
{
if (string.IsNullOrWhiteSpace(runId))
{
return Results.BadRequest(new { error = "runId is required." });
}

var state = await stateStore.GetAsync(runId, cancellationToken).ConfigureAwait(false);
if (state is null)
{
return Results.NotFound();
}

return Results.Ok(RunStateMapper.ToResponse(state));
}).WithName("GetRunState");

app.MapGet("/", () => Results.Redirect("/openapi"));

app.Run();

static IDictionary<string, JsonNode?>? ConvertInputs(JsonObject? node)
{
if (node is null)
{
return null;
}

var dictionary = new Dictionary<string, JsonNode?>(StringComparer.Ordinal);
foreach (var property in node)
{
dictionary[property.Key] = property.Value?.DeepClone();
}

return dictionary;
}

internal sealed record SimulationRequest(string Manifest, JsonObject? Inputs);

internal sealed record SimulationResponse(
string PlanHash,
FailurePolicyResponse FailurePolicy,
IReadOnlyList<SimulationStepResponse> Steps,
IReadOnlyList<SimulationOutputResponse> Outputs,
bool HasPendingApprovals);

internal sealed record SimulationStepResponse(
string Id,
string TemplateId,
string Kind,
bool Enabled,
string Status,
string? StatusReason,
string? Uses,
string? ApprovalId,
string? GateMessage,
int? MaxParallel,
bool ContinueOnError,
IReadOnlyList<SimulationStepResponse> Children);

internal sealed record SimulationOutputResponse(
string Name,
string Type,
bool RequiresRuntimeValue,
string? PathExpression,
string? ValueExpression);

internal sealed record FailurePolicyResponse(int MaxAttempts, int BackoffSeconds, bool ContinueOnError);

internal sealed record RunStateResponse(
string RunId,
string PlanHash,
FailurePolicyResponse FailurePolicy,
DateTimeOffset CreatedAt,
DateTimeOffset UpdatedAt,
IReadOnlyList<RunStateStepResponse> Steps);

internal sealed record RunStateStepResponse(
string StepId,
string Kind,
bool Enabled,
bool ContinueOnError,
int? MaxParallel,
string? ApprovalId,
string? GateMessage,
string Status,
int Attempts,
DateTimeOffset? LastTransitionAt,
DateTimeOffset? NextAttemptAt,
string? StatusReason);

internal static class SimulationMapper
{
public static SimulationResponse ToResponse(TaskPackPlan plan, PackRunSimulationResult result)
{
var failurePolicy = result.FailurePolicy ?? PackRunExecutionGraph.DefaultFailurePolicy;
var steps = result.Steps.Select(MapStep).ToList();
var outputs = result.Outputs.Select(MapOutput).ToList();

return new SimulationResponse(
plan.Hash,
new FailurePolicyResponse(failurePolicy.MaxAttempts, failurePolicy.BackoffSeconds, failurePolicy.ContinueOnError),
steps,
outputs,
result.HasPendingApprovals);
}

private static SimulationStepResponse MapStep(PackRunSimulationNode node)
{
var children = node.Children.Select(MapStep).ToList();
return new SimulationStepResponse(
node.Id,
node.TemplateId,
node.Kind.ToString(),
node.Enabled,
node.Status.ToString(),
node.Status.ToString() switch
{
nameof(PackRunSimulationStatus.RequiresApproval) => "requires-approval",
nameof(PackRunSimulationStatus.RequiresPolicy) => "requires-policy",
nameof(PackRunSimulationStatus.Skipped) => "condition-false",
_ => null
},
node.Uses,
node.ApprovalId,
node.GateMessage,
node.MaxParallel,
node.ContinueOnError,
children);
}

private static SimulationOutputResponse MapOutput(PackRunSimulationOutput output)
=> new(
output.Name,
output.Type,
output.RequiresRuntimeValue,
output.Path?.Expression,
output.Expression?.Expression);
}

internal static class RunStateMapper
{
public static RunStateResponse ToResponse(PackRunState state)
{
var failurePolicy = state.FailurePolicy ?? PackRunExecutionGraph.DefaultFailurePolicy;
var steps = state.Steps.Values
.OrderBy(step => step.StepId, StringComparer.Ordinal)
.Select(step => new RunStateStepResponse(
step.StepId,
step.Kind.ToString(),
step.Enabled,
step.ContinueOnError,
step.MaxParallel,
step.ApprovalId,
step.GateMessage,
step.Status.ToString(),
step.Attempts,
step.LastTransitionAt,
step.NextAttemptAt,
step.StatusReason))
.ToList();

return new RunStateResponse(
state.RunId,
state.PlanHash,
new FailurePolicyResponse(failurePolicy.MaxAttempts, failurePolicy.BackoffSeconds, failurePolicy.ContinueOnError),
state.CreatedAt,
state.UpdatedAt,
steps);
}
}
@@ -1,6 +1,6 @@
namespace StellaOps.TaskRunner.WebService;

public sealed class TaskRunnerServiceOptions
{
public string RunStatePath { get; set; } = Path.Combine(AppContext.BaseDirectory, "state", "runs");
}
File diff suppressed because it is too large
@@ -1,22 +1,22 @@
<?xml version='1.0' encoding='utf-8'?>
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<TargetFramework>net10.0</TargetFramework>
<Nullable>enable</Nullable>
<ImplicitUsings>enable</ImplicitUsings>
<IsPackable>false</IsPackable>
</PropertyGroup>

<ItemGroup>
<PackageReference Include="Microsoft.AspNetCore.Mvc.Testing" Version="10.0.0-rc.2.25502.107" />
<PackageReference Include="Microsoft.NET.Test.Sdk" Version="17.14.0" />
<PackageReference Include="Mongo2Go" Version="4.1.0" />
<PackageReference Include="xunit" Version="2.9.2" />
<PackageReference Include="xunit.runner.visualstudio" Version="2.8.2" />
<PackageReference Include="coverlet.collector" Version="6.0.4" />
</ItemGroup>

<ItemGroup>
<ProjectReference Include="../../../Signals/StellaOps.Signals/StellaOps.Signals.csproj" />
</ItemGroup>
</Project>