feat: Implement approvals workflow and notifications integration
Some checks failed
Docs CI / lint-and-preview (push) Has been cancelled
- Added approvals orchestration with persistence and workflow scaffolding.
- Integrated notifications insights and staged resume hooks.
- Introduced approval coordinator and policy notification bridge with unit tests.
- Added approval decision API with resume requeue and persisted plan snapshots.
- Documented the Excititor consensus API beta and provided a JSON sample payload.
- Created analyzers to flag usage of deprecated merge service APIs.
- Implemented logging for artifact uploads and the approval decision service.
- Added tests for PackRunApprovalDecisionService and related components.
@@ -730,10 +730,11 @@ internal static class CommandFactory
|
||||
};
|
||||
activate.Add(activatePolicyIdArgument);
|
||||
|
||||
var activateVersionOption = new Option<int>("--version")
|
||||
{
|
||||
Description = "Revision version to activate."
|
||||
};
|
||||
var activateVersionOption = new Option<int>("--version")
|
||||
{
|
||||
Description = "Revision version to activate.",
|
||||
IsRequired = true
|
||||
};
|
||||
|
||||
var activationNoteOption = new Option<string?>("--note")
|
||||
{
|
||||
|
||||
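The hunk above tightens `stella policy activate` by marking `--version` as required. A minimal stand-alone sketch of that behaviour, written against the System.CommandLine 2.0 beta surface; the command wiring and the parse call here are assumptions for illustration, not the CLI's actual `CommandFactory` code, and exact parse entry points differ between prereleases.

```csharp
using System;
using System.CommandLine;
using System.CommandLine.Parsing;

// Illustrative only: mirrors the option shape in the diff above.
var versionOption = new Option<int>("--version")
{
    Description = "Revision version to activate.",
    IsRequired = true
};

var activate = new Command("activate", "Activate a policy revision.");
activate.Add(versionOption);

// With IsRequired = true, omitting --version surfaces a parse error instead of
// silently defaulting the revision to 0.
var result = activate.Parse("");
foreach (var error in result.Errors)
{
    Console.WriteLine(error.Message);
}
```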
@@ -49,6 +49,7 @@
|
||||
| CLI-POLICY-23-004 | TODO | DevEx/CLI Guild | WEB-POLICY-23-001 | Add `stella policy lint` command validating SPL files with compiler diagnostics; support JSON output. | Command returns lint diagnostics; exit codes documented; tests cover error scenarios. |
|
||||
| CLI-POLICY-23-005 | DOING (2025-10-28) | DevEx/CLI Guild | POLICY-GATEWAY-18-002..003, WEB-POLICY-23-002 | Implement `stella policy activate` with scheduling window, approval enforcement, and summary output. | Activation command integrates with API, handles 2-person rule failures; tests cover success/error. |
|
||||
> 2025-10-28: CLI command implemented with gateway integration (`policy activate`), interactive summary output, retry-aware metrics, and exit codes (0 success, 75 pending second approval). Tests cover success/pending/error paths.
|
||||
> 2025-11-06: Tightened required `--version` parsing, added scheduled activation handling coverage, and expanded tests to validate timestamp normalization.
|
||||
| CLI-POLICY-23-006 | TODO | DevEx/CLI Guild | WEB-POLICY-23-004 | Provide `stella policy history` and `stella policy explain` commands to pull run history and explanation trees. | Commands output JSON/table; integration tests with fixtures; docs updated. |
|
||||
|
||||
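The worknotes above document two behaviours that the tests later in this diff exercise: exit codes (0 on success, 75 when a second approval is pending) and normalization of the `--scheduled-at` timestamp to UTC. A minimal, hypothetical sketch of both; the helper name and the status strings (`"pending_second_approval"`) are taken from this diff, but the mapping is illustrative, not the CLI's actual handler code.

```csharp
using System;
using System.Globalization;

// Hypothetical helper mirroring the documented CLI behaviour.
internal static class PolicyActivationCliSketch
{
    // 0 = success/scheduled, 75 = pending second approval (EX_TEMPFAIL-style code).
    public static int MapExitCode(string activationStatus) => activationStatus switch
    {
        "pending_second_approval" => 75,
        _ => 0
    };

    // Scheduled timestamps are normalized to UTC regardless of the supplied offset.
    public static DateTimeOffset ParseScheduledAt(string value) =>
        DateTimeOffset.Parse(
            value,
            CultureInfo.InvariantCulture,
            DateTimeStyles.AssumeUniversal | DateTimeStyles.AdjustToUniversal);
}

// Example: "2025-12-01T03:00:00+02:00" normalizes to 2025-12-01T01:00:00Z.
```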
## Graph & Vuln Explorer v1
|
||||
|
||||
@@ -1811,11 +1811,11 @@ spec:
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task HandlePolicyActivateAsync_PendingSecondApprovalSetsExitCode()
|
||||
{
|
||||
var originalExit = Environment.ExitCode;
|
||||
|
||||
var backend = new StubBackendClient(new JobTriggerResult(true, "ok", null, null));
|
||||
public async Task HandlePolicyActivateAsync_PendingSecondApprovalSetsExitCode()
|
||||
{
|
||||
var originalExit = Environment.ExitCode;
|
||||
|
||||
var backend = new StubBackendClient(new JobTriggerResult(true, "ok", null, null));
|
||||
backend.ActivationResult = new PolicyActivationResult(
|
||||
"pending_second_approval",
|
||||
new PolicyActivationRevision(
|
||||
@@ -1852,15 +1852,65 @@ spec:
|
||||
finally
|
||||
{
|
||||
Environment.ExitCode = originalExit;
|
||||
}
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task HandlePolicyActivateAsync_MapsErrorCodes()
|
||||
{
|
||||
var originalExit = Environment.ExitCode;
|
||||
|
||||
var backend = new StubBackendClient(new JobTriggerResult(true, "ok", null, null))
|
||||
}
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task HandlePolicyActivateAsync_ParsesScheduledTimestamp()
|
||||
{
|
||||
var originalExit = Environment.ExitCode;
|
||||
var backend = new StubBackendClient(new JobTriggerResult(true, "ok", null, null));
|
||||
backend.ActivationResult = new PolicyActivationResult(
|
||||
"scheduled",
|
||||
new PolicyActivationRevision(
|
||||
"P-8",
|
||||
5,
|
||||
"approved",
|
||||
false,
|
||||
DateTimeOffset.Parse("2025-12-01T00:30:00Z", CultureInfo.InvariantCulture),
|
||||
null,
|
||||
new ReadOnlyCollection<PolicyActivationApproval>(Array.Empty<PolicyActivationApproval>())));
|
||||
|
||||
var provider = BuildServiceProvider(backend);
|
||||
|
||||
try
|
||||
{
|
||||
const string scheduledValue = "2025-12-01T03:00:00+02:00";
|
||||
await CommandHandlers.HandlePolicyActivateAsync(
|
||||
provider,
|
||||
policyId: "P-8",
|
||||
version: 5,
|
||||
note: null,
|
||||
runNow: false,
|
||||
scheduledAt: scheduledValue,
|
||||
priority: null,
|
||||
rollback: false,
|
||||
incidentId: null,
|
||||
verbose: false,
|
||||
cancellationToken: CancellationToken.None);
|
||||
|
||||
Assert.Equal(0, Environment.ExitCode);
|
||||
Assert.NotNull(backend.LastPolicyActivation);
|
||||
var activation = backend.LastPolicyActivation!.Value;
|
||||
Assert.False(activation.Request.RunNow);
|
||||
var expected = DateTimeOffset.Parse(
|
||||
scheduledValue,
|
||||
CultureInfo.InvariantCulture,
|
||||
DateTimeStyles.AssumeUniversal | DateTimeStyles.AdjustToUniversal);
|
||||
Assert.Equal(expected, activation.Request.ScheduledAt);
|
||||
}
|
||||
finally
|
||||
{
|
||||
Environment.ExitCode = originalExit;
|
||||
}
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task HandlePolicyActivateAsync_MapsErrorCodes()
|
||||
{
|
||||
var originalExit = Environment.ExitCode;
|
||||
|
||||
var backend = new StubBackendClient(new JobTriggerResult(true, "ok", null, null))
|
||||
{
|
||||
ActivationException = new PolicyApiException("Revision not approved", HttpStatusCode.BadRequest, "ERR_POL_002")
|
||||
};
|
||||
|
||||
@@ -52,9 +52,11 @@ internal static class JobRegistrationExtensions
|
||||
new("source:vndr-oracle:parse", "StellaOps.Concelier.Connector.Vndr.Oracle.OracleParseJob", "StellaOps.Concelier.Connector.Vndr.Oracle", TimeSpan.FromMinutes(15), TimeSpan.FromMinutes(5)),
|
||||
new("source:vndr-oracle:map", "StellaOps.Concelier.Connector.Vndr.Oracle.OracleMapJob", "StellaOps.Concelier.Connector.Vndr.Oracle", TimeSpan.FromMinutes(15), TimeSpan.FromMinutes(5)),
|
||||
|
||||
new("export:json", "StellaOps.Concelier.Exporter.Json.JsonExportJob", "StellaOps.Concelier.Exporter.Json", TimeSpan.FromMinutes(10), TimeSpan.FromMinutes(5)),
|
||||
new("export:json", "StellaOps.Concelier.Exporter.Json.JsonExportJob", "StellaOps.Concelier.Exporter.Json", TimeSpan.FromMinutes(10), TimeSpan.FromMinutes(5)),
|
||||
new("export:trivy-db", "StellaOps.Concelier.Exporter.TrivyDb.TrivyDbExportJob", "StellaOps.Concelier.Exporter.TrivyDb", TimeSpan.FromMinutes(20), TimeSpan.FromMinutes(10)),
|
||||
#pragma warning disable CS0618, CONCELIER0001 // Legacy merge job remains available until MERGE-LNM-21-002 completes.
|
||||
new("merge:reconcile", "StellaOps.Concelier.Merge.Jobs.MergeReconcileJob", "StellaOps.Concelier.Merge", TimeSpan.FromMinutes(15), TimeSpan.FromMinutes(5))
|
||||
#pragma warning restore CS0618, CONCELIER0001
|
||||
};
|
||||
|
||||
public static IServiceCollection AddBuiltInConcelierJobs(this IServiceCollection services)
|
||||
|
||||
@@ -15,6 +15,8 @@ public sealed class ConcelierOptions
|
||||
public AuthorityOptions Authority { get; set; } = new();
|
||||
|
||||
public MirrorOptions Mirror { get; set; } = new();
|
||||
|
||||
public FeaturesOptions Features { get; set; } = new();
|
||||
|
||||
public sealed class StorageOptions
|
||||
{
|
||||
@@ -135,4 +137,13 @@ public sealed class ConcelierOptions
|
||||
|
||||
public int MaxDownloadRequestsPerHour { get; set; } = 1200;
|
||||
}
|
||||
|
||||
public sealed class FeaturesOptions
|
||||
{
|
||||
public bool NoMergeEnabled { get; set; }
|
||||
|
||||
public bool LnmShadowWrites { get; set; } = true;
|
||||
|
||||
public IList<string> MergeJobAllowlist { get; } = new List<string>();
|
||||
}
|
||||
}
|
||||
|
||||
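The new `FeaturesOptions` block is driven from configuration under `concelier:features:*`; the integration tests later in this diff set it via environment variables such as `CONCELIER_FEATURES__NOMERGEENABLED`. A minimal binding sketch using the standard Microsoft.Extensions.Configuration binder; the stand-in class below copies the shape shown in the hunk, and the key-to-env-var mapping assumes the service's usual `CONCELIER_` prefix handling.

```csharp
using System.Collections.Generic;
using Microsoft.Extensions.Configuration;

var configuration = new ConfigurationBuilder()
    .AddInMemoryCollection(new Dictionary<string, string?>
    {
        // Equivalent env vars (assuming the CONCELIER_ prefix mapping):
        //   CONCELIER_FEATURES__NOMERGEENABLED, CONCELIER_FEATURES__MERGEJOBALLOWLIST__0
        ["Concelier:Features:NoMergeEnabled"] = "true",
        ["Concelier:Features:MergeJobAllowlist:0"] = "merge:reconcile"
    })
    .Build();

var features = new FeaturesOptions();
configuration.GetSection("Concelier:Features").Bind(features);
// features.NoMergeEnabled == true; MergeJobAllowlist contains "merge:reconcile".

// Stand-in copy of ConcelierOptions.FeaturesOptions as declared in the hunk above.
public sealed class FeaturesOptions
{
    public bool NoMergeEnabled { get; set; }

    public bool LnmShadowWrites { get; set; } = true;

    public IList<string> MergeJobAllowlist { get; } = new List<string>();
}
```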
@@ -17,7 +17,8 @@ public static class ConcelierOptionsPostConfigure
|
||||
{
|
||||
ArgumentNullException.ThrowIfNull(options);
|
||||
|
||||
options.Authority ??= new ConcelierOptions.AuthorityOptions();
|
||||
options.Authority ??= new ConcelierOptions.AuthorityOptions();
|
||||
options.Features ??= new ConcelierOptions.FeaturesOptions();
|
||||
|
||||
var authority = options.Authority;
|
||||
if (string.IsNullOrWhiteSpace(authority.ClientSecret)
|
||||
|
||||
@@ -1,3 +1,4 @@
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.Collections.Immutable;
|
||||
using System.Globalization;
|
||||
@@ -98,9 +99,36 @@ builder.Services.AddConcelierLinksetMappers();
|
||||
builder.Services.AddAdvisoryRawServices();
|
||||
builder.Services.AddSingleton<IAdvisoryObservationQueryService, AdvisoryObservationQueryService>();
|
||||
|
||||
builder.Services.AddMergeModule(builder.Configuration);
|
||||
var features = concelierOptions.Features ?? new ConcelierOptions.FeaturesOptions();
|
||||
|
||||
if (!features.NoMergeEnabled)
|
||||
{
|
||||
#pragma warning disable CS0618, CONCELIER0001, CONCELIER0002 // Legacy merge service is intentionally supported behind a feature toggle.
|
||||
builder.Services.AddMergeModule(builder.Configuration);
|
||||
#pragma warning restore CS0618, CONCELIER0001, CONCELIER0002
|
||||
}
|
||||
|
||||
builder.Services.AddJobScheduler();
|
||||
builder.Services.AddBuiltInConcelierJobs();
|
||||
builder.Services.PostConfigure<JobSchedulerOptions>(options =>
|
||||
{
|
||||
if (features.NoMergeEnabled)
|
||||
{
|
||||
options.Definitions.Remove("merge:reconcile");
|
||||
return;
|
||||
}
|
||||
|
||||
if (features.MergeJobAllowlist is { Count: > 0 })
|
||||
{
|
||||
var allowMergeJob = features.MergeJobAllowlist.Any(value =>
|
||||
string.Equals(value, "merge:reconcile", StringComparison.OrdinalIgnoreCase));
|
||||
|
||||
if (!allowMergeJob)
|
||||
{
|
||||
options.Definitions.Remove("merge:reconcile");
|
||||
}
|
||||
}
|
||||
});
|
||||
builder.Services.AddSingleton<OpenApiDiscoveryDocumentProvider>();
|
||||
|
||||
builder.Services.AddSingleton<ServiceStatus>(sp => new ServiceStatus(sp.GetRequiredService<TimeProvider>()));
|
||||
@@ -183,7 +211,7 @@ if (authorityConfigured)
|
||||
builder.Services.AddAuthorization(options =>
|
||||
{
|
||||
options.AddStellaOpsScopePolicy(JobsPolicyName, concelierOptions.Authority.RequiredScopes.ToArray());
|
||||
options.AddStellaOpsScopePolicy(ObservationsPolicyName, StellaOpsScopes.VulnRead);
|
||||
options.AddStellaOpsScopePolicy(ObservationsPolicyName, StellaOpsScopes.VulnView);
|
||||
options.AddStellaOpsScopePolicy(AdvisoryIngestPolicyName, StellaOpsScopes.AdvisoryIngest);
|
||||
options.AddStellaOpsScopePolicy(AdvisoryReadPolicyName, StellaOpsScopes.AdvisoryRead);
|
||||
options.AddStellaOpsScopePolicy(AocVerifyPolicyName, StellaOpsScopes.AdvisoryRead, StellaOpsScopes.AocVerify);
|
||||
@@ -197,6 +225,11 @@ builder.Services.AddEndpointsApiExplorer();
|
||||
|
||||
var app = builder.Build();
|
||||
|
||||
if (features.NoMergeEnabled)
|
||||
{
|
||||
app.Logger.LogWarning("Legacy merge module disabled via concelier:features:noMergeEnabled; Link-Not-Merge mode active.");
|
||||
}
|
||||
|
||||
var resolvedConcelierOptions = app.Services.GetRequiredService<IOptions<ConcelierOptions>>().Value;
|
||||
var resolvedAuthority = resolvedConcelierOptions.Authority ?? new ConcelierOptions.AuthorityOptions();
|
||||
authorityConfigured = resolvedAuthority.Enabled;
|
||||
|
||||
@@ -35,5 +35,8 @@
|
||||
<ProjectReference Include="../../Authority/StellaOps.Authority/StellaOps.Auth.ServerIntegration/StellaOps.Auth.ServerIntegration.csproj" />
|
||||
<ProjectReference Include="../../Aoc/__Libraries/StellaOps.Aoc/StellaOps.Aoc.csproj" />
|
||||
<ProjectReference Include="../../Aoc/__Libraries/StellaOps.Aoc.AspNetCore/StellaOps.Aoc.AspNetCore.csproj" />
|
||||
<ProjectReference Include="../__Analyzers/StellaOps.Concelier.Analyzers/StellaOps.Concelier.Analyzers.csproj"
|
||||
OutputItemType="Analyzer"
|
||||
ReferenceOutputAssembly="false" />
|
||||
</ItemGroup>
|
||||
</Project>
|
||||
|
||||
File diff suppressed because it is too large
@@ -0,0 +1,2 @@
|
||||
; Shipped analyzer releases
|
||||
|
||||
@@ -0,0 +1,9 @@
|
||||
## Release History
|
||||
|
||||
### Unreleased
|
||||
|
||||
#### New Rules
|
||||
|
||||
Rule ID | Title | Notes
|
||||
--------|-------|------
|
||||
CONCELIER0002 | Legacy merge pipeline is disabled | Flags usage of `AddMergeModule` and `AdvisoryMergeService`.
|
||||
@@ -0,0 +1,152 @@
|
||||
using System;
|
||||
using System.Collections.Immutable;
|
||||
using Microsoft.CodeAnalysis;
|
||||
using Microsoft.CodeAnalysis.Diagnostics;
|
||||
using Microsoft.CodeAnalysis.Operations;
|
||||
|
||||
namespace StellaOps.Concelier.Analyzers;
|
||||
|
||||
/// <summary>
|
||||
/// Analyzer that flags usages of the legacy merge service APIs.
|
||||
/// </summary>
|
||||
[DiagnosticAnalyzer(LanguageNames.CSharp)]
|
||||
public sealed class NoMergeUsageAnalyzer : DiagnosticAnalyzer
|
||||
{
|
||||
/// <summary>
|
||||
/// Diagnostic identifier for legacy merge usage violations.
|
||||
/// </summary>
|
||||
public const string DiagnosticId = "CONCELIER0002";
|
||||
|
||||
private const string Category = "Usage";
|
||||
private const string MergeExtensionType = "StellaOps.Concelier.Merge.MergeServiceCollectionExtensions";
|
||||
private const string MergeServiceType = "StellaOps.Concelier.Merge.Services.AdvisoryMergeService";
|
||||
|
||||
private static readonly LocalizableString Title = "Legacy merge pipeline is disabled";
|
||||
private static readonly LocalizableString MessageFormat = "Do not reference the legacy Concelier merge pipeline (type '{0}')";
|
||||
private static readonly LocalizableString Description =
|
||||
"The legacy Concelier merge service is deprecated under MERGE-LNM-21-002. "
|
||||
+ "Switch to observation/linkset APIs or guard calls behind the concelier:features:noMergeEnabled toggle.";
|
||||
|
||||
private static readonly DiagnosticDescriptor Rule = new(
|
||||
DiagnosticId,
|
||||
Title,
|
||||
MessageFormat,
|
||||
Category,
|
||||
DiagnosticSeverity.Error,
|
||||
isEnabledByDefault: true,
|
||||
description: Description,
|
||||
helpLinkUri: "https://stella-ops.org/docs/migration/no-merge");
|
||||
|
||||
/// <inheritdoc />
|
||||
public override ImmutableArray<DiagnosticDescriptor> SupportedDiagnostics => ImmutableArray.Create(Rule);
|
||||
|
||||
/// <inheritdoc />
|
||||
public override void Initialize(AnalysisContext context)
|
||||
{
|
||||
context.ConfigureGeneratedCodeAnalysis(GeneratedCodeAnalysisFlags.None);
|
||||
context.EnableConcurrentExecution();
|
||||
|
||||
context.RegisterOperationAction(AnalyzeInvocation, OperationKind.Invocation);
|
||||
context.RegisterOperationAction(AnalyzeObjectCreation, OperationKind.ObjectCreation);
|
||||
}
|
||||
|
||||
private static void AnalyzeInvocation(OperationAnalysisContext context)
|
||||
{
|
||||
if (context.Operation is not IInvocationOperation invocation)
|
||||
{
|
||||
return;
|
||||
}
|
||||
|
||||
var targetMethod = invocation.TargetMethod;
|
||||
if (targetMethod is null)
|
||||
{
|
||||
return;
|
||||
}
|
||||
|
||||
if (!SymbolEquals(targetMethod.ContainingType, MergeExtensionType))
|
||||
{
|
||||
return;
|
||||
}
|
||||
|
||||
if (!string.Equals(targetMethod.Name, "AddMergeModule", StringComparison.Ordinal))
|
||||
{
|
||||
return;
|
||||
}
|
||||
|
||||
if (IsAllowedAssembly(context.ContainingSymbol.ContainingAssembly))
|
||||
{
|
||||
return;
|
||||
}
|
||||
|
||||
ReportDiagnostic(context, invocation.Syntax, $"{MergeExtensionType}.{targetMethod.Name}");
|
||||
}
|
||||
|
||||
private static void AnalyzeObjectCreation(OperationAnalysisContext context)
|
||||
{
|
||||
if (context.Operation is not IObjectCreationOperation creation)
|
||||
{
|
||||
return;
|
||||
}
|
||||
|
||||
var createdType = creation.Type;
|
||||
if (createdType is null || !SymbolEquals(createdType, MergeServiceType))
|
||||
{
|
||||
return;
|
||||
}
|
||||
|
||||
if (IsAllowedAssembly(context.ContainingSymbol.ContainingAssembly))
|
||||
{
|
||||
return;
|
||||
}
|
||||
|
||||
ReportDiagnostic(context, creation.Syntax, MergeServiceType);
|
||||
}
|
||||
|
||||
private static bool SymbolEquals(ITypeSymbol? symbol, string fullName)
|
||||
{
|
||||
if (symbol is null)
|
||||
{
|
||||
return false;
|
||||
}
|
||||
|
||||
var display = symbol.ToDisplayString(SymbolDisplayFormat.FullyQualifiedFormat);
|
||||
if (display.StartsWith("global::", StringComparison.Ordinal))
|
||||
{
|
||||
display = display.Substring("global::".Length);
|
||||
}
|
||||
|
||||
return string.Equals(display, fullName, StringComparison.Ordinal);
|
||||
}
|
||||
|
||||
private static bool IsAllowedAssembly(IAssemblySymbol? assemblySymbol)
|
||||
{
|
||||
if (assemblySymbol is null)
|
||||
{
|
||||
return false;
|
||||
}
|
||||
|
||||
var assemblyName = assemblySymbol.Name;
|
||||
if (string.IsNullOrWhiteSpace(assemblyName))
|
||||
{
|
||||
return false;
|
||||
}
|
||||
|
||||
if (assemblyName.StartsWith("StellaOps.Concelier.Merge", StringComparison.Ordinal))
|
||||
{
|
||||
return true;
|
||||
}
|
||||
|
||||
if (assemblyName.EndsWith(".Analyzers", StringComparison.Ordinal))
|
||||
{
|
||||
return true;
|
||||
}
|
||||
|
||||
return false;
|
||||
}
|
||||
|
||||
private static void ReportDiagnostic(OperationAnalysisContext context, SyntaxNode syntax, string targetName)
|
||||
{
|
||||
var diagnostic = Diagnostic.Create(Rule, syntax.GetLocation(), targetName);
|
||||
context.ReportDiagnostic(diagnostic);
|
||||
}
|
||||
}
|
||||
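CONCELIER0002 fires on direct references to the legacy merge surface (`AddMergeModule` invocations and `AdvisoryMergeService` construction) outside the allow-listed assemblies. A short sketch of code that would be flagged versus the documented escape hatch — keep the call behind the feature toggle and suppress the diagnostics explicitly, as `Program.cs` does in this commit. The class and method names here are illustrative.

```csharp
using Microsoft.Extensions.Configuration;
using Microsoft.Extensions.DependencyInjection;
using StellaOps.Concelier.Merge;

public static class MergeRegistrationSketch
{
    // CONCELIER0002 (error): direct, unguarded reference to the legacy merge pipeline.
    public static void Flagged(IServiceCollection services, IConfiguration configuration)
    {
        services.AddMergeModule(configuration);
    }

    // Accepted pattern: feature-gated call with explicit suppression of the
    // obsolete/analyzer diagnostics, matching Program.cs in this commit.
    public static void Guarded(IServiceCollection services, IConfiguration configuration, bool noMergeEnabled)
    {
        if (!noMergeEnabled)
        {
#pragma warning disable CS0618, CONCELIER0001, CONCELIER0002 // Legacy merge kept until MERGE-LNM-21-002 completes.
            services.AddMergeModule(configuration);
#pragma warning restore CS0618, CONCELIER0001, CONCELIER0002
        }
    }
}
```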
@@ -0,0 +1,19 @@
|
||||
<Project Sdk="Microsoft.NET.Sdk">
|
||||
|
||||
<PropertyGroup>
|
||||
<TargetFramework>netstandard2.0</TargetFramework>
|
||||
<AssemblyName>StellaOps.Concelier.Analyzers</AssemblyName>
|
||||
<RootNamespace>StellaOps.Concelier.Analyzers</RootNamespace>
|
||||
<Nullable>enable</Nullable>
|
||||
<LangVersion>latest</LangVersion>
|
||||
<IncludeBuildOutput>false</IncludeBuildOutput>
|
||||
<GenerateDocumentationFile>true</GenerateDocumentationFile>
|
||||
<EnforceExtendedAnalyzerRules>true</EnforceExtendedAnalyzerRules>
|
||||
</PropertyGroup>
|
||||
|
||||
<ItemGroup>
|
||||
<PackageReference Include="Microsoft.CodeAnalysis.CSharp" Version="4.9.2" PrivateAssets="all" />
|
||||
<PackageReference Include="Microsoft.CodeAnalysis.Analyzers" Version="3.11.0" PrivateAssets="all" />
|
||||
</ItemGroup>
|
||||
|
||||
</Project>
|
||||
@@ -114,10 +114,10 @@ internal sealed class AdvisoryObservationFactory : IAdvisoryObservationFactory
|
||||
|
||||
private static AdvisoryObservationLinkset CreateLinkset(RawIdentifiers identifiers, RawLinkset linkset)
|
||||
{
|
||||
var aliases = NormalizeAliases(identifiers, linkset);
|
||||
var purls = NormalizePackageUrls(linkset.PackageUrls);
|
||||
var cpes = NormalizeCpes(linkset.Cpes);
|
||||
var references = NormalizeReferences(linkset.References);
|
||||
var aliases = CollectAliases(identifiers, linkset);
|
||||
var purls = CollectValues(linkset.PackageUrls);
|
||||
var cpes = CollectValues(linkset.Cpes);
|
||||
var references = CollectReferences(linkset.References);
|
||||
|
||||
return new AdvisoryObservationLinkset(aliases, purls, cpes, references);
|
||||
}
|
||||
@@ -170,124 +170,91 @@ internal sealed class AdvisoryObservationFactory : IAdvisoryObservationFactory
|
||||
};
|
||||
}
|
||||
|
||||
private static IEnumerable<string> NormalizeAliases(RawIdentifiers identifiers, RawLinkset linkset)
|
||||
{
|
||||
var aliases = new HashSet<string>(StringComparer.OrdinalIgnoreCase);
|
||||
|
||||
if (LinksetNormalization.TryNormalizeAlias(identifiers.PrimaryId, out var primary))
|
||||
{
|
||||
aliases.Add(primary);
|
||||
}
|
||||
|
||||
foreach (var alias in identifiers.Aliases)
|
||||
{
|
||||
if (LinksetNormalization.TryNormalizeAlias(alias, out var normalized))
|
||||
{
|
||||
aliases.Add(normalized);
|
||||
}
|
||||
}
|
||||
|
||||
foreach (var alias in linkset.Aliases)
|
||||
{
|
||||
if (LinksetNormalization.TryNormalizeAlias(alias, out var normalized))
|
||||
{
|
||||
aliases.Add(normalized);
|
||||
}
|
||||
}
|
||||
|
||||
foreach (var note in linkset.Notes)
|
||||
{
|
||||
if (!string.IsNullOrWhiteSpace(note.Value)
|
||||
&& LinksetNormalization.TryNormalizeAlias(note.Value, out var normalized))
|
||||
{
|
||||
aliases.Add(normalized);
|
||||
}
|
||||
}
|
||||
|
||||
return aliases
|
||||
.OrderBy(static value => value, StringComparer.OrdinalIgnoreCase)
|
||||
.ToImmutableArray();
|
||||
}
|
||||
|
||||
private static IEnumerable<string> NormalizePackageUrls(ImmutableArray<string> packageUrls)
|
||||
{
|
||||
if (packageUrls.IsDefaultOrEmpty)
|
||||
{
|
||||
return ImmutableArray<string>.Empty;
|
||||
}
|
||||
|
||||
var set = new HashSet<string>(StringComparer.Ordinal);
|
||||
|
||||
foreach (var candidate in packageUrls)
|
||||
{
|
||||
if (!LinksetNormalization.TryNormalizePackageUrl(candidate, out var normalized) || string.IsNullOrEmpty(normalized))
|
||||
{
|
||||
continue;
|
||||
}
|
||||
|
||||
set.Add(normalized);
|
||||
}
|
||||
|
||||
return set
|
||||
.OrderBy(static value => value, StringComparer.Ordinal)
|
||||
.ToImmutableArray();
|
||||
}
|
||||
|
||||
private static IEnumerable<string> NormalizeCpes(ImmutableArray<string> cpes)
|
||||
{
|
||||
if (cpes.IsDefaultOrEmpty)
|
||||
{
|
||||
return ImmutableArray<string>.Empty;
|
||||
}
|
||||
|
||||
var set = new HashSet<string>(StringComparer.Ordinal);
|
||||
|
||||
foreach (var cpe in cpes)
|
||||
{
|
||||
if (!LinksetNormalization.TryNormalizeCpe(cpe, out var normalized) || string.IsNullOrEmpty(normalized))
|
||||
{
|
||||
continue;
|
||||
}
|
||||
|
||||
set.Add(normalized);
|
||||
}
|
||||
|
||||
return set
|
||||
.OrderBy(static value => value, StringComparer.Ordinal)
|
||||
.ToImmutableArray();
|
||||
}
|
||||
|
||||
private static IEnumerable<AdvisoryObservationReference> NormalizeReferences(ImmutableArray<RawReference> references)
|
||||
{
|
||||
if (references.IsDefaultOrEmpty)
|
||||
{
|
||||
return ImmutableArray<AdvisoryObservationReference>.Empty;
|
||||
}
|
||||
|
||||
var seen = new HashSet<string>(StringComparer.OrdinalIgnoreCase);
|
||||
var list = new List<AdvisoryObservationReference>();
|
||||
|
||||
foreach (var reference in references)
|
||||
{
|
||||
var normalized = LinksetNormalization.TryCreateReference(reference.Type, reference.Url);
|
||||
if (normalized is null)
|
||||
{
|
||||
continue;
|
||||
}
|
||||
|
||||
if (!seen.Add(normalized.Url))
|
||||
{
|
||||
continue;
|
||||
}
|
||||
|
||||
list.Add(normalized);
|
||||
}
|
||||
|
||||
return list
|
||||
.OrderBy(static reference => reference.Type, StringComparer.Ordinal)
|
||||
.ThenBy(static reference => reference.Url, StringComparer.Ordinal)
|
||||
.ToImmutableArray();
|
||||
}
|
||||
private static IEnumerable<string> CollectAliases(RawIdentifiers identifiers, RawLinkset linkset)
|
||||
{
|
||||
var results = new List<string>();
|
||||
|
||||
AddAlias(results, identifiers.PrimaryId);
|
||||
AddRange(results, identifiers.Aliases);
|
||||
AddRange(results, linkset.Aliases);
|
||||
|
||||
foreach (var note in linkset.Notes)
|
||||
{
|
||||
if (string.IsNullOrWhiteSpace(note.Value))
|
||||
{
|
||||
continue;
|
||||
}
|
||||
|
||||
results.Add(note.Value.Trim());
|
||||
}
|
||||
|
||||
return results;
|
||||
|
||||
static void AddAlias(ICollection<string> target, string? value)
|
||||
{
|
||||
if (string.IsNullOrWhiteSpace(value))
|
||||
{
|
||||
return;
|
||||
}
|
||||
|
||||
target.Add(value.Trim());
|
||||
}
|
||||
|
||||
static void AddRange(ICollection<string> target, ImmutableArray<string> values)
|
||||
{
|
||||
if (values.IsDefaultOrEmpty)
|
||||
{
|
||||
return;
|
||||
}
|
||||
|
||||
foreach (var value in values)
|
||||
{
|
||||
AddAlias(target, value);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private static IEnumerable<string> CollectValues(ImmutableArray<string> values)
|
||||
{
|
||||
if (values.IsDefaultOrEmpty)
|
||||
{
|
||||
return ImmutableArray<string>.Empty;
|
||||
}
|
||||
|
||||
var list = new List<string>(values.Length);
|
||||
foreach (var value in values)
|
||||
{
|
||||
if (string.IsNullOrWhiteSpace(value))
|
||||
{
|
||||
continue;
|
||||
}
|
||||
|
||||
list.Add(value.Trim());
|
||||
}
|
||||
|
||||
return list;
|
||||
}
|
||||
|
||||
private static IEnumerable<AdvisoryObservationReference> CollectReferences(ImmutableArray<RawReference> references)
|
||||
{
|
||||
if (references.IsDefaultOrEmpty)
|
||||
{
|
||||
return ImmutableArray<AdvisoryObservationReference>.Empty;
|
||||
}
|
||||
|
||||
var list = new List<AdvisoryObservationReference>(references.Length);
|
||||
foreach (var reference in references)
|
||||
{
|
||||
if (string.IsNullOrWhiteSpace(reference.Type) || string.IsNullOrWhiteSpace(reference.Url))
|
||||
{
|
||||
continue;
|
||||
}
|
||||
|
||||
list.Add(new AdvisoryObservationReference(reference.Type.Trim(), reference.Url.Trim()));
|
||||
}
|
||||
|
||||
return list;
|
||||
}
|
||||
|
||||
private static ImmutableDictionary<string, string> CreateAttributes(AdvisoryRawDocument rawDocument)
|
||||
{
|
||||
|
||||
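With `CollectAliases`/`CollectValues`, the observation factory now only trims values and preserves upstream ordering and duplicates; per the AOC worknotes, canonicalisation moves to downstream consumers such as the Policy Engine. A minimal sketch of what a consumer-side canonicalisation pass might look like — the dedupe and ordering rules below are assumptions for illustration, not the Policy Engine's actual behaviour.

```csharp
using System;
using System.Collections.Generic;
using System.Collections.Immutable;
using System.Linq;

public static class LinksetCanonicalizationSketch
{
    // Example consumer-side pass: case-insensitive dedupe plus ordinal-ignore-case sort.
    public static ImmutableArray<string> CanonicalizeAliases(IEnumerable<string> rawAliases) =>
        rawAliases
            .Where(static alias => !string.IsNullOrWhiteSpace(alias))
            .Select(static alias => alias.Trim())
            .Distinct(StringComparer.OrdinalIgnoreCase)
            .OrderBy(static alias => alias, StringComparer.OrdinalIgnoreCase)
            .ToImmutableArray();
}

// Raw observation input (order and duplicates preserved by the factory):
//   ["GHSA-XXXX-YYYY", "CVE-2025-0001", "ghsa-XXXX-YYYY", "CVE-2025-0001"]
// Canonicalized output: ["CVE-2025-0001", "GHSA-XXXX-YYYY"]
```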
@@ -9,7 +9,7 @@
|
||||
> Docs alignment (2025-10-26): Linkset expectations detailed in AOC reference §4 and policy-engine architecture §2.1.
|
||||
> 2025-10-28: Advisory raw ingestion now strips client-supplied supersedes hints, logs ignored pointers, and surfaces repository-supplied supersedes identifiers; service tests cover duplicate handling and append-only semantics.
|
||||
> Docs alignment (2025-10-26): Deployment guide + observability guide describe supersedes metrics; ensure implementation emits `aoc_violation_total` on failure.
|
||||
| CONCELIER-CORE-AOC-19-004 `Remove ingestion normalization` | DOING (2025-10-28) | Concelier Core Guild | CONCELIER-CORE-AOC-19-002, POLICY-AOC-19-003 | Strip normalization/dedup/severity logic from ingestion pipelines, delegate derived computations to Policy Engine, and update exporters/tests to consume raw documents only.<br>2025-10-29 19:05Z: Audit completed for `AdvisoryRawService`/Mongo repo to confirm alias order/dedup removal persists; identified remaining normalization in observation/linkset factory that will be revised to surface raw duplicates for Policy ingestion. Change sketch + regression matrix drafted under `docs/dev/aoc-normalization-removal-notes.md` (pending commit).<br>2025-10-31 20:45Z: Added raw linkset projection to observations/storage, exposing canonical+raw views, refreshed fixtures/tests, and documented behaviour in models/doc factory.<br>2025-10-31 21:10Z: Coordinated with Policy Engine (POLICY-ENGINE-20-003) on adoption timeline; backfill + consumer readiness tracked in `docs/dev/raw-linkset-backfill-plan.md`. |
|
||||
| CONCELIER-CORE-AOC-19-004 `Remove ingestion normalization` | DOING (2025-10-28) | Concelier Core Guild | CONCELIER-CORE-AOC-19-002, POLICY-AOC-19-003 | Strip normalization/dedup/severity logic from ingestion pipelines, delegate derived computations to Policy Engine, and update exporters/tests to consume raw documents only.<br>2025-10-29 19:05Z: Audit completed for `AdvisoryRawService`/Mongo repo to confirm alias order/dedup removal persists; identified remaining normalization in observation/linkset factory that will be revised to surface raw duplicates for Policy ingestion. Change sketch + regression matrix drafted under `docs/dev/aoc-normalization-removal-notes.md` (pending commit).<br>2025-10-31 20:45Z: Added raw linkset projection to observations/storage, exposing canonical+raw views, refreshed fixtures/tests, and documented behaviour in models/doc factory.<br>2025-10-31 21:10Z: Coordinated with Policy Engine (POLICY-ENGINE-20-003) on adoption timeline; backfill + consumer readiness tracked in `docs/dev/raw-linkset-backfill-plan.md`.<br>2025-11-05 14:25Z: Resuming to document merge-dependent normalization paths and prepare implementation notes for `noMergeEnabled` gating before code changes land.<br>2025-11-05 19:20Z: Observation factory/linkset now preserve upstream ordering + duplicates; canonicalisation responsibility shifts to downstream consumers with refreshed unit coverage.<br>2025-11-06 16:10Z: Updated AOC reference/backfill docs with raw vs canonical guidance and cross-linked analyzer guardrails. |
|
||||
> Docs alignment (2025-10-26): Architecture overview emphasises policy-only derivation; coordinate with Policy Engine guild for rollout.
|
||||
> 2025-10-29: `AdvisoryRawService` now preserves upstream alias/linkset ordering (trim-only) and updated AOC documentation reflects the behaviour; follow-up to ensure policy consumers handle duplicates remains open.
|
||||
| CONCELIER-CORE-AOC-19-013 `Authority tenant scope smoke coverage` | TODO | Concelier Core Guild | AUTH-AOC-19-002 | Extend Concelier smoke/e2e fixtures to configure `requiredTenants` and assert cross-tenant rejection with updated Authority tokens. | Coordinate deliverable so Authority docs (`AUTH-AOC-19-003`) can close once tests are in place. |
|
||||
|
||||
@@ -5,9 +5,10 @@ using Microsoft.Extensions.Logging;
|
||||
using StellaOps.Concelier.Core.Jobs;
|
||||
using StellaOps.Concelier.Merge.Services;
|
||||
|
||||
namespace StellaOps.Concelier.Merge.Jobs;
|
||||
|
||||
public sealed class MergeReconcileJob : IJob
|
||||
namespace StellaOps.Concelier.Merge.Jobs;
|
||||
|
||||
[Obsolete("MergeReconcileJob is deprecated; Link-Not-Merge supersedes merge scheduling. Disable via concelier:features:noMergeEnabled. Tracking MERGE-LNM-21-002.", DiagnosticId = "CONCELIER0001", UrlFormat = "https://stella-ops.org/docs/migration/no-merge")]
|
||||
public sealed class MergeReconcileJob : IJob
|
||||
{
|
||||
private readonly AdvisoryMergeService _mergeService;
|
||||
private readonly ILogger<MergeReconcileJob> _logger;
|
||||
|
||||
@@ -1,15 +1,17 @@
|
||||
using System;
|
||||
using Microsoft.Extensions.Configuration;
|
||||
using Microsoft.Extensions.DependencyInjection;
|
||||
using Microsoft.Extensions.DependencyInjection.Extensions;
|
||||
using Microsoft.Extensions.Logging;
|
||||
using StellaOps.Concelier.Core;
|
||||
using StellaOps.Concelier.Merge.Jobs;
|
||||
using StellaOps.Concelier.Merge.Jobs;
|
||||
using StellaOps.Concelier.Merge.Options;
|
||||
using StellaOps.Concelier.Merge.Services;
|
||||
|
||||
namespace StellaOps.Concelier.Merge;
|
||||
|
||||
public static class MergeServiceCollectionExtensions
|
||||
namespace StellaOps.Concelier.Merge;
|
||||
|
||||
[Obsolete("Legacy merge module is deprecated; prefer Link-Not-Merge linkset pipelines. Track MERGE-LNM-21-002 and set concelier:features:noMergeEnabled=true to disable registration.", DiagnosticId = "CONCELIER0001", UrlFormat = "https://stella-ops.org/docs/migration/no-merge")]
|
||||
public static class MergeServiceCollectionExtensions
|
||||
{
|
||||
public static IServiceCollection AddMergeModule(this IServiceCollection services, IConfiguration configuration)
|
||||
{
|
||||
@@ -34,10 +36,12 @@ public static class MergeServiceCollectionExtensions
|
||||
return new AdvisoryPrecedenceMerger(resolver, options, timeProvider, logger);
|
||||
});
|
||||
|
||||
services.TryAddSingleton<MergeEventWriter>();
|
||||
services.TryAddSingleton<AdvisoryMergeService>();
|
||||
services.AddTransient<MergeReconcileJob>();
|
||||
|
||||
return services;
|
||||
}
|
||||
}
|
||||
#pragma warning disable CS0618 // Legacy merge services are marked obsolete.
|
||||
services.TryAddSingleton<MergeEventWriter>();
|
||||
services.TryAddSingleton<AdvisoryMergeService>();
|
||||
services.AddTransient<MergeReconcileJob>();
|
||||
#pragma warning restore CS0618
|
||||
|
||||
return services;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -14,9 +14,10 @@ using StellaOps.Concelier.Storage.Mongo.Aliases;
|
||||
using StellaOps.Concelier.Storage.Mongo.MergeEvents;
|
||||
using System.Text.Json;
|
||||
|
||||
namespace StellaOps.Concelier.Merge.Services;
|
||||
|
||||
public sealed class AdvisoryMergeService
|
||||
namespace StellaOps.Concelier.Merge.Services;
|
||||
|
||||
[Obsolete("AdvisoryMergeService is deprecated. Transition callers to Link-Not-Merge observation/linkset APIs (MERGE-LNM-21-002) and enable concelier:features:noMergeEnabled when ready.", DiagnosticId = "CONCELIER0001", UrlFormat = "https://stella-ops.org/docs/migration/no-merge")]
|
||||
public sealed class AdvisoryMergeService
|
||||
{
|
||||
private static readonly Meter MergeMeter = new("StellaOps.Concelier.Merge");
|
||||
private static readonly Counter<long> AliasCollisionCounter = MergeMeter.CreateCounter<long>(
|
||||
|
||||
@@ -10,6 +10,6 @@
|
||||
| Task | Owner(s) | Depends on | Notes |
|
||||
|---|---|---|---|
|
||||
|MERGE-LNM-21-001 Migration plan authoring|BE-Merge, Architecture Guild|CONCELIER-LNM-21-101|**DONE (2025-11-03)** – Authored `docs/migration/no-merge.md` with rollout phases, backfill/validation checklists, rollback guidance, and ownership matrix for the Link-Not-Merge cutover.|
|
||||
|MERGE-LNM-21-002 Merge service deprecation|BE-Merge|MERGE-LNM-21-001|**DOING (2025-11-03)** – Auditing service registrations, DI bindings, and tests consuming `AdvisoryMergeService`; drafting deprecation plan and analyzer scope prior to code removal.|
|
||||
|MERGE-LNM-21-002 Merge service deprecation|BE-Merge|MERGE-LNM-21-001|**DOING (2025-11-03)** – Auditing service registrations, DI bindings, and tests consuming `AdvisoryMergeService`; drafting deprecation plan and analyzer scope prior to code removal.<br>2025-11-05 14:42Z: Implementing `concelier:features:noMergeEnabled` gate, merge job allowlist checks, `[Obsolete]` markings, and analyzer scaffolding to steer consumers toward linkset APIs.<br>2025-11-06 16:10Z: Introduced Roslyn analyzer (`CONCELIER0002`) referenced by Concelier WebService + tests, documented suppression guidance, and updated migration playbook.|
|
||||
> 2025-11-03: Catalogued call sites (WebService Program `AddMergeModule`, built-in job registration `merge:reconcile`, `MergeReconcileJob`) and confirmed unit tests are the only direct `MergeAsync` callers; next step is to define analyzer + replacement observability coverage.
|
||||
|MERGE-LNM-21-003 Determinism/test updates|QA Guild, BE-Merge|MERGE-LNM-21-002|Replace merge determinism suites with observation/linkset regression tests verifying no data mutation and conflicts remain visible.|
|
||||
|
||||
@@ -280,57 +280,60 @@ public sealed record AdvisoryObservationLinkset
|
||||
IEnumerable<string>? cpes,
|
||||
IEnumerable<AdvisoryObservationReference>? references)
|
||||
{
|
||||
Aliases = NormalizeStringSet(aliases, toLower: true);
|
||||
Purls = NormalizeStringSet(purls);
|
||||
Cpes = NormalizeStringSet(cpes);
|
||||
References = NormalizeReferences(references);
|
||||
}
|
||||
Aliases = ToImmutableArray(aliases);
|
||||
Purls = ToImmutableArray(purls);
|
||||
Cpes = ToImmutableArray(cpes);
|
||||
References = ToImmutableReferences(references);
|
||||
}
|
||||
|
||||
public ImmutableArray<string> Aliases { get; }
|
||||
|
||||
public ImmutableArray<string> Purls { get; }
|
||||
|
||||
public ImmutableArray<string> Aliases { get; }
|
||||
|
||||
public ImmutableArray<string> Purls { get; }
|
||||
|
||||
public ImmutableArray<string> Cpes { get; }
|
||||
|
||||
public ImmutableArray<AdvisoryObservationReference> References { get; }
|
||||
|
||||
private static ImmutableArray<string> NormalizeStringSet(IEnumerable<string>? values, bool toLower = false)
|
||||
{
|
||||
if (values is null)
|
||||
{
|
||||
return ImmutableArray<string>.Empty;
|
||||
}
|
||||
|
||||
var list = new List<string>();
|
||||
foreach (var value in values)
|
||||
{
|
||||
var trimmed = Validation.TrimToNull(value);
|
||||
if (trimmed is null)
|
||||
{
|
||||
continue;
|
||||
}
|
||||
|
||||
list.Add(toLower ? trimmed.ToLowerInvariant() : trimmed);
|
||||
}
|
||||
|
||||
return list
|
||||
.Distinct(StringComparer.Ordinal)
|
||||
.OrderBy(static v => v, StringComparer.Ordinal)
|
||||
.ToImmutableArray();
|
||||
}
|
||||
|
||||
private static ImmutableArray<AdvisoryObservationReference> NormalizeReferences(IEnumerable<AdvisoryObservationReference>? references)
|
||||
{
|
||||
if (references is null)
|
||||
{
|
||||
return ImmutableArray<AdvisoryObservationReference>.Empty;
|
||||
}
|
||||
|
||||
return references
|
||||
.Where(static reference => reference is not null)
|
||||
.Distinct()
|
||||
.OrderBy(static reference => reference.Type, StringComparer.Ordinal)
|
||||
.ThenBy(static reference => reference.Url, StringComparer.Ordinal)
|
||||
.ToImmutableArray();
|
||||
}
|
||||
}
|
||||
public ImmutableArray<string> Cpes { get; }
|
||||
|
||||
public ImmutableArray<AdvisoryObservationReference> References { get; }
|
||||
|
||||
private static ImmutableArray<string> ToImmutableArray(IEnumerable<string>? values)
|
||||
{
|
||||
if (values is null)
|
||||
{
|
||||
return ImmutableArray<string>.Empty;
|
||||
}
|
||||
|
||||
var builder = ImmutableArray.CreateBuilder<string>();
|
||||
foreach (var value in values)
|
||||
{
|
||||
var trimmed = Validation.TrimToNull(value);
|
||||
if (trimmed is null)
|
||||
{
|
||||
continue;
|
||||
}
|
||||
|
||||
builder.Add(trimmed);
|
||||
}
|
||||
|
||||
return builder.Count == 0 ? ImmutableArray<string>.Empty : builder.ToImmutable();
|
||||
}
|
||||
|
||||
private static ImmutableArray<AdvisoryObservationReference> ToImmutableReferences(IEnumerable<AdvisoryObservationReference>? references)
|
||||
{
|
||||
if (references is null)
|
||||
{
|
||||
return ImmutableArray<AdvisoryObservationReference>.Empty;
|
||||
}
|
||||
|
||||
var builder = ImmutableArray.CreateBuilder<AdvisoryObservationReference>();
|
||||
foreach (var reference in references)
|
||||
{
|
||||
if (reference is null)
|
||||
{
|
||||
continue;
|
||||
}
|
||||
|
||||
builder.Add(reference);
|
||||
}
|
||||
|
||||
return builder.Count == 0 ? ImmutableArray<AdvisoryObservationReference>.Empty : builder.ToImmutable();
|
||||
}
|
||||
}
|
||||
|
||||
@@ -13,11 +13,11 @@ public sealed class AdvisoryObservationFactoryTests
|
||||
private static readonly DateTimeOffset SampleTimestamp = DateTimeOffset.Parse("2025-10-26T12:34:56Z");
|
||||
|
||||
[Fact]
|
||||
public void Create_NormalizesLinksetIdentifiersAndReferences()
|
||||
{
|
||||
var factory = new AdvisoryObservationFactory();
|
||||
var rawDocument = BuildRawDocument(
|
||||
identifiers: new RawIdentifiers(
|
||||
public void Create_PreservesLinksetOrderAndDuplicates()
|
||||
{
|
||||
var factory = new AdvisoryObservationFactory();
|
||||
var rawDocument = BuildRawDocument(
|
||||
identifiers: new RawIdentifiers(
|
||||
Aliases: ImmutableArray.Create(" CVE-2025-0001 ", "ghsa-XXXX-YYYY"),
|
||||
PrimaryId: "GHSA-XXXX-YYYY"),
|
||||
linkset: new RawLinkset
|
||||
@@ -29,16 +29,27 @@ public sealed class AdvisoryObservationFactoryTests
|
||||
new RawReference("Advisory", " https://example.test/advisory "),
|
||||
new RawReference("ADVISORY", "https://example.test/advisory"))
|
||||
});
|
||||
|
||||
var observation = factory.Create(rawDocument, SampleTimestamp);
|
||||
|
||||
Assert.Equal(SampleTimestamp, observation.CreatedAt);
|
||||
Assert.Equal(new[] { "cve-2025-0001", "ghsa-xxxx-yyyy" }, observation.Linkset.Aliases);
|
||||
Assert.Equal(new[] { "pkg:npm/left-pad@1.0.0" }, observation.Linkset.Purls);
|
||||
Assert.Equal(new[] { "cpe:2.3:a:example:product:1.0:*:*:*:*:*:*:*" }, observation.Linkset.Cpes);
|
||||
var reference = Assert.Single(observation.Linkset.References);
|
||||
Assert.Equal("advisory", reference.Type);
|
||||
Assert.Equal("https://example.test/advisory", reference.Url);
|
||||
|
||||
var observation = factory.Create(rawDocument, SampleTimestamp);
|
||||
|
||||
Assert.Equal(SampleTimestamp, observation.CreatedAt);
|
||||
Assert.Equal(
|
||||
new[] { "GHSA-XXXX-YYYY", "CVE-2025-0001", "ghsa-XXXX-YYYY", "CVE-2025-0001" },
|
||||
observation.Linkset.Aliases);
|
||||
Assert.Equal(
|
||||
new[] { "pkg:NPM/left-pad@1.0.0", "pkg:npm/left-pad@1.0.0?foo=bar" },
|
||||
observation.Linkset.Purls);
|
||||
Assert.Equal(
|
||||
new[] { "cpe:/a:Example:Product:1.0", "cpe:/a:example:product:1.0" },
|
||||
observation.Linkset.Cpes);
|
||||
Assert.Equal(2, observation.Linkset.References.Length);
|
||||
Assert.All(
|
||||
observation.Linkset.References,
|
||||
reference =>
|
||||
{
|
||||
Assert.Equal("advisory", reference.Type);
|
||||
Assert.Equal("https://example.test/advisory", reference.Url);
|
||||
});
|
||||
|
||||
Assert.Equal(
|
||||
new[] { "GHSA-XXXX-YYYY", " CVE-2025-0001 ", "ghsa-XXXX-YYYY", " CVE-2025-0001 " },
|
||||
|
||||
@@ -1,3 +1,4 @@
|
||||
using System;
|
||||
using System.Collections.Immutable;
|
||||
using System.Linq;
|
||||
using System.Text.Json.Nodes;
|
||||
@@ -52,9 +53,9 @@ public sealed class AdvisoryObservationQueryServiceTests
|
||||
Assert.Equal("tenant-a:osv:beta:1", result.Observations[0].ObservationId);
|
||||
Assert.Equal("tenant-a:ghsa:alpha:1", result.Observations[1].ObservationId);
|
||||
|
||||
Assert.Equal(
|
||||
new[] { "cve-2025-0001", "cve-2025-0002", "ghsa-xyzz" },
|
||||
result.Linkset.Aliases);
|
||||
Assert.Equal(
|
||||
new[] { "CVE-2025-0001", "CVE-2025-0002", "GHSA-xyzz" },
|
||||
result.Linkset.Aliases);
|
||||
|
||||
Assert.Equal(
|
||||
new[] { "pkg:npm/package-a@1.0.0", "pkg:pypi/package-b@2.0.0" },
|
||||
@@ -103,8 +104,11 @@ public sealed class AdvisoryObservationQueryServiceTests
|
||||
CancellationToken.None);
|
||||
|
||||
Assert.Equal(2, result.Observations.Length);
|
||||
Assert.All(result.Observations, observation =>
|
||||
Assert.Contains(observation.Linkset.Aliases, alias => alias is "cve-2025-0001" or "cve-2025-9999"));
|
||||
Assert.All(result.Observations, observation =>
|
||||
Assert.Contains(
|
||||
observation.Linkset.Aliases,
|
||||
alias => alias.Equals("CVE-2025-0001", StringComparison.OrdinalIgnoreCase)
|
||||
|| alias.Equals("CVE-2025-9999", StringComparison.OrdinalIgnoreCase)));
|
||||
|
||||
Assert.False(result.HasMore);
|
||||
Assert.Null(result.NextCursor);
|
||||
|
||||
@@ -10,5 +10,8 @@
|
||||
<ProjectReference Include="../../__Libraries/StellaOps.Concelier.Storage.Mongo/StellaOps.Concelier.Storage.Mongo.csproj" />
|
||||
<ProjectReference Include="../../StellaOps.Concelier.WebService/StellaOps.Concelier.WebService.csproj" />
|
||||
<ProjectReference Include="../../../__Libraries/StellaOps.Plugin/StellaOps.Plugin.csproj" />
|
||||
<ProjectReference Include="../../__Analyzers/StellaOps.Concelier.Analyzers/StellaOps.Concelier.Analyzers.csproj"
|
||||
OutputItemType="Analyzer"
|
||||
ReferenceOutputAssembly="false" />
|
||||
</ItemGroup>
|
||||
</Project>
|
||||
</Project>
|
||||
|
||||
@@ -221,7 +221,7 @@ public sealed class WebServiceEndpointsTests : IAsyncLifetime
|
||||
Assert.NotNull(ingestResponse.Headers.Location);
|
||||
var locationValue = ingestResponse.Headers.Location!.ToString();
|
||||
Assert.False(string.IsNullOrWhiteSpace(locationValue));
|
||||
var lastSlashIndex = locationValue.LastIndexOf('/', StringComparison.Ordinal);
|
||||
var lastSlashIndex = locationValue.LastIndexOf('/');
|
||||
var idSegment = lastSlashIndex >= 0
|
||||
? locationValue[(lastSlashIndex + 1)..]
|
||||
: locationValue;
|
||||
@@ -886,15 +886,61 @@ public sealed class WebServiceEndpointsTests : IAsyncLifetime
|
||||
var limitedResponse = await client.GetAsync("/concelier/exports/index.json");
|
||||
Assert.Equal((HttpStatusCode)429, limitedResponse.StatusCode);
|
||||
Assert.NotNull(limitedResponse.Headers.RetryAfter);
|
||||
Assert.True(limitedResponse.Headers.RetryAfter!.Delta.HasValue);
|
||||
Assert.True(limitedResponse.Headers.RetryAfter!.Delta!.Value.TotalSeconds > 0);
|
||||
}
|
||||
|
||||
|
||||
[Fact]
|
||||
public async Task JobsEndpointsAllowBypassWhenAuthorityEnabled()
|
||||
{
|
||||
var environment = new Dictionary<string, string?>
|
||||
Assert.True(limitedResponse.Headers.RetryAfter!.Delta.HasValue);
|
||||
Assert.True(limitedResponse.Headers.RetryAfter!.Delta!.Value.TotalSeconds > 0);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void MergeModuleDisabledWhenFeatureFlagEnabled()
|
||||
{
|
||||
var environment = new Dictionary<string, string?>
|
||||
{
|
||||
["CONCELIER_FEATURES__NOMERGEENABLED"] = "true"
|
||||
};
|
||||
|
||||
using var factory = new ConcelierApplicationFactory(
|
||||
_runner.ConnectionString,
|
||||
authorityConfigure: null,
|
||||
environmentOverrides: environment);
|
||||
using var scope = factory.Services.CreateScope();
|
||||
var provider = scope.ServiceProvider;
|
||||
|
||||
#pragma warning disable CS0618, CONCELIER0001, CONCELIER0002 // Checking deprecated service registration state.
|
||||
Assert.Null(provider.GetService<AdvisoryMergeService>());
|
||||
#pragma warning restore CS0618, CONCELIER0001, CONCELIER0002
|
||||
|
||||
var schedulerOptions = provider.GetRequiredService<IOptions<JobSchedulerOptions>>().Value;
|
||||
Assert.DoesNotContain("merge:reconcile", schedulerOptions.Definitions.Keys);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void MergeJobRemainsWhenAllowlisted()
|
||||
{
|
||||
var environment = new Dictionary<string, string?>
|
||||
{
|
||||
["CONCELIER_FEATURES__MERGEJOBALLOWLIST__0"] = "merge:reconcile"
|
||||
};
|
||||
|
||||
using var factory = new ConcelierApplicationFactory(
|
||||
_runner.ConnectionString,
|
||||
authorityConfigure: null,
|
||||
environmentOverrides: environment);
|
||||
using var scope = factory.Services.CreateScope();
|
||||
var provider = scope.ServiceProvider;
|
||||
|
||||
#pragma warning disable CS0618, CONCELIER0001, CONCELIER0002 // Checking deprecated service registration state.
|
||||
Assert.NotNull(provider.GetService<AdvisoryMergeService>());
|
||||
#pragma warning restore CS0618, CONCELIER0001, CONCELIER0002
|
||||
|
||||
var schedulerOptions = provider.GetRequiredService<IOptions<JobSchedulerOptions>>().Value;
|
||||
Assert.Contains("merge:reconcile", schedulerOptions.Definitions.Keys);
|
||||
}
|
||||
|
||||
|
||||
[Fact]
|
||||
public async Task JobsEndpointsAllowBypassWhenAuthorityEnabled()
|
||||
{
|
||||
var environment = new Dictionary<string, string?>
|
||||
{
|
||||
["CONCELIER_AUTHORITY__ENABLED"] = "true",
|
||||
["CONCELIER_AUTHORITY__ALLOWANONYMOUSFALLBACK"] = "false",
|
||||
|
||||
@@ -84,10 +84,13 @@ internal sealed class WorkerSignatureVerifier : IVexSignatureVerifier
|
||||
throw new ExcititorAocGuardException(AocGuardResult.FromViolations(new[] { violation }));
|
||||
}
|
||||
|
||||
VexSignatureMetadata? signatureMetadata = null;
|
||||
if (document.Format == VexDocumentFormat.OciAttestation && _attestationVerifier is not null)
|
||||
VexSignatureMetadata? signatureMetadata = null;
|
||||
VexAttestationDiagnostics? attestationDiagnostics = null;
|
||||
if (document.Format == VexDocumentFormat.OciAttestation && _attestationVerifier is not null)
|
||||
{
|
||||
signatureMetadata = await VerifyAttestationAsync(document, metadata, cancellationToken).ConfigureAwait(false);
|
||||
var attestationResult = await VerifyAttestationAsync(document, metadata, cancellationToken).ConfigureAwait(false);
|
||||
signatureMetadata = attestationResult.Metadata;
|
||||
attestationDiagnostics = attestationResult.Diagnostics;
|
||||
}
|
||||
|
||||
signatureMetadata ??= ExtractSignatureMetadata(metadata);
|
||||
@@ -96,31 +99,40 @@ internal sealed class WorkerSignatureVerifier : IVexSignatureVerifier
|
||||
signatureMetadata = await AttachIssuerTrustAsync(signatureMetadata, metadata, cancellationToken).ConfigureAwait(false);
|
||||
}
|
||||
var resultLabel = signatureMetadata is null ? "skipped" : "ok";
|
||||
RecordVerification(document.ProviderId, metadata, resultLabel);
|
||||
|
||||
if (resultLabel == "skipped")
|
||||
{
|
||||
if (attestationDiagnostics is not null)
|
||||
{
|
||||
resultLabel = attestationDiagnostics.Result ?? resultLabel;
|
||||
}
|
||||
|
||||
if (attestationDiagnostics is null)
|
||||
{
|
||||
RecordVerification(document.ProviderId, metadata, resultLabel);
|
||||
}
|
||||
|
||||
if (resultLabel == "skipped")
|
||||
{
|
||||
_logger.LogDebug(
|
||||
"Signature verification skipped for provider {ProviderId} (no signature metadata).",
|
||||
document.ProviderId);
|
||||
}
|
||||
else
|
||||
{
|
||||
_logger.LogInformation(
|
||||
"Signature metadata recorded for provider {ProviderId} (type={SignatureType}, subject={Subject}, issuer={Issuer}).",
|
||||
document.ProviderId,
|
||||
signatureMetadata!.Type,
|
||||
signatureMetadata.Subject ?? "<unknown>",
|
||||
signatureMetadata.Issuer ?? "<unknown>");
|
||||
}
|
||||
|
||||
return signatureMetadata;
|
||||
}
|
||||
|
||||
private async ValueTask<VexSignatureMetadata?> VerifyAttestationAsync(
|
||||
VexRawDocument document,
|
||||
ImmutableDictionary<string, string> metadata,
|
||||
CancellationToken cancellationToken)
|
||||
_logger.LogInformation(
|
||||
"Signature metadata recorded for provider {ProviderId} (type={SignatureType}, subject={Subject}, issuer={Issuer}, result={Result}).",
|
||||
document.ProviderId,
|
||||
signatureMetadata!.Type,
|
||||
signatureMetadata.Subject ?? "<unknown>",
|
||||
signatureMetadata.Issuer ?? "<unknown>",
|
||||
resultLabel);
|
||||
}
|
||||
|
||||
return signatureMetadata;
|
||||
}
|
||||
|
||||
private async ValueTask<(VexSignatureMetadata Metadata, VexAttestationDiagnostics Diagnostics)> VerifyAttestationAsync(
|
||||
VexRawDocument document,
|
||||
ImmutableDictionary<string, string> metadata,
|
||||
CancellationToken cancellationToken)
|
||||
{
|
||||
try
|
||||
{
|
||||
@@ -146,37 +158,48 @@ internal sealed class WorkerSignatureVerifier : IVexSignatureVerifier
|
||||
attestationMetadata,
|
||||
envelopeJson);
|
||||
|
||||
var verification = await _attestationVerifier!
|
||||
.VerifyAsync(verificationRequest, cancellationToken)
|
||||
.ConfigureAwait(false);
|
||||
if (!verification.IsValid)
|
||||
{
|
||||
var diagnostics = string.Join(", ", verification.Diagnostics.Select(kvp => $"{kvp.Key}={kvp.Value}"));
|
||||
_logger.LogError(
|
||||
"Attestation verification failed for provider {ProviderId} (uri={SourceUri}) diagnostics={Diagnostics}",
|
||||
document.ProviderId,
|
||||
document.SourceUri,
|
||||
diagnostics);
|
||||
|
||||
var violation = AocViolation.Create(
|
||||
AocViolationCode.SignatureInvalid,
|
||||
"/upstream/signature",
|
||||
"Attestation verification failed.");
|
||||
|
||||
RecordVerification(document.ProviderId, metadata, "fail");
|
||||
throw new ExcititorAocGuardException(AocGuardResult.FromViolations(new[] { violation }));
|
||||
}
|
||||
|
||||
_logger.LogInformation(
|
||||
"Attestation verification succeeded for provider {ProviderId} (predicate={PredicateType}, subject={Subject}).",
|
||||
document.ProviderId,
|
||||
attestationMetadata.PredicateType,
|
||||
statement.Subject[0].Name ?? "<unknown>");
|
||||
|
||||
return BuildSignatureMetadata(statement, metadata, attestationMetadata, verification.Diagnostics);
|
||||
}
|
||||
catch (ExcititorAocGuardException)
|
||||
{
|
||||
var verification = await _attestationVerifier!
|
||||
.VerifyAsync(verificationRequest, cancellationToken)
|
||||
.ConfigureAwait(false);
|
||||
var diagnosticsSnapshot = verification.Diagnostics;
|
||||
|
||||
if (!verification.IsValid)
|
||||
{
|
||||
var failureReason = diagnosticsSnapshot.FailureReason ?? "verification_failed";
|
||||
var resultTag = diagnosticsSnapshot.Result ?? "invalid";
|
||||
|
||||
RecordVerification(document.ProviderId, metadata, resultTag);
|
||||
_logger.LogError(
|
||||
"Attestation verification failed for provider {ProviderId} (uri={SourceUri}) result={Result} failure={FailureReason} diagnostics={@Diagnostics}",
|
||||
document.ProviderId,
|
||||
document.SourceUri,
|
||||
resultTag,
|
||||
failureReason,
|
||||
diagnosticsSnapshot);
|
||||
|
||||
var violation = AocViolation.Create(
|
||||
AocViolationCode.SignatureInvalid,
|
||||
"/upstream/signature",
|
||||
"Attestation verification failed.");
|
||||
|
||||
throw new ExcititorAocGuardException(AocGuardResult.FromViolations(new[] { violation }));
|
||||
}
|
||||
|
||||
var successResult = diagnosticsSnapshot.Result ?? "valid";
|
||||
RecordVerification(document.ProviderId, metadata, successResult);
|
||||
|
||||
_logger.LogInformation(
|
||||
"Attestation verification succeeded for provider {ProviderId} (predicate={PredicateType}, subject={Subject}, result={Result}).",
|
||||
document.ProviderId,
|
||||
attestationMetadata.PredicateType,
|
||||
statement.Subject[0].Name ?? "<unknown>",
|
||||
successResult);
|
||||
|
||||
var signatureMetadata = BuildSignatureMetadata(statement, metadata, attestationMetadata, diagnosticsSnapshot);
|
||||
return (signatureMetadata, diagnosticsSnapshot);
|
||||
}
|
||||
catch (ExcititorAocGuardException)
|
||||
{
|
||||
throw;
|
||||
}
|
||||
catch (Exception ex)
|
||||
@@ -192,10 +215,10 @@ internal sealed class WorkerSignatureVerifier : IVexSignatureVerifier
|
||||
"/upstream/signature",
|
||||
$"Attestation verification encountered an error: {ex.Message}");
|
||||
|
||||
RecordVerification(document.ProviderId, metadata, "fail");
|
||||
throw new ExcititorAocGuardException(AocGuardResult.FromViolations(new[] { violation }));
|
||||
}
|
||||
}
|
||||
RecordVerification(document.ProviderId, metadata, "error");
|
||||
throw new ExcititorAocGuardException(AocGuardResult.FromViolations(new[] { violation }));
|
||||
}
|
||||
}
|
||||
|
||||
private VexAttestationRequest BuildAttestationRequest(VexInTotoStatement statement, VexAttestationPredicate predicate)
|
||||
{
|
||||
@@ -252,11 +275,11 @@ internal sealed class WorkerSignatureVerifier : IVexSignatureVerifier
|
||||
signedAt);
|
||||
}
|
||||
|
||||
private VexSignatureMetadata BuildSignatureMetadata(
|
||||
VexInTotoStatement statement,
|
||||
ImmutableDictionary<string, string> metadata,
|
||||
VexAttestationMetadata attestationMetadata,
|
||||
ImmutableDictionary<string, string> diagnostics)
|
||||
private VexSignatureMetadata BuildSignatureMetadata(
|
||||
VexInTotoStatement statement,
|
||||
ImmutableDictionary<string, string> metadata,
|
||||
VexAttestationMetadata attestationMetadata,
|
||||
VexAttestationDiagnostics diagnostics)
|
||||
{
|
||||
metadata.TryGetValue("vex.signature.type", out var type);
|
||||
metadata.TryGetValue("vex.provenance.cosign.subject", out var subject);
|
||||
|
||||
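The verifier now threads a structured `VexAttestationDiagnostics` value instead of a bare `ImmutableDictionary<string, string>`. The real record lives in the Excititor attestation library; the sketch below is only an assumed minimal shape, consistent with the members this hunk reads (`Result`, `FailureReason`) plus the key/value entries the previous code logged.

```csharp
using System.Collections.Immutable;

// Assumed shape only — consult StellaOps.Excititor.Attestation for the real record.
public sealed record VexAttestationDiagnosticsSketch(
    string? Result,                                // e.g. "valid", "invalid", "degraded"
    string? FailureReason,                         // e.g. "rekor_unreachable"; null on success
    ImmutableDictionary<string, string> Entries)   // normalized diagnostic keys
{
    public static VexAttestationDiagnosticsSketch Success(ImmutableDictionary<string, string> entries) =>
        new("valid", null, entries);

    public static VexAttestationDiagnosticsSketch Failure(string reason, ImmutableDictionary<string, string> entries) =>
        new("invalid", reason, entries);
}
```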
@@ -13,16 +13,18 @@
3. Emit observability signals (logs, metrics, optional tracing) that can run offline and degrade gracefully when transparency services are unreachable.
4. Add regression tests (unit + integration) covering positive path, negative path, and offline fallback scenarios.

## 2. Deliverables

- `IVexAttestationVerifier` abstraction + `VexAttestationVerifier` implementation inside `StellaOps.Excititor.Attestation`, encapsulating DSSE validation, predicate checks, artifact digest confirmation, Rekor inclusion verification, and deterministic diagnostics.
- DI wiring (extension method) for registering the verifier + instrumentation dependencies alongside the existing signer/rekor client.
- Shared `VexAttestationDiagnostics` record describing normalized diagnostic keys consumed by Worker/WebService logging.
- Metrics utility (`AttestationMetrics`) exposing counters/histograms via `System.Diagnostics.Metrics`, exported under the `StellaOps.Excititor.Attestation` meter (see the sketch below).
- Activity source (`AttestationActivitySource`) for optional tracing spans around sign/verify operations.
- 2025-11-05: Implemented `VexAttestationDiagnostics`, activity tagging via `VexAttestationActivitySource`, and updated verifier/tests to emit structured failure reasons.
- 2025-11-05 (pm): Worker attestation verifier now records structured diagnostics/metrics and logs result/failure reasons using `VexAttestationDiagnostics`; attestation success/failure labels propagate to verification counters.
- Documentation updates (`EXCITITOR-ATTEST-01-003-plan.md`, `TASKS.md` notes) describing instrumentation + test expectations.
- Test coverage in `StellaOps.Excititor.Attestation.Tests` (unit) and scaffolding notes for WebService/Worker integration tests.
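For orientation, a minimal sketch of what the metrics utility could look like. Only the meter name and the `VerifyDuration` histogram are taken from the plan and verifier code in this commit; the instrument names and the counter are illustrative assumptions, not the shipped contract.

```csharp
using System;
using System.Diagnostics.Metrics;

namespace StellaOps.Excititor.Attestation.Verification;

// Sketch only: the meter name matches the plan above; instrument names and the counter are assumptions.
public sealed class VexAttestationMetrics : IDisposable
{
    private readonly Meter _meter = new("StellaOps.Excititor.Attestation");

    public VexAttestationMetrics()
    {
        VerifyCounter = _meter.CreateCounter<long>("excititor.attestation.verify.total");
        VerifyDuration = _meter.CreateHistogram<double>("excititor.attestation.verify.duration_seconds");
    }

    // Incremented per verification with result/component/rekor tags (assumed instrument).
    public Counter<long> VerifyCounter { get; }

    // Recorded by the verifier's finally block: VerifyDuration.Record(elapsedSeconds, tags).
    public Histogram<double> VerifyDuration { get; }

    public void Dispose() => _meter.Dispose();
}
```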
## 3. Verification Flow
|
||||
|
||||
### 3.1 Inputs
|
||||
|
||||
@@ -1,6 +1,7 @@
If you are working on this file you need to read docs/modules/excititor/ARCHITECTURE.md and ./AGENTS.md.
# TASKS
| Task | Owner(s) | Depends on | Notes |
|---|---|---|---|
|EXCITITOR-ATTEST-01-003 – Verification suite & observability|Team Excititor Attestation|EXCITITOR-ATTEST-01-002|DOING (2025-10-22) – Continuing implementation: build `IVexAttestationVerifier`, wire metrics/logging, and add regression tests. Draft plan in `EXCITITOR-ATTEST-01-003-plan.md` (2025-10-19) guides scope; updating with worknotes as progress lands.<br>2025-10-31: Verifier now tolerates duplicate source providers from AOC raw projections, downgrades offline Rekor verification to a degraded result, and enforces trusted signer registry checks with detailed diagnostics/tests.<br>2025-11-05 14:35Z: Picking up diagnostics record/ActivitySource work and aligning metrics dimensions before wiring the verifier into WebService/Worker paths.|
> 2025-11-05 19:10Z: Worker signature verifier now emits structured diagnostics/metrics via `VexAttestationDiagnostics`; attestation verification results flow into metric labels and logs (a hedged logging sketch follows below).
> Remark (2025-10-22): Added verifier implementation + metrics/tests; next steps include wiring into WebService/Worker flows and expanding negative-path coverage.
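The logging sketch referenced in the 2025-11-05 19:10Z note, assuming a small helper around `ILogger`; the class, method, and message templates are illustrative, and only the `VexAttestationVerification`/`VexAttestationDiagnostics` members come from the code in this commit.

```csharp
using Microsoft.Extensions.Logging;
using StellaOps.Excititor.Attestation.Verification;
using StellaOps.Excititor.Core;

// Illustrative helper; names and log templates are assumptions, not the shipped worker code.
internal static class AttestationVerificationLogging
{
    public static void LogVerification(ILogger logger, string exportId, VexAttestationVerification verification)
    {
        if (verification.IsValid)
        {
            logger.LogInformation(
                "Attestation verification {Result} for export {ExportId} (rekor={RekorState})",
                verification.Diagnostics.Result,
                exportId,
                verification.Diagnostics.RekorState);
        }
        else
        {
            logger.LogWarning(
                "Attestation verification failed for export {ExportId}: {FailureReason}",
                exportId,
                verification.Diagnostics.FailureReason ?? "verification_failed");
        }
    }
}
```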
@@ -0,0 +1,10 @@
using System.Diagnostics;

namespace StellaOps.Excititor.Attestation.Verification;

public static class VexAttestationActivitySource
{
    public const string Name = "StellaOps.Excititor.Attestation";

    public static readonly ActivitySource Value = new(Name);
}
@@ -1,8 +1,8 @@
using System;
using System.Collections.Generic;
using System.Collections.Immutable;
using System.Diagnostics;
using System.Linq;
using System.Text;
using System.Text.Json;
using System.Text.Json.Serialization;
@@ -59,104 +59,120 @@ internal sealed class VexAttestationVerifier : IVexAttestationVerifier
    public async ValueTask<VexAttestationVerification> VerifyAsync(
        VexAttestationVerificationRequest request,
        CancellationToken cancellationToken)
    {
        ArgumentNullException.ThrowIfNull(request);

        var stopwatch = Stopwatch.StartNew();
        var diagnostics = ImmutableDictionary.CreateBuilder<string, string>(StringComparer.Ordinal);
        var resultLabel = "valid";
        var rekorState = "skipped";
        var component = request.IsReverify ? "worker" : "webservice";
        void SetFailure(string reason) => diagnostics["failure_reason"] = reason;
        using var activity = VexAttestationActivitySource.Value.StartActivity("Verify", ActivityKind.Internal);
        activity?.SetTag("attestation.component", component);
        activity?.SetTag("attestation.export_id", request.Attestation.ExportId);

        try
        {
if (string.IsNullOrWhiteSpace(request.Envelope))
|
||||
{
|
||||
diagnostics["envelope.state"] = "missing";
|
||||
SetFailure("missing_envelope");
|
||||
_logger.LogWarning("Attestation envelope is missing for export {ExportId}", request.Attestation.ExportId);
|
||||
resultLabel = "invalid";
|
||||
return BuildResult(false);
|
||||
}
|
||||
|
||||
if (!TryDeserializeEnvelope(request.Envelope, out var envelope, diagnostics))
|
||||
{
|
||||
SetFailure("invalid_envelope");
|
||||
_logger.LogWarning("Failed to deserialize attestation envelope for export {ExportId}", request.Attestation.ExportId);
|
||||
resultLabel = "invalid";
|
||||
return BuildResult(false);
|
||||
}
|
||||
|
||||
if (!string.Equals(envelope.PayloadType, VexDsseBuilder.PayloadType, StringComparison.OrdinalIgnoreCase))
|
||||
{
|
||||
diagnostics["payload.type"] = envelope.PayloadType ?? string.Empty;
|
||||
SetFailure("unexpected_payload_type");
|
||||
_logger.LogWarning(
|
||||
"Unexpected DSSE payload type {PayloadType} for export {ExportId}",
|
||||
envelope.PayloadType,
|
||||
request.Attestation.ExportId);
|
||||
resultLabel = "invalid";
|
||||
return BuildResult(false);
|
||||
}
|
||||
|
||||
if (envelope.Signatures is null || envelope.Signatures.Count == 0)
|
||||
{
|
||||
diagnostics["signature.state"] = "missing";
|
||||
SetFailure("missing_signature");
|
||||
_logger.LogWarning("Attestation envelope for export {ExportId} does not contain signatures.", request.Attestation.ExportId);
|
||||
resultLabel = "invalid";
|
||||
return BuildResult(false);
|
||||
}
|
||||
|
||||
var payloadBase64 = envelope.Payload ?? string.Empty;
|
||||
if (!TryDecodePayload(payloadBase64, out var payloadBytes, diagnostics))
|
||||
{
|
||||
SetFailure("payload_decode_failed");
|
||||
_logger.LogWarning("Failed to decode attestation payload for export {ExportId}", request.Attestation.ExportId);
|
||||
resultLabel = "invalid";
|
||||
return BuildResult(false);
|
||||
}
|
||||
|
||||
if (!TryDeserializeStatement(payloadBytes, out var statement, diagnostics))
|
||||
{
|
||||
SetFailure("invalid_statement");
|
||||
_logger.LogWarning("Failed to deserialize DSSE statement for export {ExportId}", request.Attestation.ExportId);
|
||||
resultLabel = "invalid";
|
||||
return BuildResult(false);
|
||||
}
|
||||
|
||||
if (!ValidatePredicateType(statement, request, diagnostics))
|
||||
{
|
||||
SetFailure("predicate_type_mismatch");
|
||||
_logger.LogWarning("Predicate type mismatch for export {ExportId}", request.Attestation.ExportId);
|
||||
resultLabel = "invalid";
|
||||
return BuildResult(false);
|
||||
}
|
||||
|
||||
if (!ValidateSubject(statement, request, diagnostics))
|
||||
{
|
||||
SetFailure("subject_mismatch");
|
||||
_logger.LogWarning("Subject mismatch for export {ExportId}", request.Attestation.ExportId);
|
||||
resultLabel = "invalid";
|
||||
return BuildResult(false);
|
||||
}
|
||||
|
||||
if (!ValidatePredicate(statement, request, diagnostics))
|
||||
{
|
||||
SetFailure("predicate_mismatch");
|
||||
_logger.LogWarning("Predicate payload mismatch for export {ExportId}", request.Attestation.ExportId);
|
||||
resultLabel = "invalid";
|
||||
return BuildResult(false);
|
||||
}
|
||||
|
||||
if (!ValidateMetadataDigest(envelope, request.Metadata, diagnostics))
|
||||
{
|
||||
SetFailure("envelope_digest_mismatch");
|
||||
_logger.LogWarning("Attestation digest mismatch for export {ExportId}", request.Attestation.ExportId);
|
||||
resultLabel = "invalid";
|
||||
return BuildResult(false);
|
||||
}
|
||||
|
||||
if (!ValidateSignedAt(request.Metadata, request.Attestation.CreatedAt, diagnostics))
|
||||
{
|
||||
SetFailure("signedat_out_of_range");
|
||||
_logger.LogWarning("SignedAt validation failed for export {ExportId}", request.Attestation.ExportId);
|
||||
resultLabel = "invalid";
|
||||
return BuildResult(false);
|
||||
}
|
||||
|
||||
rekorState = await VerifyTransparencyAsync(request.Metadata, diagnostics, cancellationToken).ConfigureAwait(false);
|
||||
if (rekorState is "missing" or "unverified" or "client_unavailable")
|
||||
{
|
||||
SetFailure(rekorState);
|
||||
resultLabel = "invalid";
|
||||
return BuildResult(false);
|
||||
}
|
||||
@@ -164,6 +180,9 @@ internal sealed class VexAttestationVerifier : IVexAttestationVerifier
|
||||
var signaturesVerified = await VerifySignaturesAsync(payloadBytes, envelope.Signatures, diagnostics, cancellationToken).ConfigureAwait(false);
|
||||
if (!signaturesVerified)
|
||||
{
|
||||
diagnostics["failure_reason"] = diagnostics.TryGetValue("signature.reason", out var reason)
|
||||
? reason
|
||||
: "signature_verification_failed";
|
||||
if (_options.RequireSignatureVerification)
|
||||
{
|
||||
resultLabel = "invalid";
|
||||
@@ -183,13 +202,16 @@ internal sealed class VexAttestationVerifier : IVexAttestationVerifier
        catch (Exception ex)
        {
            diagnostics["error"] = ex.GetType().Name;
            diagnostics["error.message"] = ex.Message;
            resultLabel = "error";
            _logger.LogError(ex, "Unexpected exception verifying attestation for export {ExportId}", request.Attestation.ExportId);
            diagnostics["failure_reason"] = diagnostics.TryGetValue("error", out var errorCode)
                ? errorCode
                : ex.GetType().Name;
            return BuildResult(false);
        }
        finally
        {
            stopwatch.Stop();
var tags = new KeyValuePair<string, object?>[]
|
||||
{
|
||||
new("result", resultLabel),
|
||||
@@ -200,12 +222,32 @@ internal sealed class VexAttestationVerifier : IVexAttestationVerifier
|
||||
_metrics.VerifyDuration.Record(stopwatch.Elapsed.TotalSeconds, tags);
|
||||
}
|
||||
|
||||
        VexAttestationVerification BuildResult(bool isValid)
        {
            diagnostics["result"] = resultLabel;
            diagnostics["component"] = component;
            diagnostics["rekor.state"] = rekorState;
            var snapshot = VexAttestationDiagnostics.FromBuilder(diagnostics);

            if (activity is { } currentActivity)
            {
                currentActivity.SetTag("attestation.result", resultLabel);
                currentActivity.SetTag("attestation.rekor", rekorState);
                if (!isValid)
                {
                    var failure = snapshot.FailureReason ?? "verification_failed";
                    currentActivity.SetStatus(ActivityStatusCode.Error, failure);
                    currentActivity.SetTag("attestation.failure_reason", failure);
                }
                else
                {
                    // Both valid and degraded outcomes report Ok; the result tag carries the distinction.
                    currentActivity.SetStatus(ActivityStatusCode.Ok);
                }
            }

            return new VexAttestationVerification(isValid, snapshot);
        }
    }
|
||||
|
||||
|
||||
@@ -14,4 +14,4 @@
    <ProjectReference Include="../../../Aoc/__Libraries/StellaOps.Aoc/StellaOps.Aoc.csproj" />
    <ProjectReference Include="../../../Concelier/__Libraries/StellaOps.Concelier.RawModels/StellaOps.Concelier.RawModels.csproj" />
  </ItemGroup>
</Project>
|
||||
|
||||
@@ -1,7 +1,8 @@
using System;
using System.Collections.Immutable;
using System.Threading;
using System.Threading.Tasks;
using StellaOps.Excititor.Attestation.Verification;

namespace StellaOps.Excititor.Core;

@@ -33,4 +34,4 @@ public sealed record VexAttestationVerificationRequest(

public sealed record VexAttestationVerification(
    bool IsValid,
    VexAttestationDiagnostics Diagnostics);
|
||||
|
||||
@@ -0,0 +1,57 @@
using System.Collections;
using System.Collections.Generic;
using System.Collections.Immutable;

namespace StellaOps.Excititor.Attestation.Verification;

public sealed class VexAttestationDiagnostics : IReadOnlyDictionary<string, string>
{
    private readonly ImmutableDictionary<string, string> _values;

    private VexAttestationDiagnostics(ImmutableDictionary<string, string> values)
    {
        _values = values ?? ImmutableDictionary<string, string>.Empty;
    }

    public static VexAttestationDiagnostics FromBuilder(ImmutableDictionary<string, string>.Builder builder)
    {
        ArgumentNullException.ThrowIfNull(builder);
        return new(builder.ToImmutable());
    }

    public static VexAttestationDiagnostics Empty { get; } = new(ImmutableDictionary<string, string>.Empty);

    public string? Result => TryGetValue("result", out var value) ? value : null;

    public string? Component => TryGetValue("component", out var value) ? value : null;

    public string? RekorState => TryGetValue("rekor.state", out var value) ? value : null;

    public string? FailureReason => TryGetValue("failure_reason", out var value) ? value : null;

    public string this[string key] => _values[key];

    public IEnumerable<string> Keys => _values.Keys;

    public IEnumerable<string> Values => _values.Values;

    public int Count => _values.Count;

    public bool ContainsKey(string key) => _values.ContainsKey(key);

    public bool TryGetValue(string key, out string value)
    {
        if (_values.TryGetValue(key, out var stored))
        {
            value = stored;
            return true;
        }

        value = string.Empty;
        return false;
    }

    public IEnumerator<KeyValuePair<string, string>> GetEnumerator() => _values.GetEnumerator();

    IEnumerator IEnumerable.GetEnumerator() => GetEnumerator();
}
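Usage sketch (editor's illustration, not part of the file above): building and reading a diagnostics snapshot the same way the verifier does; the key names mirror the normalized keys used by `VexAttestationVerifier`.

using System;
using System.Collections.Immutable;
using StellaOps.Excititor.Attestation.Verification;

// Editor's illustration: populate the builder with the verifier's normalized keys, then snapshot it.
var builder = ImmutableDictionary.CreateBuilder<string, string>(StringComparer.Ordinal);
builder["result"] = "invalid";
builder["component"] = "worker";
builder["rekor.state"] = "missing";
builder["failure_reason"] = "missing";

var snapshot = VexAttestationDiagnostics.FromBuilder(builder);

// Typed accessors surface the normalized keys without magic strings at call sites.
Console.WriteLine($"{snapshot.Result} / {snapshot.FailureReason} / {snapshot.RekorState}");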
@@ -85,6 +85,6 @@ public sealed class VexAttestationClientTests
    private sealed class FakeVerifier : IVexAttestationVerifier
    {
        public ValueTask<VexAttestationVerification> VerifyAsync(VexAttestationVerificationRequest request, CancellationToken cancellationToken)
            => ValueTask.FromResult(new VexAttestationVerification(true, VexAttestationDiagnostics.Empty));
    }
}
|
||||
|
||||
@@ -16,42 +16,44 @@ public sealed class VexAttestationVerifierTests : IDisposable
{
    private readonly VexAttestationMetrics _metrics = new();

    [Fact]
    public async Task VerifyAsync_ReturnsValid_WhenEnvelopeMatches()
    {
        var (request, metadata, envelope) = await CreateSignedAttestationAsync();
        var verifier = CreateVerifier(options => options.RequireTransparencyLog = false);

        var verification = await verifier.VerifyAsync(
            new VexAttestationVerificationRequest(request, metadata, envelope),
            CancellationToken.None);

        Assert.True(verification.IsValid);
        Assert.Equal("valid", verification.Diagnostics.Result);
        Assert.Null(verification.Diagnostics.FailureReason);
    }

    [Fact]
    public async Task VerifyAsync_ReturnsInvalid_WhenDigestMismatch()
    {
        var (request, metadata, envelope) = await CreateSignedAttestationAsync();
        var verifier = CreateVerifier(options => options.RequireTransparencyLog = false);

        var tamperedMetadata = new VexAttestationMetadata(
            metadata.PredicateType,
            metadata.Rekor,
            "sha256:deadbeef",
            metadata.SignedAt);

        var verification = await verifier.VerifyAsync(
            new VexAttestationVerificationRequest(request, tamperedMetadata, envelope),
            CancellationToken.None);

        Assert.False(verification.IsValid);
        Assert.Equal("invalid", verification.Diagnostics.Result);
        Assert.Equal("sha256:deadbeef", verification.Diagnostics["metadata.envelopeDigest"]);
        Assert.Equal("envelope_digest_mismatch", verification.Diagnostics.FailureReason);
    }

    [Fact]
public async Task VerifyAsync_AllowsOfflineTransparency_WhenConfigured()
|
||||
{
|
||||
var (request, metadata, envelope) = await CreateSignedAttestationAsync(includeRekor: true);
|
||||
@@ -67,47 +69,50 @@ public sealed class VexAttestationVerifierTests : IDisposable
|
||||
CancellationToken.None);
|
||||
|
||||
        Assert.True(verification.IsValid);
        Assert.Equal("offline", verification.Diagnostics.RekorState);
        Assert.Equal("degraded", verification.Diagnostics.Result);
        Assert.Null(verification.Diagnostics.FailureReason);
    }

    [Fact]
    public async Task VerifyAsync_ReturnsInvalid_WhenTransparencyRequiredAndMissing()
    {
        var (request, metadata, envelope) = await CreateSignedAttestationAsync(includeRekor: false);
        var verifier = CreateVerifier(options =>
        {
            options.RequireTransparencyLog = true;
            options.AllowOfflineTransparency = false;
        });

        var verification = await verifier.VerifyAsync(
            new VexAttestationVerificationRequest(request, metadata, envelope),
            CancellationToken.None);

        Assert.False(verification.IsValid);
        Assert.Equal("missing", verification.Diagnostics.RekorState);
        Assert.Equal("invalid", verification.Diagnostics.Result);
        Assert.Equal("missing", verification.Diagnostics.FailureReason);
    }

    [Fact]
    public async Task VerifyAsync_ReturnsInvalid_WhenTransparencyUnavailableAndOfflineDisallowed()
    {
        var (request, metadata, envelope) = await CreateSignedAttestationAsync(includeRekor: true);
        var transparency = new ThrowingTransparencyLogClient();
        var verifier = CreateVerifier(options =>
        {
            options.RequireTransparencyLog = true;
            options.AllowOfflineTransparency = false;
        }, transparency);

        var verification = await verifier.VerifyAsync(
            new VexAttestationVerificationRequest(request, metadata, envelope),
            CancellationToken.None);

        Assert.False(verification.IsValid);
        Assert.Equal("unreachable", verification.Diagnostics.RekorState);
        Assert.Equal("invalid", verification.Diagnostics.Result);
        Assert.Equal("unreachable", verification.Diagnostics.FailureReason);
    }
|
||||
|
||||
[Fact]
|
||||
@@ -125,7 +130,7 @@ public sealed class VexAttestationVerifierTests : IDisposable
|
||||
CancellationToken.None);
|
||||
|
||||
        Assert.True(verification.IsValid);
        Assert.Equal("valid", verification.Diagnostics.Result);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
@@ -152,6 +157,8 @@ public sealed class VexAttestationVerifierTests : IDisposable
|
||||
|
||||
Assert.True(verification.IsValid);
|
||||
Assert.Equal("verified", verification.Diagnostics["signature.state"]);
|
||||
Assert.Equal("valid", verification.Diagnostics.Result);
|
||||
Assert.Null(verification.Diagnostics.FailureReason);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
@@ -179,6 +186,8 @@ public sealed class VexAttestationVerifierTests : IDisposable
|
||||
Assert.False(verification.IsValid);
|
||||
Assert.Equal("error", verification.Diagnostics["signature.state"]);
|
||||
Assert.Equal("verification_failed", verification.Diagnostics["signature.reason"]);
|
||||
Assert.Equal("verification_failed", verification.Diagnostics.FailureReason);
|
||||
Assert.Equal("invalid", verification.Diagnostics.Result);
|
||||
}
|
||||
|
||||
private async Task<(VexAttestationRequest Request, VexAttestationMetadata Metadata, string Envelope)> CreateSignedAttestationAsync(
|
||||
|
||||
@@ -6,6 +6,7 @@ using System.Globalization;
|
||||
using Microsoft.Extensions.Logging.Abstractions;
|
||||
using MongoDB.Driver;
|
||||
using StellaOps.Excititor.Core;
|
||||
using StellaOps.Excititor.Attestation.Verification;
|
||||
using StellaOps.Excititor.Export;
|
||||
using StellaOps.Excititor.Policy;
|
||||
using StellaOps.Excititor.Storage.Mongo;
|
||||
@@ -291,7 +292,7 @@ public sealed class ExportEngineTests
|
||||
}
|
||||
|
||||
        public ValueTask<VexAttestationVerification> VerifyAsync(VexAttestationVerificationRequest request, CancellationToken cancellationToken)
            => ValueTask.FromResult(new VexAttestationVerification(true, VexAttestationDiagnostics.Empty));
|
||||
}
|
||||
|
||||
private sealed class RecordingCacheIndex : IVexCacheIndex
|
||||
|
||||
@@ -4,13 +4,14 @@ using System.Collections.Immutable;
using System.Text;
using System.Text.Json;
using Microsoft.Extensions.DependencyInjection;
using Microsoft.Extensions.DependencyInjection.Extensions;
using Microsoft.Extensions.Hosting;
using Microsoft.Extensions.Logging;
using StellaOps.Excititor.Core;
using StellaOps.Excititor.Attestation.Verification;
using StellaOps.Excititor.Export;
using StellaOps.Excititor.Storage.Mongo;
using StellaOps.Excititor.WebService.Services;
using MongoDB.Driver;
using StellaOps.Excititor.Attestation.Dsse;
|
||||
|
||||
@@ -162,7 +163,7 @@ internal static class TestServiceOverrides
|
||||
|
||||
public ValueTask<VexAttestationVerification> VerifyAsync(VexAttestationVerificationRequest request, CancellationToken cancellationToken)
|
||||
{
|
||||
            var verification = new VexAttestationVerification(true, VexAttestationDiagnostics.Empty);
|
||||
return ValueTask.FromResult(verification);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -504,6 +504,21 @@ public sealed class DefaultVexProviderRunnerTests
|
||||
bool includeGlobal,
|
||||
CancellationToken cancellationToken)
|
||||
=> ValueTask.FromResult(DefaultTrust);
|
||||
|
||||
public ValueTask<IssuerTrustResponseModel> SetIssuerTrustAsync(
|
||||
string tenantId,
|
||||
string issuerId,
|
||||
decimal weight,
|
||||
string? reason,
|
||||
CancellationToken cancellationToken)
|
||||
=> ValueTask.FromResult(DefaultTrust);
|
||||
|
||||
public ValueTask DeleteIssuerTrustAsync(
|
||||
string tenantId,
|
||||
string issuerId,
|
||||
string? reason,
|
||||
CancellationToken cancellationToken)
|
||||
=> ValueTask.CompletedTask;
|
||||
}
|
||||
|
||||
private sealed class NoopSignatureVerifier : IVexSignatureVerifier
|
||||
@@ -655,25 +670,25 @@ public sealed class DefaultVexProviderRunnerTests
|
||||
}
|
||||
}
|
||||
|
||||
    private sealed class StubAttestationVerifier : IVexAttestationVerifier
    {
        private readonly bool _isValid;
        private readonly VexAttestationDiagnostics _diagnostics;

        public StubAttestationVerifier(bool isValid, ImmutableDictionary<string, string> diagnostics)
        {
            _isValid = isValid;
            _diagnostics = VexAttestationDiagnostics.FromBuilder(diagnostics.ToBuilder());
        }

        public int Invocations { get; private set; }

        public ValueTask<VexAttestationVerification> VerifyAsync(VexAttestationVerificationRequest request, CancellationToken cancellationToken)
        {
            Invocations++;
            return ValueTask.FromResult(new VexAttestationVerification(_isValid, _diagnostics));
        }
    }
|
||||
|
||||
private static VexRawDocument CreateAttestationRawDocument(DateTimeOffset observedAt)
|
||||
{
|
||||
|
||||
@@ -249,19 +249,21 @@ public sealed class WorkerSignatureVerifierTests
|
||||
    private sealed class StubAttestationVerifier : IVexAttestationVerifier
    {
        private readonly bool _isValid;
        private readonly VexAttestationDiagnostics _diagnostics;

        public StubAttestationVerifier(bool isValid, ImmutableDictionary<string, string>? diagnostics = null)
        {
            _isValid = isValid;
            _diagnostics = diagnostics is null
                ? VexAttestationDiagnostics.Empty
                : VexAttestationDiagnostics.FromBuilder(diagnostics.ToBuilder());
        }

        public int Invocations { get; private set; }

        public ValueTask<VexAttestationVerification> VerifyAsync(VexAttestationVerificationRequest request, CancellationToken cancellationToken)
        {
            Invocations++;
            return ValueTask.FromResult(new VexAttestationVerification(_isValid, _diagnostics));
        }
    }
|
||||
@@ -269,7 +271,7 @@ public sealed class WorkerSignatureVerifierTests
|
||||
private sealed class StubIssuerDirectoryClient : IIssuerDirectoryClient
|
||||
{
|
||||
private readonly IReadOnlyList<IssuerKeyModel> _keys;
|
||||
        private IssuerTrustResponseModel _trust;
|
||||
|
||||
private StubIssuerDirectoryClient(
|
||||
IReadOnlyList<IssuerKeyModel> keys,
|
||||
@@ -302,7 +304,7 @@ public sealed class WorkerSignatureVerifierTests
|
||||
null,
|
||||
null);
|
||||
|
||||
            var now = DateTimeOffset.UnixEpoch;
|
||||
var overrideModel = new IssuerTrustOverrideModel(weight, "stub", now, "test", now, "test");
|
||||
return new StubIssuerDirectoryClient(
|
||||
new[] { key },
|
||||
@@ -322,6 +324,29 @@ public sealed class WorkerSignatureVerifierTests
|
||||
bool includeGlobal,
|
||||
CancellationToken cancellationToken)
|
||||
=> ValueTask.FromResult(_trust);
|
||||
|
||||
public ValueTask<IssuerTrustResponseModel> SetIssuerTrustAsync(
|
||||
string tenantId,
|
||||
string issuerId,
|
||||
decimal weight,
|
||||
string? reason,
|
||||
CancellationToken cancellationToken)
|
||||
{
|
||||
var now = DateTimeOffset.UnixEpoch;
|
||||
var overrideModel = new IssuerTrustOverrideModel(weight, "stub-set", now, "test", now, "test");
|
||||
_trust = new IssuerTrustResponseModel(overrideModel, null, weight);
|
||||
return ValueTask.FromResult(_trust);
|
||||
}
|
||||
|
||||
public ValueTask DeleteIssuerTrustAsync(
|
||||
string tenantId,
|
||||
string issuerId,
|
||||
string? reason,
|
||||
CancellationToken cancellationToken)
|
||||
{
|
||||
_trust = new IssuerTrustResponseModel(null, null, 0m);
|
||||
return ValueTask.CompletedTask;
|
||||
}
|
||||
}
|
||||
|
||||
private sealed class FixedTimeProvider : TimeProvider
|
||||
|
||||
@@ -1,3 +1,3 @@
using System.Runtime.CompilerServices;

[assembly: InternalsVisibleTo("StellaOps.Scanner.WebService.Tests")]
|
||||
|
||||
@@ -97,6 +97,8 @@ builder.Services.AddSurfaceEnvironment(options =>
|
||||
builder.Services.AddSurfaceValidation();
|
||||
builder.Services.AddSurfaceFileCache();
|
||||
builder.Services.AddSurfaceSecrets();
|
||||
builder.Services.AddSingleton<IConfigureOptions<SurfaceCacheOptions>>(sp =>
|
||||
new SurfaceCacheOptionsConfigurator(sp.GetRequiredService<ISurfaceEnvironment>()));
|
||||
builder.Services.AddSingleton<ISurfacePointerService, SurfacePointerService>();
|
||||
builder.Services.AddSingleton<IRedisConnectionFactory, RedisConnectionFactory>();
|
||||
if (bootstrapOptions.Events is { Enabled: true } eventsOptions
|
||||
@@ -370,5 +372,24 @@ if (resolvedOptions.Features.EnablePolicyPreview)
|
||||
apiGroup.MapReportEndpoints(resolvedOptions.Api.ReportsSegment);
|
||||
apiGroup.MapRuntimeEndpoints(resolvedOptions.Api.RuntimeSegment);
|
||||
|
||||
app.MapOpenApiIfAvailable();
await app.RunAsync().ConfigureAwait(false);
|
||||
|
||||
public partial class Program;
|
||||
|
||||
internal sealed class SurfaceCacheOptionsConfigurator : IConfigureOptions<SurfaceCacheOptions>
|
||||
{
|
||||
private readonly ISurfaceEnvironment _surfaceEnvironment;
|
||||
|
||||
public SurfaceCacheOptionsConfigurator(ISurfaceEnvironment surfaceEnvironment)
|
||||
{
|
||||
_surfaceEnvironment = surfaceEnvironment ?? throw new ArgumentNullException(nameof(surfaceEnvironment));
|
||||
}
|
||||
|
||||
public void Configure(SurfaceCacheOptions options)
|
||||
{
|
||||
ArgumentNullException.ThrowIfNull(options);
|
||||
var settings = _surfaceEnvironment.Settings;
|
||||
options.RootDirectory = settings.CacheRoot.FullName;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -4,7 +4,8 @@
|
||||
|----|--------|----------|------------|-------------|---------------|
|
||||
| SCAN-REPLAY-186-001 | TODO | Scanner WebService Guild | REPLAY-CORE-185-001 | Implement scan `record` mode producing replay manifests/bundles, capture policy/feed/tool hashes, and update `docs/modules/scanner/architecture.md` referencing `docs/replay/DETERMINISTIC_REPLAY.md` Section 6. | API/worker integration tests cover record mode; docs merged; replay artifacts stored per spec. |
|
||||
| SCANNER-SURFACE-02 | DONE (2025-11-05) | Scanner WebService Guild | SURFACE-FS-02 | Publish Surface.FS pointers (CAS URIs, manifests) via scan/report APIs and update attestation metadata.<br>2025-11-05: Surface pointers projected through scan/report endpoints, orchestrator samples + DSSE fixtures refreshed with manifest block, readiness tests updated to use validator stub. | OpenAPI updated; clients regenerated; integration tests validate pointer presence and tenancy. |
|
||||
| SCANNER-ENV-02 | DOING (2025-11-02) | Scanner WebService Guild, Ops Guild | SURFACE-ENV-02 | Wire Surface.Env helpers into WebService hosting (cache roots, feature flags) and document configuration.<br>2025-11-02: Cache root resolution switched to helper; feature flag bindings updated; Helm/Compose updates pending review.<br>2025-11-05 14:55Z: Aligning readiness checks, docs, and Helm/Compose templates with Surface.Env outputs and planning test coverage for configuration fallbacks.<br>2025-11-06 17:05Z: Surface.Env documentation/README refreshed; warning catalogue captured for ops handoff. | Service uses helper; env table documented; helm/compose templates updated. |
|
||||
> 2025-11-05 19:18Z: Added configurator to project wiring and unit test ensuring Surface.Env cache root is honoured.
|
||||
| SCANNER-SECRETS-02 | DOING (2025-11-02) | Scanner WebService Guild, Security Guild | SURFACE-SECRETS-02 | Replace ad-hoc secret wiring with Surface.Secrets for report/export operations (registry and CAS tokens).<br>2025-11-02: Export/report flows now depend on Surface.Secrets stub; integration tests in progress. | Secrets fetched through shared provider; unit/integration tests cover rotation + failure cases. |
|
||||
| SCANNER-EVENTS-16-301 | BLOCKED (2025-10-26) | Scanner WebService Guild | ORCH-SVC-38-101, NOTIFY-SVC-38-001 | Emit orchestrator-compatible envelopes (`scanner.event.*`) and update integration tests to verify Notifier ingestion (no Redis queue coupling). | Tests assert envelope schema + orchestrator publish; Notifier consumer harness passes; docs updated with new event contract. Blocked by .NET 10 preview OpenAPI/Auth dependency drift preventing `dotnet test` completion. |
|
||||
| SCANNER-EVENTS-16-302 | DOING (2025-10-26) | Scanner WebService Guild | SCANNER-EVENTS-16-301 | Extend orchestrator event links (report/policy/attestation) once endpoints are finalised across gateway + console. | Links section covers UI/API targets; downstream consumers validated; docs/samples updated. |
|
||||
|
||||
src/Scanner/StellaOps.Scanner.Worker/AssemblyInfo.cs (new file)
@@ -0,0 +1,3 @@
|
||||
using System.Runtime.CompilerServices;
|
||||
|
||||
[assembly: InternalsVisibleTo("StellaOps.Scanner.Worker.Tests")]
|
||||
@@ -36,6 +36,8 @@ builder.Services.AddSurfaceEnvironment(options =>
|
||||
builder.Services.AddSurfaceValidation();
|
||||
builder.Services.AddSurfaceFileCache();
|
||||
builder.Services.AddSurfaceSecrets();
|
||||
builder.Services.AddSingleton<IConfigureOptions<SurfaceCacheOptions>>(sp =>
|
||||
new SurfaceCacheOptionsConfigurator(sp.GetRequiredService<ISurfaceEnvironment>()));
|
||||
builder.Services.AddSingleton<ScannerWorkerMetrics>();
|
||||
builder.Services.AddSingleton<ScanProgressReporter>();
|
||||
builder.Services.AddSingleton<ScanJobProcessor>();
|
||||
@@ -127,3 +129,20 @@ var host = builder.Build();
|
||||
await host.RunAsync();
|
||||
|
||||
public partial class Program;
|
||||
|
||||
internal sealed class SurfaceCacheOptionsConfigurator : IConfigureOptions<SurfaceCacheOptions>
|
||||
{
|
||||
private readonly ISurfaceEnvironment _surfaceEnvironment;
|
||||
|
||||
public SurfaceCacheOptionsConfigurator(ISurfaceEnvironment surfaceEnvironment)
|
||||
{
|
||||
_surfaceEnvironment = surfaceEnvironment ?? throw new ArgumentNullException(nameof(surfaceEnvironment));
|
||||
}
|
||||
|
||||
public void Configure(SurfaceCacheOptions options)
|
||||
{
|
||||
ArgumentNullException.ThrowIfNull(options);
|
||||
var settings = _surfaceEnvironment.Settings;
|
||||
options.RootDirectory = settings.CacheRoot.FullName;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -4,5 +4,6 @@
|
||||
|----|--------|----------|------------|-------------|---------------|
|
||||
| SCAN-REPLAY-186-002 | TODO | Scanner Worker Guild | REPLAY-CORE-185-001 | Enforce deterministic analyzer execution when consuming replay input bundles, emit layer Merkle metadata, and author `docs/modules/scanner/deterministic-execution.md` summarising invariants from `docs/replay/DETERMINISTIC_REPLAY.md` Section 4. | Replay mode analyzers pass determinism tests; new doc merged; integration fixtures updated. |
|
||||
| SCANNER-SURFACE-01 | DOING (2025-11-02) | Scanner Worker Guild | SURFACE-FS-02 | Persist Surface.FS manifests after analyzer stages, including layer CAS metadata and EntryTrace fragments.<br>2025-11-02: Draft Surface.FS manifests emitted for sample scans; telemetry counters under review. | Integration tests prove cache entries exist; telemetry counters exported. |
|
||||
| SCANNER-ENV-01 | DOING (2025-11-02) | Scanner Worker Guild | SURFACE-ENV-02 | Replace ad-hoc environment reads with `StellaOps.Scanner.Surface.Env` helpers for cache roots and CAS endpoints.<br>2025-11-02: Worker bootstrap now resolves cache roots via helper; warning path documented; smoke tests running.<br>2025-11-05 14:55Z: Extending helper usage into cache/secrets configuration, updating worker validator wiring, and drafting docs/tests for new Surface.Env outputs.<br>2025-11-06 17:05Z: README/design docs updated with warning catalogue; startup logging guidance captured for ops runbooks. | Worker boots with helper; misconfiguration warnings documented; smoke tests updated. |
|
||||
> 2025-11-05 19:18Z: Bound `SurfaceCacheOptions` root directory to resolved Surface.Env settings and added unit coverage around the configurator.
|
||||
| SCANNER-SECRETS-01 | DOING (2025-11-02) | Scanner Worker Guild, Security Guild | SURFACE-SECRETS-02 | Adopt `StellaOps.Scanner.Surface.Secrets` for registry/CAS credentials during scan execution.<br>2025-11-02: Surface.Secrets provider wired for CAS token retrieval; integration tests added. | Secrets fetched via shared provider; legacy secret code removed; integration tests cover rotation. |
|
||||
|
||||
@@ -0,0 +1,49 @@
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.IO;
|
||||
using System.Security.Cryptography.X509Certificates;
|
||||
using StellaOps.Scanner.Surface.Env;
|
||||
using StellaOps.Scanner.Surface.FS;
|
||||
using Xunit;
|
||||
|
||||
namespace StellaOps.Scanner.WebService.Tests;
|
||||
|
||||
public sealed class SurfaceCacheOptionsConfiguratorTests
|
||||
{
|
||||
[Fact]
|
||||
public void Configure_UsesSurfaceEnvironmentCacheRoot()
|
||||
{
|
||||
var cacheRoot = new DirectoryInfo(Path.Combine(Path.GetTempPath(), Guid.NewGuid().ToString("N")));
|
||||
var settings = new SurfaceEnvironmentSettings(
|
||||
new Uri("https://surface.example"),
|
||||
"surface-cache",
|
||||
null,
|
||||
cacheRoot,
|
||||
cacheQuotaMegabytes: 512,
|
||||
prefetchEnabled: true,
|
||||
featureFlags: Array.Empty<string>(),
|
||||
secrets: new SurfaceSecretsConfiguration("file", "tenant-b", "/etc/secrets", null, null, allowInline: false),
|
||||
tenant: "tenant-b",
|
||||
tls: new SurfaceTlsConfiguration(null, null, new X509Certificate2Collection()));
|
||||
|
||||
var environment = new StubSurfaceEnvironment(settings);
|
||||
var configurator = new SurfaceCacheOptionsConfigurator(environment);
|
||||
var options = new SurfaceCacheOptions();
|
||||
|
||||
configurator.Configure(options);
|
||||
|
||||
Assert.Equal(cacheRoot.FullName, options.RootDirectory);
|
||||
}
|
||||
|
||||
private sealed class StubSurfaceEnvironment : ISurfaceEnvironment
|
||||
{
|
||||
public StubSurfaceEnvironment(SurfaceEnvironmentSettings settings)
|
||||
{
|
||||
Settings = settings;
|
||||
}
|
||||
|
||||
public SurfaceEnvironmentSettings Settings { get; }
|
||||
|
||||
public IReadOnlyDictionary<string, string> RawVariables { get; } = new Dictionary<string, string>();
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,49 @@
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.IO;
|
||||
using System.Security.Cryptography.X509Certificates;
|
||||
using StellaOps.Scanner.Surface.Env;
|
||||
using StellaOps.Scanner.Surface.FS;
|
||||
using Xunit;
|
||||
|
||||
namespace StellaOps.Scanner.Worker.Tests;
|
||||
|
||||
public sealed class SurfaceCacheOptionsConfiguratorTests
|
||||
{
|
||||
[Fact]
|
||||
public void Configure_UsesSurfaceEnvironmentCacheRoot()
|
||||
{
|
||||
var cacheRoot = new DirectoryInfo(Path.Combine(Path.GetTempPath(), Guid.NewGuid().ToString("N")));
|
||||
var settings = new SurfaceEnvironmentSettings(
|
||||
new Uri("https://surface.example"),
|
||||
"surface-cache",
|
||||
null,
|
||||
cacheRoot,
|
||||
cacheQuotaMegabytes: 1024,
|
||||
prefetchEnabled: false,
|
||||
featureFlags: Array.Empty<string>(),
|
||||
secrets: new SurfaceSecretsConfiguration("file", "tenant-a", "/etc/secrets", null, null, false),
|
||||
tenant: "tenant-a",
|
||||
tls: new SurfaceTlsConfiguration(null, null, new X509Certificate2Collection()));
|
||||
|
||||
var environment = new StubSurfaceEnvironment(settings);
|
||||
var configurator = new SurfaceCacheOptionsConfigurator(environment);
|
||||
var options = new SurfaceCacheOptions();
|
||||
|
||||
configurator.Configure(options);
|
||||
|
||||
Assert.Equal(cacheRoot.FullName, options.RootDirectory);
|
||||
}
|
||||
|
||||
private sealed class StubSurfaceEnvironment : ISurfaceEnvironment
|
||||
{
|
||||
public StubSurfaceEnvironment(SurfaceEnvironmentSettings settings)
|
||||
{
|
||||
Settings = settings;
|
||||
}
|
||||
|
||||
public SurfaceEnvironmentSettings Settings { get; }
|
||||
|
||||
public IReadOnlyDictionary<string, string> RawVariables { get; } = new Dictionary<string, string>();
|
||||
}
|
||||
}
|
||||
@@ -1,6 +1,6 @@
|
||||
namespace StellaOps.Scheduler.WebService.GraphJobs;
|
||||
|
||||
public readonly record struct GraphJobUpdateResult<TJob>(bool Updated, TJob Job) where TJob : class
|
||||
{
|
||||
public static GraphJobUpdateResult<TJob> UpdatedResult(TJob job) => new(true, job);
|
||||
|
||||
|
||||
@@ -43,6 +43,7 @@ internal sealed class PolicySimulationMetricsProvider : IPolicySimulationMetrics
|
||||
private readonly Histogram<double> _latencyHistogram;
|
||||
private readonly object _snapshotLock = new();
|
||||
private IReadOnlyDictionary<string, long> _latestQueueSnapshot = new Dictionary<string, long>(StringComparer.Ordinal);
|
||||
private string _latestTenantId = string.Empty;
|
||||
private bool _disposed;
|
||||
|
||||
public PolicySimulationMetricsProvider(IPolicyRunJobRepository repository, TimeProvider? timeProvider = null)
|
||||
@@ -86,6 +87,7 @@ internal sealed class PolicySimulationMetricsProvider : IPolicySimulationMetrics
|
||||
lock (_snapshotLock)
|
||||
{
|
||||
_latestQueueSnapshot = queueCounts;
|
||||
_latestTenantId = tenantId;
|
||||
}
|
||||
|
||||
var sampleSize = 200;
|
||||
@@ -134,16 +136,21 @@ internal sealed class PolicySimulationMetricsProvider : IPolicySimulationMetrics
|
||||
private IEnumerable<Measurement<long>> ObserveQueueDepth()
|
||||
{
|
||||
IReadOnlyDictionary<string, long> snapshot;
|
||||
string tenantId;
|
||||
lock (_snapshotLock)
|
||||
{
|
||||
snapshot = _latestQueueSnapshot;
|
||||
tenantId = _latestTenantId;
|
||||
}
|
||||
|
||||
tenantId = string.IsNullOrWhiteSpace(tenantId) ? "unknown" : tenantId;
|
||||
|
||||
foreach (var pair in snapshot)
|
||||
{
|
||||
yield return new Measurement<long>(
|
||||
pair.Value,
|
||||
new KeyValuePair<string, object?>("status", pair.Key));
|
||||
new KeyValuePair<string, object?>("status", pair.Key),
|
||||
new KeyValuePair<string, object?>("tenantId", tenantId));
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -30,7 +30,8 @@
|
||||
| ID | Status | Owner(s) | Depends on | Description | Exit Criteria |
|
||||
|----|--------|----------|------------|-------------|---------------|
|
||||
| SCHED-CONSOLE-27-001 | DONE (2025-11-03) | Scheduler WebService Guild, Policy Registry Guild | SCHED-WEB-16-103, REGISTRY-API-27-005 | Provide policy batch simulation orchestration endpoints (`/policies/simulations` POST/GET) exposing run creation, shard status, SSE progress, cancellation, and retries with RBAC enforcement. | API handles shard lifecycle with SSE heartbeats + retry headers; unauthorized requests rejected; integration tests cover submit/cancel/resume flows. |
|
||||
| SCHED-CONSOLE-27-002 | DOING (2025-11-03) | Scheduler WebService Guild, Observability Guild | SCHED-CONSOLE-27-001 | Emit telemetry endpoints/metrics (`policy_simulation_queue_depth`, `policy_simulation_latency`) and webhook callbacks for completion/failure consumed by Registry. | Metrics exposed via gateway, dashboards seeded, webhook contract documented, integration tests validate metrics emission. |
|
||||
> 2025-11-06: Added tenant-aware tagging to `policy_simulation_queue_depth` metrics and unit coverage for the metrics provider snapshot.
|
||||
|
||||
## Vulnerability Explorer (Sprint 29)
|
||||
| ID | Status | Owner(s) | Depends on | Description | Exit Criteria |
|
||||
|
||||
@@ -0,0 +1,175 @@
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.Collections.Immutable;
|
||||
using System.Diagnostics.Metrics;
|
||||
using System.Globalization;
|
||||
using System.Linq;
|
||||
using System.Threading;
|
||||
using System.Threading.Tasks;
|
||||
using MongoDB.Driver;
|
||||
using StellaOps.Scheduler.Models;
|
||||
using StellaOps.Scheduler.Storage.Mongo.Repositories;
|
||||
using StellaOps.Scheduler.WebService.PolicySimulations;
|
||||
using Xunit;
|
||||
|
||||
namespace StellaOps.Scheduler.WebService.Tests;
|
||||
|
||||
public sealed class PolicySimulationMetricsProviderTests
|
||||
{
|
||||
[Fact]
|
||||
public async Task CaptureAsync_UpdatesSnapshotAndEmitsTenantTaggedGauge()
|
||||
{
|
||||
var repository = new StubPolicyRunJobRepository();
|
||||
repository.QueueCounts[PolicyRunJobStatus.Pending] = 3;
|
||||
repository.QueueCounts[PolicyRunJobStatus.Dispatching] = 1;
|
||||
repository.QueueCounts[PolicyRunJobStatus.Submitted] = 2;
|
||||
|
||||
var now = DateTimeOffset.Parse("2025-11-06T10:00:00Z", CultureInfo.InvariantCulture, DateTimeStyles.AdjustToUniversal);
|
||||
repository.TerminalJobs.Add(CreateJob("job-1", PolicyRunJobStatus.Completed, now.AddMinutes(-30), now.AddMinutes(-5)));
|
||||
repository.TerminalJobs.Add(CreateJob("job-2", PolicyRunJobStatus.Failed, now.AddMinutes(-20), now.AddMinutes(-2)));
|
||||
|
||||
using var provider = new PolicySimulationMetricsProvider(repository);
|
||||
|
||||
var response = await provider.CaptureAsync("tenant-alpha", CancellationToken.None);
|
||||
|
||||
Assert.Equal(6, response.QueueDepth.Total);
|
||||
Assert.Equal(3, response.QueueDepth.ByStatus["pending"]);
|
||||
Assert.Equal(2, response.QueueDepth.ByStatus["submitted"]);
|
||||
|
||||
var measurements = new List<(string Status, string Tenant, long Value)>();
|
||||
using var listener = new MeterListener
|
||||
{
|
||||
InstrumentPublished = (instrument, listener) =>
|
||||
{
|
||||
if (instrument.Meter.Name == "StellaOps.Scheduler.WebService.PolicySimulations" &&
|
||||
instrument.Name == "policy_simulation_queue_depth")
|
||||
{
|
||||
listener.EnableMeasurementEvents(instrument);
|
||||
}
|
||||
}
|
||||
};
|
||||
listener.SetMeasurementEventCallback<long>((instrument, measurement, tags, state) =>
|
||||
{
|
||||
var status = "";
|
||||
var tenant = "";
|
||||
foreach (var tag in tags)
|
||||
{
|
||||
if (string.Equals(tag.Key, "status", StringComparison.Ordinal))
|
||||
{
|
||||
status = tag.Value?.ToString() ?? string.Empty;
|
||||
}
|
||||
|
||||
if (string.Equals(tag.Key, "tenantId", StringComparison.Ordinal))
|
||||
{
|
||||
tenant = tag.Value?.ToString() ?? string.Empty;
|
||||
}
|
||||
}
|
||||
|
||||
measurements.Add((status, tenant, measurement));
|
||||
});
|
||||
listener.Start();
|
||||
listener.RecordObservableInstruments();
|
||||
|
||||
Assert.Contains(measurements, item =>
|
||||
item.Status == "pending" &&
|
||||
item.Tenant == "tenant-alpha" &&
|
||||
item.Value == 3);
|
||||
}
|
||||
|
||||
private static PolicyRunJob CreateJob(string id, PolicyRunJobStatus status, DateTimeOffset queuedAt, DateTimeOffset finishedAt)
|
||||
{
|
||||
DateTimeOffset? submittedAt = status is PolicyRunJobStatus.Completed or PolicyRunJobStatus.Failed
|
||||
? queuedAt.AddMinutes(2)
|
||||
: null;
|
||||
DateTimeOffset? completedAt = status is PolicyRunJobStatus.Completed or PolicyRunJobStatus.Failed
|
||||
? finishedAt
|
||||
: null;
|
||||
DateTimeOffset? cancelledAt = status is PolicyRunJobStatus.Cancelled ? finishedAt : null;
|
||||
var lastError = status is PolicyRunJobStatus.Failed ? "policy engine timeout" : null;
|
||||
|
||||
return new PolicyRunJob(
|
||||
SchedulerSchemaVersions.PolicyRunJob,
|
||||
id,
|
||||
"tenant-alpha",
|
||||
"policy-x",
|
||||
1,
|
||||
PolicyRunMode.Simulate,
|
||||
PolicyRunPriority.Normal,
|
||||
0,
|
||||
$"run-{id}",
|
||||
"user:actor",
|
||||
null,
|
||||
null,
|
||||
PolicyRunInputs.Empty,
|
||||
queuedAt,
|
||||
status,
|
||||
1,
|
||||
finishedAt,
|
||||
            lastError,
|
||||
queuedAt,
|
||||
finishedAt,
|
||||
finishedAt,
|
||||
submittedAt,
|
||||
completedAt,
|
||||
null,
|
||||
null,
|
||||
false,
|
||||
null,
|
||||
null,
|
||||
cancelledAt);
|
||||
}
|
||||
|
||||
private sealed class StubPolicyRunJobRepository : IPolicyRunJobRepository
|
||||
{
|
||||
public Dictionary<PolicyRunJobStatus, long> QueueCounts { get; } = new();
|
||||
public List<PolicyRunJob> TerminalJobs { get; } = new();
|
||||
|
||||
public Task<long> CountAsync(string tenantId, PolicyRunMode mode, IReadOnlyCollection<PolicyRunJobStatus> statuses, CancellationToken cancellationToken = default)
|
||||
{
|
||||
var total = 0L;
|
||||
foreach (var status in statuses)
|
||||
{
|
||||
if (QueueCounts.TryGetValue(status, out var count))
|
||||
{
|
||||
total += count;
|
||||
}
|
||||
}
|
||||
|
||||
return Task.FromResult(total);
|
||||
}
|
||||
|
||||
public Task<IReadOnlyList<PolicyRunJob>> ListAsync(
|
||||
string tenantId,
|
||||
string? policyId = null,
|
||||
PolicyRunMode? mode = null,
|
||||
IReadOnlyCollection<PolicyRunJobStatus>? statuses = null,
|
||||
DateTimeOffset? queuedAfter = null,
|
||||
int limit = 50,
|
||||
IClientSessionHandle? session = null,
|
||||
CancellationToken cancellationToken = default)
|
||||
{
|
||||
IReadOnlyList<PolicyRunJob> filtered = TerminalJobs;
|
||||
if (statuses is { Count: > 0 })
|
||||
{
|
||||
filtered = TerminalJobs.Where(job => statuses.Contains(job.Status)).ToList();
|
||||
}
|
||||
|
||||
return Task.FromResult(filtered);
|
||||
}
|
||||
|
||||
public Task<PolicyRunJob?> GetAsync(string tenantId, string jobId, IClientSessionHandle? session = null, CancellationToken cancellationToken = default)
|
||||
=> Task.FromResult<PolicyRunJob?>(null);
|
||||
|
||||
public Task<PolicyRunJob?> GetByRunIdAsync(string tenantId, string runId, IClientSessionHandle? session = null, CancellationToken cancellationToken = default)
|
||||
=> Task.FromResult<PolicyRunJob?>(null);
|
||||
|
||||
public Task InsertAsync(PolicyRunJob job, IClientSessionHandle? session = null, CancellationToken cancellationToken = default)
|
||||
=> Task.CompletedTask;
|
||||
|
||||
public Task<PolicyRunJob?> LeaseAsync(string leaseOwner, DateTimeOffset now, TimeSpan leaseDuration, int maxAttempts, IClientSessionHandle? session = null, CancellationToken cancellationToken = default)
|
||||
=> Task.FromResult<PolicyRunJob?>(null);
|
||||
|
||||
public Task<bool> ReplaceAsync(PolicyRunJob job, string? expectedLeaseOwner = null, IClientSessionHandle? session = null, CancellationToken cancellationToken = default)
|
||||
=> Task.FromResult(true);
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,12 @@
using StellaOps.TaskRunner.Core.Planning;

namespace StellaOps.TaskRunner.Core.Execution;

public interface IPackRunArtifactUploader
{
    Task UploadAsync(
        PackRunExecutionContext context,
        PackRunState state,
        IReadOnlyList<TaskPackPlanOutput> outputs,
        CancellationToken cancellationToken);
}
@@ -0,0 +1,6 @@
namespace StellaOps.TaskRunner.Core.Execution;

public interface IPackRunJobScheduler
{
    Task ScheduleAsync(PackRunExecutionContext context, CancellationToken cancellationToken);
}
@@ -3,27 +3,33 @@ using StellaOps.TaskRunner.Core.Planning;

namespace StellaOps.TaskRunner.Core.Execution;

public sealed record PackRunState(
    string RunId,
    string PlanHash,
    TaskPackPlanFailurePolicy FailurePolicy,
    DateTimeOffset CreatedAt,
    DateTimeOffset UpdatedAt,
    IReadOnlyDictionary<string, PackRunStepStateRecord> Steps)
public sealed record PackRunState(
    string RunId,
    string PlanHash,
    TaskPackPlan Plan,
    TaskPackPlanFailurePolicy FailurePolicy,
    DateTimeOffset RequestedAt,
    DateTimeOffset CreatedAt,
    DateTimeOffset UpdatedAt,
    IReadOnlyDictionary<string, PackRunStepStateRecord> Steps)
{
    public static PackRunState Create(
        string runId,
        string planHash,
        TaskPackPlanFailurePolicy failurePolicy,
        IReadOnlyDictionary<string, PackRunStepStateRecord> steps,
        DateTimeOffset timestamp)
        => new(
            runId,
            planHash,
            failurePolicy,
            timestamp,
            timestamp,
            new ReadOnlyDictionary<string, PackRunStepStateRecord>(new Dictionary<string, PackRunStepStateRecord>(steps, StringComparer.Ordinal)));
        string planHash,
        TaskPackPlan plan,
        TaskPackPlanFailurePolicy failurePolicy,
        DateTimeOffset requestedAt,
        IReadOnlyDictionary<string, PackRunStepStateRecord> steps,
        DateTimeOffset timestamp)
        => new(
            runId,
            planHash,
            plan,
            failurePolicy,
            requestedAt,
            timestamp,
            timestamp,
            new ReadOnlyDictionary<string, PackRunStepStateRecord>(new Dictionary<string, PackRunStepStateRecord>(steps, StringComparer.Ordinal)));
}

public sealed record PackRunStepStateRecord(
@@ -110,18 +110,20 @@ public sealed class FilePackRunStateStore : IPackRunStateStore
        return result;
    }

    private sealed record StateDocument(
        string RunId,
        string PlanHash,
        TaskPackPlanFailurePolicy FailurePolicy,
        DateTimeOffset CreatedAt,
        DateTimeOffset UpdatedAt,
        IReadOnlyList<StepDocument> Steps)
    {
        public static StateDocument FromDomain(PackRunState state)
        {
            var steps = state.Steps.Values
                .OrderBy(step => step.StepId, StringComparer.Ordinal)
    private sealed record StateDocument(
        string RunId,
        string PlanHash,
        TaskPackPlan Plan,
        TaskPackPlanFailurePolicy FailurePolicy,
        DateTimeOffset RequestedAt,
        DateTimeOffset CreatedAt,
        DateTimeOffset UpdatedAt,
        IReadOnlyList<StepDocument> Steps)
    {
        public static StateDocument FromDomain(PackRunState state)
        {
            var steps = state.Steps.Values
                .OrderBy(step => step.StepId, StringComparer.Ordinal)
                .Select(step => new StepDocument(
                    step.StepId,
                    step.Kind,
@@ -137,15 +139,17 @@ public sealed class FilePackRunStateStore : IPackRunStateStore
                    step.StatusReason))
                .ToList();

            return new StateDocument(
                state.RunId,
                state.PlanHash,
                state.FailurePolicy,
                state.CreatedAt,
                state.UpdatedAt,
                steps);
        }

            return new StateDocument(
                state.RunId,
                state.PlanHash,
                state.Plan,
                state.FailurePolicy,
                state.RequestedAt,
                state.CreatedAt,
                state.UpdatedAt,
                steps);
        }

        public PackRunState ToDomain()
        {
            var steps = Steps.ToDictionary(
@@ -165,14 +169,16 @@ public sealed class FilePackRunStateStore : IPackRunStateStore
                    step.StatusReason),
                StringComparer.Ordinal);

            return new PackRunState(
                RunId,
                PlanHash,
                FailurePolicy,
                CreatedAt,
                UpdatedAt,
                steps);
        }
            return new PackRunState(
                RunId,
                PlanHash,
                Plan,
                FailurePolicy,
                RequestedAt,
                CreatedAt,
                UpdatedAt,
                steps);
        }
    }

    private sealed record StepDocument(
@@ -4,13 +4,13 @@ using StellaOps.AirGap.Policy;
using StellaOps.TaskRunner.Core.Execution;
using StellaOps.TaskRunner.Core.Planning;
using StellaOps.TaskRunner.Core.TaskPacks;

namespace StellaOps.TaskRunner.Infrastructure.Execution;

public sealed class FilesystemPackRunDispatcher : IPackRunJobDispatcher
{
    private readonly string queuePath;
    private readonly string archivePath;

namespace StellaOps.TaskRunner.Infrastructure.Execution;

public sealed class FilesystemPackRunDispatcher : IPackRunJobDispatcher, IPackRunJobScheduler
{
    private readonly string queuePath;
    private readonly string archivePath;
    private readonly TaskPackManifestLoader manifestLoader = new();
    private readonly TaskPackPlanner planner;
    private readonly JsonSerializerOptions serializerOptions = new(JsonSerializerDefaults.Web);
@@ -30,11 +30,11 @@ public sealed class FilesystemPackRunDispatcher : IPackRunJobDispatcher
            .OrderBy(path => path, StringComparer.Ordinal)
            .ToArray();

        foreach (var file in files)
        {
            cancellationToken.ThrowIfCancellationRequested();

            try
        foreach (var file in files)
        {
            cancellationToken.ThrowIfCancellationRequested();

            try
            {
                var jobJson = await File.ReadAllTextAsync(file, cancellationToken).ConfigureAwait(false);
                var job = JsonSerializer.Deserialize<JobEnvelope>(jobJson, serializerOptions);
@@ -43,38 +43,69 @@ public sealed class FilesystemPackRunDispatcher : IPackRunJobDispatcher
                    continue;
                }

                var manifestPath = ResolvePath(queuePath, job.ManifestPath);
                var inputsPath = job.InputsPath is null ? null : ResolvePath(queuePath, job.InputsPath);

                var manifest = manifestLoader.Load(manifestPath);
                var inputs = await LoadInputsAsync(inputsPath, cancellationToken).ConfigureAwait(false);
                var planResult = planner.Plan(manifest, inputs);
                if (!planResult.Success || planResult.Plan is null)
                {
                    throw new InvalidOperationException($"Failed to plan pack for run {job.RunId}: {string.Join(';', planResult.Errors.Select(e => e.Message))}");
                }

                var archiveFile = Path.Combine(archivePath, Path.GetFileName(file));
                File.Move(file, archiveFile, overwrite: true);

                var requestedAt = job.RequestedAt ?? DateTimeOffset.UtcNow;
                return new PackRunExecutionContext(job.RunId ?? Guid.NewGuid().ToString("n"), planResult.Plan, requestedAt);
            }
            catch (Exception ex)
            {
                TaskPackPlan? plan = job.Plan;
                if (plan is null)
                {
                    if (string.IsNullOrWhiteSpace(job.ManifestPath))
                    {
                        continue;
                    }

                    var manifestPath = ResolvePath(queuePath, job.ManifestPath);
                    var inputsPath = job.InputsPath is null ? null : ResolvePath(queuePath, job.InputsPath);

                    var manifest = manifestLoader.Load(manifestPath);
                    var inputs = await LoadInputsAsync(inputsPath, cancellationToken).ConfigureAwait(false);
                    var planResult = planner.Plan(manifest, inputs);
                    if (!planResult.Success || planResult.Plan is null)
                    {
                        throw new InvalidOperationException($"Failed to plan pack for run {job.RunId}: {string.Join(';', planResult.Errors.Select(e => e.Message))}");
                    }

                    plan = planResult.Plan;
                }

                var archiveFile = Path.Combine(archivePath, Path.GetFileName(file));
                File.Move(file, archiveFile, overwrite: true);

                var requestedAt = job.RequestedAt ?? DateTimeOffset.UtcNow;
                var runId = string.IsNullOrWhiteSpace(job.RunId) ? Guid.NewGuid().ToString("n") : job.RunId;
                return new PackRunExecutionContext(runId, plan, requestedAt);
            }
            catch (Exception ex)
            {
                var failedPath = file + ".failed";
                File.Move(file, failedPath, overwrite: true);
                Console.Error.WriteLine($"Failed to dequeue job '{file}': {ex.Message}");
            }
        }

        return null;
    }

    private static string ResolvePath(string root, string relative)
        => Path.IsPathRooted(relative) ? relative : Path.Combine(root, relative);

    private static async Task<IDictionary<string, JsonNode?>> LoadInputsAsync(string? path, CancellationToken cancellationToken)
        return null;
    }

    public async Task ScheduleAsync(PackRunExecutionContext context, CancellationToken cancellationToken)
    {
        ArgumentNullException.ThrowIfNull(context);

        var envelope = new JobEnvelope(
            context.RunId,
            ManifestPath: null,
            InputsPath: null,
            context.RequestedAt,
            context.Plan);

        Directory.CreateDirectory(queuePath);
        var safeRunId = string.IsNullOrWhiteSpace(context.RunId) ? Guid.NewGuid().ToString("n") : SanitizeFileName(context.RunId);
        var fileName = $"{safeRunId}-{DateTimeOffset.UtcNow:yyyyMMddHHmmssfff}.json";
        var path = Path.Combine(queuePath, fileName);
        var json = JsonSerializer.Serialize(envelope, serializerOptions);
        await File.WriteAllTextAsync(path, json, cancellationToken).ConfigureAwait(false);
    }

    private static string ResolvePath(string root, string relative)
        => Path.IsPathRooted(relative) ? relative : Path.Combine(root, relative);

    private static async Task<IDictionary<string, JsonNode?>> LoadInputsAsync(string? path, CancellationToken cancellationToken)
    {
        if (string.IsNullOrWhiteSpace(path) || !File.Exists(path))
        {
@@ -92,7 +123,23 @@ public sealed class FilesystemPackRunDispatcher : IPackRunJobDispatcher
            pair => pair.Key,
            pair => pair.Value,
            StringComparer.Ordinal);
    }

    private sealed record JobEnvelope(string? RunId, string ManifestPath, string? InputsPath, DateTimeOffset? RequestedAt);
}
    }

    private sealed record JobEnvelope(
        string? RunId,
        string? ManifestPath,
        string? InputsPath,
        DateTimeOffset? RequestedAt,
        TaskPackPlan? Plan);

    private static string SanitizeFileName(string value)
    {
        var safe = value.Trim();
        foreach (var invalid in Path.GetInvalidFileNameChars())
        {
            safe = safe.Replace(invalid, '_');
        }

        return safe;
    }
}
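For reference, a minimal self-contained sketch of the queue-file naming that `ScheduleAsync` applies when it requeues a resumed run; the sanitization loop and timestamp suffix mirror the dispatcher above, and the run id used here is purely illustrative.

```csharp
// Sketch only: reproduces the dispatcher's file-name derivation in isolation.
// "pack-run/42" is a hypothetical run id chosen to show invalid characters being replaced.
using System;
using System.IO;

class QueueFileNameSketch
{
    static string SanitizeFileName(string value)
    {
        var safe = value.Trim();
        foreach (var invalid in Path.GetInvalidFileNameChars())
        {
            safe = safe.Replace(invalid, '_');
        }

        return safe;
    }

    static void Main()
    {
        var runId = "pack-run/42";
        var fileName = $"{SanitizeFileName(runId)}-{DateTimeOffset.UtcNow:yyyyMMddHHmmssfff}.json";
        Console.WriteLine(fileName); // e.g. pack-run_42-20251106093000123.json
    }
}
```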
@@ -0,0 +1,40 @@
using Microsoft.Extensions.Logging;
using StellaOps.TaskRunner.Core.Execution;
using StellaOps.TaskRunner.Core.Planning;

namespace StellaOps.TaskRunner.Infrastructure.Execution;

public sealed class LoggingPackRunArtifactUploader : IPackRunArtifactUploader
{
    private readonly ILogger<LoggingPackRunArtifactUploader> _logger;

    public LoggingPackRunArtifactUploader(ILogger<LoggingPackRunArtifactUploader> logger)
    {
        _logger = logger ?? throw new ArgumentNullException(nameof(logger));
    }

    public Task UploadAsync(
        PackRunExecutionContext context,
        PackRunState state,
        IReadOnlyList<TaskPackPlanOutput> outputs,
        CancellationToken cancellationToken)
    {
        if (outputs.Count == 0)
        {
            return Task.CompletedTask;
        }

        foreach (var output in outputs)
        {
            var path = output.Path?.Value?.ToString() ?? "(dynamic)";
            _logger.LogInformation(
                "Pack run {RunId} scheduled artifact upload for output {Output} (type={Type}, path={Path}).",
                context.RunId,
                output.Name,
                output.Type,
                path);
        }

        return Task.CompletedTask;
    }
}
@@ -0,0 +1,117 @@
using Microsoft.Extensions.Logging;
using StellaOps.TaskRunner.Core.Execution;
using StellaOps.TaskRunner.Core.Planning;

namespace StellaOps.TaskRunner.Infrastructure.Execution;

public sealed class PackRunApprovalDecisionService
{
    private readonly IPackRunApprovalStore _approvalStore;
    private readonly IPackRunStateStore _stateStore;
    private readonly IPackRunJobScheduler _scheduler;
    private readonly ILogger<PackRunApprovalDecisionService> _logger;

    public PackRunApprovalDecisionService(
        IPackRunApprovalStore approvalStore,
        IPackRunStateStore stateStore,
        IPackRunJobScheduler scheduler,
        ILogger<PackRunApprovalDecisionService> logger)
    {
        _approvalStore = approvalStore ?? throw new ArgumentNullException(nameof(approvalStore));
        _stateStore = stateStore ?? throw new ArgumentNullException(nameof(stateStore));
        _scheduler = scheduler ?? throw new ArgumentNullException(nameof(scheduler));
        _logger = logger ?? throw new ArgumentNullException(nameof(logger));
    }

    public async Task<PackRunApprovalDecisionResult> ApplyAsync(
        PackRunApprovalDecisionRequest request,
        CancellationToken cancellationToken)
    {
        ArgumentNullException.ThrowIfNull(request);
        ArgumentException.ThrowIfNullOrWhiteSpace(request.RunId);
        ArgumentException.ThrowIfNullOrWhiteSpace(request.ApprovalId);

        var runId = request.RunId.Trim();
        var approvalId = request.ApprovalId.Trim();

        var state = await _stateStore.GetAsync(runId, cancellationToken).ConfigureAwait(false);
        if (state is null)
        {
            _logger.LogWarning("Approval decision for run {RunId} rejected – run state not found.", runId);
            return PackRunApprovalDecisionResult.NotFound;
        }

        var approvals = await _approvalStore.GetAsync(runId, cancellationToken).ConfigureAwait(false);
        if (approvals.Count == 0)
        {
            _logger.LogWarning("Approval decision for run {RunId} rejected – approval state not found.", runId);
            return PackRunApprovalDecisionResult.NotFound;
        }

        var requestedAt = state.RequestedAt != default ? state.RequestedAt : state.CreatedAt;
        var coordinator = PackRunApprovalCoordinator.Restore(state.Plan, approvals, requestedAt);

        ApprovalActionResult actionResult;
        var now = DateTimeOffset.UtcNow;

        switch (request.Decision)
        {
            case PackRunApprovalDecisionType.Approved:
                actionResult = coordinator.Approve(approvalId, request.ActorId ?? "system", now, request.Summary);
                break;

            case PackRunApprovalDecisionType.Rejected:
                actionResult = coordinator.Reject(approvalId, request.ActorId ?? "system", now, request.Summary);
                break;

            case PackRunApprovalDecisionType.Expired:
                actionResult = coordinator.Expire(approvalId, now, request.Summary);
                break;

            default:
                throw new ArgumentOutOfRangeException(nameof(request.Decision), request.Decision, "Unsupported approval decision.");
        }

        await _approvalStore.UpdateAsync(runId, actionResult.State, cancellationToken).ConfigureAwait(false);

        _logger.LogInformation(
            "Applied approval decision {Decision} for run {RunId} (approval {ApprovalId}, actor={ActorId}).",
            request.Decision,
            runId,
            approvalId,
            request.ActorId ?? "(system)");

        if (actionResult.ShouldResumeRun && request.Decision == PackRunApprovalDecisionType.Approved)
        {
            var context = new PackRunExecutionContext(runId, state.Plan, requestedAt);
            await _scheduler.ScheduleAsync(context, cancellationToken).ConfigureAwait(false);
            _logger.LogInformation("Scheduled run {RunId} for resume after approvals completed.", runId);
            return PackRunApprovalDecisionResult.Resumed;
        }

        return PackRunApprovalDecisionResult.Applied;
    }
}

public sealed record PackRunApprovalDecisionRequest(
    string RunId,
    string ApprovalId,
    PackRunApprovalDecisionType Decision,
    string? ActorId,
    string? Summary);

public enum PackRunApprovalDecisionType
{
    Approved,
    Rejected,
    Expired
}

public sealed record PackRunApprovalDecisionResult(string Status)
{
    public static PackRunApprovalDecisionResult NotFound { get; } = new("not_found");
    public static PackRunApprovalDecisionResult Applied { get; } = new("applied");
    public static PackRunApprovalDecisionResult Resumed { get; } = new("resumed");

    public bool ShouldResume => ReferenceEquals(this, Resumed);
}
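A minimal sketch of how a caller might drive the service and branch on the outcome, assuming the types defined in the file above; the `service` instance is whatever `PackRunApprovalDecisionService` is resolved from DI, and the run/approval ids are illustrative.

```csharp
// Sketch only: apply a decision and translate the result into a simple exit code.
async Task<int> DecideAsync(PackRunApprovalDecisionService service, CancellationToken ct)
{
    var result = await service.ApplyAsync(
        new PackRunApprovalDecisionRequest(
            RunId: "run-1",
            ApprovalId: "security-review",
            Decision: PackRunApprovalDecisionType.Approved,
            ActorId: "approver@example.com",
            Summary: "LGTM"),
        ct);

    return result.Status switch
    {
        "resumed" => 0,   // last gate cleared; run requeued for execution
        "applied" => 0,   // decision recorded; other gates still pending
        "not_found" => 1, // unknown run or missing approval state
        _ => 1
    };
}
```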
@@ -1,4 +1,5 @@
using StellaOps.TaskRunner.Core.Execution;
using System.Text.Json.Nodes;
using StellaOps.TaskRunner.Core.Execution;
using StellaOps.TaskRunner.Core.Planning;
using StellaOps.TaskRunner.Infrastructure.Execution;

@@ -60,12 +61,34 @@ public sealed class FilePackRunStateStoreTests

    private static PackRunState CreateState(string runId)
    {
        var failurePolicy = new TaskPackPlanFailurePolicy(MaxAttempts: 3, BackoffSeconds: 30, ContinueOnError: false);
        var steps = new Dictionary<string, PackRunStepStateRecord>(StringComparer.Ordinal)
        {
            ["step-a"] = new PackRunStepStateRecord(
                StepId: "step-a",
                Kind: PackRunStepKind.Run,
        var failurePolicy = new TaskPackPlanFailurePolicy(MaxAttempts: 3, BackoffSeconds: 30, ContinueOnError: false);
        var metadata = new TaskPackPlanMetadata("sample", "1.0.0", null, Array.Empty<string>());
        var parameters = new Dictionary<string, TaskPackPlanParameterValue>(StringComparer.Ordinal);
        var stepPlan = new TaskPackPlanStep(
            Id: "step-a",
            TemplateId: "run/image",
            Name: "Run step",
            Type: "run",
            Enabled: true,
            Uses: "builtin/run",
            Parameters: parameters,
            ApprovalId: null,
            GateMessage: null,
            Children: Array.Empty<TaskPackPlanStep>());
        var plan = new TaskPackPlan(
            metadata,
            new Dictionary<string, JsonNode?>(StringComparer.Ordinal),
            new[] { stepPlan },
            "hash-123",
            Array.Empty<TaskPackPlanApproval>(),
            Array.Empty<TaskPackPlanSecret>(),
            Array.Empty<TaskPackPlanOutput>(),
            failurePolicy);
        var steps = new Dictionary<string, PackRunStepStateRecord>(StringComparer.Ordinal)
        {
            ["step-a"] = new PackRunStepStateRecord(
                StepId: "step-a",
                Kind: PackRunStepKind.Run,
                Enabled: true,
                ContinueOnError: false,
                MaxParallel: null,
@@ -75,10 +98,11 @@ public sealed class FilePackRunStateStoreTests
                Attempts: 1,
                LastTransitionAt: DateTimeOffset.UtcNow,
                NextAttemptAt: null,
                StatusReason: null)
        };

        return PackRunState.Create(runId, "hash-123", failurePolicy, steps, DateTimeOffset.UtcNow);
                StatusReason: null)
        };

        var timestamp = DateTimeOffset.UtcNow;
        return PackRunState.Create(runId, "hash-123", plan, failurePolicy, timestamp, steps, timestamp);
    }

    private static string CreateTempDirectory()
@@ -0,0 +1,211 @@
using System.Text.Json.Nodes;
using Microsoft.Extensions.Logging.Abstractions;
using StellaOps.TaskRunner.Core.Execution;
using StellaOps.TaskRunner.Core.Planning;
using StellaOps.TaskRunner.Infrastructure.Execution;

namespace StellaOps.TaskRunner.Tests;

public sealed class PackRunApprovalDecisionServiceTests
{
    [Fact]
    public async Task ApplyAsync_ApprovingLastGateSchedulesResume()
    {
        var plan = TestPlanFactory.CreatePlan();
        var state = TestPlanFactory.CreateState("run-1", plan);
        var approval = new PackRunApprovalState(
            "security-review",
            new[] { "Packs.Approve" },
            new[] { "step-a" },
            Array.Empty<string>(),
            null,
            DateTimeOffset.UtcNow.AddMinutes(-5),
            PackRunApprovalStatus.Pending);

        var approvalStore = new InMemoryApprovalStore(new Dictionary<string, IReadOnlyList<PackRunApprovalState>>
        {
            ["run-1"] = new List<PackRunApprovalState> { approval }
        });
        var stateStore = new InMemoryStateStore(new Dictionary<string, PackRunState>
        {
            ["run-1"] = state
        });
        var scheduler = new RecordingScheduler();

        var service = new PackRunApprovalDecisionService(
            approvalStore,
            stateStore,
            scheduler,
            NullLogger<PackRunApprovalDecisionService>.Instance);

        var result = await service.ApplyAsync(
            new PackRunApprovalDecisionRequest("run-1", "security-review", PackRunApprovalDecisionType.Approved, "approver@example.com", "LGTM"),
            CancellationToken.None);

        Assert.Equal("resumed", result.Status);
        Assert.True(scheduler.ScheduledContexts.TryGetValue("run-1", out var context));
        Assert.Equal(plan.Hash, context!.Plan.Hash);
        Assert.Equal(PackRunApprovalStatus.Approved, approvalStore.LastUpdated?.Status);
    }

    [Fact]
    public async Task ApplyAsync_ReturnsNotFoundWhenStateMissing()
    {
        var approvalStore = new InMemoryApprovalStore(new Dictionary<string, IReadOnlyList<PackRunApprovalState>>());
        var stateStore = new InMemoryStateStore(new Dictionary<string, PackRunState>());
        var scheduler = new RecordingScheduler();

        var service = new PackRunApprovalDecisionService(
            approvalStore,
            stateStore,
            scheduler,
            NullLogger<PackRunApprovalDecisionService>.Instance);

        var result = await service.ApplyAsync(
            new PackRunApprovalDecisionRequest("missing", "approval", PackRunApprovalDecisionType.Approved, "actor", null),
            CancellationToken.None);

        Assert.Equal("not_found", result.Status);
        Assert.False(scheduler.ScheduledContexts.Any());
    }

    private sealed class InMemoryApprovalStore : IPackRunApprovalStore
    {
        private readonly Dictionary<string, List<PackRunApprovalState>> _approvals;
        public PackRunApprovalState? LastUpdated { get; private set; }

        public InMemoryApprovalStore(IDictionary<string, IReadOnlyList<PackRunApprovalState>> seed)
        {
            _approvals = seed.ToDictionary(
                pair => pair.Key,
                pair => pair.Value.ToList(),
                StringComparer.Ordinal);
        }

        public Task SaveAsync(string runId, IReadOnlyList<PackRunApprovalState> approvals, CancellationToken cancellationToken)
        {
            _approvals[runId] = approvals.ToList();
            return Task.CompletedTask;
        }

        public Task<IReadOnlyList<PackRunApprovalState>> GetAsync(string runId, CancellationToken cancellationToken)
        {
            if (_approvals.TryGetValue(runId, out var existing))
            {
                return Task.FromResult<IReadOnlyList<PackRunApprovalState>>(existing);
            }

            return Task.FromResult<IReadOnlyList<PackRunApprovalState>>(Array.Empty<PackRunApprovalState>());
        }

        public Task UpdateAsync(string runId, PackRunApprovalState approval, CancellationToken cancellationToken)
        {
            if (_approvals.TryGetValue(runId, out var list))
            {
                for (var i = 0; i < list.Count; i++)
                {
                    if (string.Equals(list[i].ApprovalId, approval.ApprovalId, StringComparison.Ordinal))
                    {
                        list[i] = approval;
                        LastUpdated = approval;
                        break;
                    }
                }
            }

            return Task.CompletedTask;
        }
    }

    private sealed class InMemoryStateStore : IPackRunStateStore
    {
        private readonly Dictionary<string, PackRunState> _states;

        public InMemoryStateStore(IDictionary<string, PackRunState> states)
        {
            _states = new Dictionary<string, PackRunState>(states, StringComparer.Ordinal);
        }

        public Task<PackRunState?> GetAsync(string runId, CancellationToken cancellationToken)
        {
            _states.TryGetValue(runId, out var state);
            return Task.FromResult(state);
        }

        public Task SaveAsync(PackRunState state, CancellationToken cancellationToken)
        {
            _states[state.RunId] = state;
            return Task.CompletedTask;
        }

        public Task<IReadOnlyList<PackRunState>> ListAsync(CancellationToken cancellationToken)
            => Task.FromResult<IReadOnlyList<PackRunState>>(_states.Values.ToList());
    }

    private sealed class RecordingScheduler : IPackRunJobScheduler
    {
        public Dictionary<string, PackRunExecutionContext> ScheduledContexts { get; } = new(StringComparer.Ordinal);

        public Task ScheduleAsync(PackRunExecutionContext context, CancellationToken cancellationToken)
        {
            ScheduledContexts[context.RunId] = context;
            return Task.CompletedTask;
        }
    }
}

internal static class TestPlanFactory
{
    public static TaskPackPlan CreatePlan()
    {
        var metadata = new TaskPackPlanMetadata("sample", "1.0.0", null, Array.Empty<string>());
        var parameters = new Dictionary<string, TaskPackPlanParameterValue>(StringComparer.Ordinal);
        var step = new TaskPackPlanStep(
            Id: "step-a",
            TemplateId: "run/image",
            Name: "Run step",
            Type: "run",
            Enabled: true,
            Uses: "builtin/run",
            Parameters: parameters,
            ApprovalId: "security-review",
            GateMessage: null,
            Children: Array.Empty<TaskPackPlanStep>());

        return new TaskPackPlan(
            metadata,
            new Dictionary<string, JsonNode?>(StringComparer.Ordinal),
            new[] { step },
            "hash-123",
            new[]
            {
                new TaskPackPlanApproval("security-review", new[] { "Packs.Approve" }, null, null)
            },
            Array.Empty<TaskPackPlanSecret>(),
            Array.Empty<TaskPackPlanOutput>(),
            new TaskPackPlanFailurePolicy(3, 30, false));
    }

    public static PackRunState CreateState(string runId, TaskPackPlan plan)
    {
        var timestamp = DateTimeOffset.UtcNow;
        var steps = new Dictionary<string, PackRunStepStateRecord>(StringComparer.Ordinal)
        {
            ["step-a"] = new PackRunStepStateRecord(
                "step-a",
                PackRunStepKind.GateApproval,
                true,
                false,
                null,
                "security-review",
                null,
                PackRunStepExecutionStatus.Pending,
                0,
                null,
                null,
                null)
        };

        return PackRunState.Create(runId, plan.Hash, plan, plan.FailurePolicy ?? PackRunExecutionGraph.DefaultFailurePolicy, timestamp, steps, timestamp);
    }
}
@@ -129,7 +129,7 @@ public sealed class PackRunGateStateUpdaterTests
            StatusReason: reason);
    }

        return PackRunState.Create("run-1", plan.Hash, graph.FailurePolicy, steps, RequestedAt);
        return PackRunState.Create("run-1", plan.Hash, plan, graph.FailurePolicy, RequestedAt, steps, RequestedAt);
    }

    private static IEnumerable<PackRunExecutionStep> EnumerateSteps(IReadOnlyList<PackRunExecutionStep> steps)
@@ -12,15 +12,27 @@ var builder = WebApplication.CreateBuilder(args);

builder.Services.Configure<TaskRunnerServiceOptions>(builder.Configuration.GetSection("TaskRunner"));
builder.Services.AddSingleton<TaskPackManifestLoader>();
builder.Services.AddSingleton<TaskPackPlanner>();
builder.Services.AddSingleton<PackRunSimulationEngine>();
builder.Services.AddSingleton<PackRunExecutionGraphBuilder>();
builder.Services.AddSingleton<IPackRunStateStore>(sp =>
{
    var options = sp.GetRequiredService<IOptions<TaskRunnerServiceOptions>>().Value;
    return new FilePackRunStateStore(options.RunStatePath);
});
builder.Services.AddOpenApi();
builder.Services.AddSingleton<TaskPackPlanner>();
builder.Services.AddSingleton<PackRunSimulationEngine>();
builder.Services.AddSingleton<PackRunExecutionGraphBuilder>();
builder.Services.AddSingleton<IPackRunApprovalStore>(sp =>
{
    var options = sp.GetRequiredService<IOptions<TaskRunnerServiceOptions>>().Value;
    return new FilePackRunApprovalStore(options.ApprovalStorePath);
});
builder.Services.AddSingleton<IPackRunStateStore>(sp =>
{
    var options = sp.GetRequiredService<IOptions<TaskRunnerServiceOptions>>().Value;
    return new FilePackRunStateStore(options.RunStatePath);
});
builder.Services.AddSingleton(sp =>
{
    var options = sp.GetRequiredService<IOptions<TaskRunnerServiceOptions>>().Value;
    return new FilesystemPackRunDispatcher(options.QueuePath, options.ArchivePath);
});
builder.Services.AddSingleton<IPackRunJobScheduler>(sp => sp.GetRequiredService<FilesystemPackRunDispatcher>());
builder.Services.AddSingleton<PackRunApprovalDecisionService>();
builder.Services.AddOpenApi();

var app = builder.Build();
@@ -67,11 +79,11 @@ app.MapPost("/v1/task-runner/simulations", async (
    return Results.Ok(response);
}).WithName("SimulateTaskPack");

app.MapGet("/v1/task-runner/runs/{runId}", async (
    string runId,
    IPackRunStateStore stateStore,
    CancellationToken cancellationToken) =>
{
app.MapGet("/v1/task-runner/runs/{runId}", async (
    string runId,
    IPackRunStateStore stateStore,
    CancellationToken cancellationToken) =>
{
    if (string.IsNullOrWhiteSpace(runId))
    {
        return Results.BadRequest(new { error = "runId is required." });
@@ -83,10 +95,43 @@ app.MapGet("/v1/task-runner/runs/{runId}", async (
        return Results.NotFound();
    }

    return Results.Ok(RunStateMapper.ToResponse(state));
}).WithName("GetRunState");

app.MapGet("/", () => Results.Redirect("/openapi"));
    return Results.Ok(RunStateMapper.ToResponse(state));
}).WithName("GetRunState");

app.MapPost("/v1/task-runner/runs/{runId}/approvals/{approvalId}", async (
    string runId,
    string approvalId,
    [FromBody] ApprovalDecisionDto request,
    PackRunApprovalDecisionService decisionService,
    CancellationToken cancellationToken) =>
{
    if (request is null)
    {
        return Results.BadRequest(new { error = "Request body is required." });
    }

    if (!Enum.TryParse<PackRunApprovalDecisionType>(request.Decision, ignoreCase: true, out var decisionType))
    {
        return Results.BadRequest(new { error = "Invalid decision. Expected approved, rejected, or expired." });
    }

    var result = await decisionService.ApplyAsync(
        new PackRunApprovalDecisionRequest(runId, approvalId, decisionType, request.ActorId, request.Summary),
        cancellationToken).ConfigureAwait(false);

    if (ReferenceEquals(result, PackRunApprovalDecisionResult.NotFound))
    {
        return Results.NotFound();
    }

    return Results.Ok(new
    {
        status = result.Status,
        resumed = result.ShouldResume
    });
}).WithName("ApplyApprovalDecision");

app.MapGet("/", () => Results.Redirect("/openapi"));

app.Run();
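A minimal sketch of calling the approval decision endpoint mapped above; the base address is a placeholder, authentication headers are omitted, and the run/approval ids are illustrative.

```csharp
// Sketch only: post an approval decision and print the raw response.
using System;
using System.Net.Http;
using System.Net.Http.Json;
using System.Threading.Tasks;

class ApprovalDecisionClientSketch
{
    static async Task Main()
    {
        using var client = new HttpClient { BaseAddress = new Uri("http://localhost:5000") };

        // Body mirrors ApprovalDecisionDto: decision, actorId, summary.
        var response = await client.PostAsJsonAsync(
            "/v1/task-runner/runs/run-1/approvals/security-review",
            new { decision = "approved", actorId = "approver@example.com", summary = "LGTM" });

        // 200 => { "status": "applied"|"resumed", "resumed": bool }; 404 => unknown run or approval.
        Console.WriteLine($"{(int)response.StatusCode}: {await response.Content.ReadAsStringAsync()}");
    }
}
```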
@@ -146,19 +191,21 @@ internal sealed record RunStateResponse(
    DateTimeOffset UpdatedAt,
    IReadOnlyList<RunStateStepResponse> Steps);

internal sealed record RunStateStepResponse(
    string StepId,
    string Kind,
    bool Enabled,
    bool ContinueOnError,
    int? MaxParallel,
    string? ApprovalId,
    string? GateMessage,
    string Status,
    int Attempts,
    DateTimeOffset? LastTransitionAt,
    DateTimeOffset? NextAttemptAt,
    string? StatusReason);
internal sealed record RunStateStepResponse(
    string StepId,
    string Kind,
    bool Enabled,
    bool ContinueOnError,
    int? MaxParallel,
    string? ApprovalId,
    string? GateMessage,
    string Status,
    int Attempts,
    DateTimeOffset? LastTransitionAt,
    DateTimeOffset? NextAttemptAt,
    string? StatusReason);

internal sealed record ApprovalDecisionDto(string Decision, string? ActorId, string? Summary);

internal static class SimulationMapper
{
@@ -1,6 +1,9 @@
namespace StellaOps.TaskRunner.WebService;

public sealed class TaskRunnerServiceOptions
{
    public string RunStatePath { get; set; } = Path.Combine(AppContext.BaseDirectory, "state", "runs");
}
public sealed class TaskRunnerServiceOptions
{
    public string RunStatePath { get; set; } = Path.Combine(AppContext.BaseDirectory, "state", "runs");
    public string ApprovalStorePath { get; set; } = Path.Combine(AppContext.BaseDirectory, "approvals");
    public string QueuePath { get; set; } = Path.Combine(AppContext.BaseDirectory, "queue");
    public string ArchivePath { get; set; } = Path.Combine(AppContext.BaseDirectory, "queue", "archive");
}
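A minimal sketch of overriding those defaults through the "TaskRunner" configuration section that Program.cs binds above; the paths are illustrative, and the snippet assumes top-level statements with implicit usings plus the Microsoft.Extensions.Configuration binder package.

```csharp
// Sketch only: bind hypothetical path overrides onto TaskRunnerServiceOptions.
using Microsoft.Extensions.Configuration;

var configuration = new ConfigurationBuilder()
    .AddInMemoryCollection(new Dictionary<string, string?>
    {
        ["TaskRunner:RunStatePath"] = "/var/lib/taskrunner/state/runs",
        ["TaskRunner:ApprovalStorePath"] = "/var/lib/taskrunner/approvals",
        ["TaskRunner:QueuePath"] = "/var/lib/taskrunner/queue",
        ["TaskRunner:ArchivePath"] = "/var/lib/taskrunner/queue/archive"
    })
    .Build();

var options = new TaskRunnerServiceOptions();
configuration.GetSection("TaskRunner").Bind(options);
```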
@@ -18,12 +18,14 @@ builder.Services.AddSingleton<IPackRunApprovalStore>(sp =>
    return new FilePackRunApprovalStore(options.Value.ApprovalStorePath);
});

builder.Services.AddSingleton<IPackRunJobDispatcher>(sp =>
builder.Services.AddSingleton(sp =>
{
    var options = sp.GetRequiredService<IOptions<PackRunWorkerOptions>>();
    var egressPolicy = sp.GetRequiredService<IEgressPolicy>();
    return new FilesystemPackRunDispatcher(options.Value.QueuePath, options.Value.ArchivePath, egressPolicy);
});
builder.Services.AddSingleton<IPackRunJobDispatcher>(sp => sp.GetRequiredService<FilesystemPackRunDispatcher>());
builder.Services.AddSingleton<IPackRunJobScheduler>(sp => sp.GetRequiredService<FilesystemPackRunDispatcher>());

builder.Services.AddSingleton<IPackRunNotificationPublisher>(sp =>
{
@@ -49,6 +51,7 @@ builder.Services.AddSingleton<IPackRunStepExecutor, NoopPackRunStepExecutor>();
builder.Services.AddSingleton<PackRunExecutionGraphBuilder>();
builder.Services.AddSingleton<PackRunSimulationEngine>();
builder.Services.AddSingleton<PackRunProcessor>();
builder.Services.AddSingleton<IPackRunArtifactUploader, LoggingPackRunArtifactUploader>();
builder.Services.AddHostedService<PackRunWorkerService>();

var host = builder.Build();
@@ -15,31 +15,34 @@ public sealed class PackRunWorkerService : BackgroundService

    private readonly IPackRunJobDispatcher dispatcher;
    private readonly PackRunProcessor processor;
    private readonly PackRunWorkerOptions options;
    private readonly IPackRunStateStore stateStore;
    private readonly PackRunExecutionGraphBuilder graphBuilder;
    private readonly PackRunSimulationEngine simulationEngine;
    private readonly IPackRunStepExecutor executor;
    private readonly ILogger<PackRunWorkerService> logger;

    public PackRunWorkerService(
        IPackRunJobDispatcher dispatcher,
        PackRunProcessor processor,
        IPackRunStateStore stateStore,
        PackRunExecutionGraphBuilder graphBuilder,
        PackRunSimulationEngine simulationEngine,
        IPackRunStepExecutor executor,
        IOptions<PackRunWorkerOptions> options,
        ILogger<PackRunWorkerService> logger)
    {
        this.dispatcher = dispatcher ?? throw new ArgumentNullException(nameof(dispatcher));
        this.processor = processor ?? throw new ArgumentNullException(nameof(processor));
    private readonly PackRunWorkerOptions options;
    private readonly IPackRunStateStore stateStore;
    private readonly PackRunExecutionGraphBuilder graphBuilder;
    private readonly PackRunSimulationEngine simulationEngine;
    private readonly IPackRunStepExecutor executor;
    private readonly IPackRunArtifactUploader artifactUploader;
    private readonly ILogger<PackRunWorkerService> logger;

    public PackRunWorkerService(
        IPackRunJobDispatcher dispatcher,
        PackRunProcessor processor,
        IPackRunStateStore stateStore,
        PackRunExecutionGraphBuilder graphBuilder,
        PackRunSimulationEngine simulationEngine,
        IPackRunStepExecutor executor,
        IPackRunArtifactUploader artifactUploader,
        IOptions<PackRunWorkerOptions> options,
        ILogger<PackRunWorkerService> logger)
    {
        this.dispatcher = dispatcher ?? throw new ArgumentNullException(nameof(dispatcher));
        this.processor = processor ?? throw new ArgumentNullException(nameof(processor));
        this.stateStore = stateStore ?? throw new ArgumentNullException(nameof(stateStore));
        this.graphBuilder = graphBuilder ?? throw new ArgumentNullException(nameof(graphBuilder));
        this.simulationEngine = simulationEngine ?? throw new ArgumentNullException(nameof(simulationEngine));
        this.executor = executor ?? throw new ArgumentNullException(nameof(executor));
        this.options = options?.Value ?? throw new ArgumentNullException(nameof(options));
        this.logger = logger ?? throw new ArgumentNullException(nameof(logger));
        this.graphBuilder = graphBuilder ?? throw new ArgumentNullException(nameof(graphBuilder));
        this.simulationEngine = simulationEngine ?? throw new ArgumentNullException(nameof(simulationEngine));
        this.executor = executor ?? throw new ArgumentNullException(nameof(executor));
        this.artifactUploader = artifactUploader ?? throw new ArgumentNullException(nameof(artifactUploader));
        this.options = options?.Value ?? throw new ArgumentNullException(nameof(options));
        this.logger = logger ?? throw new ArgumentNullException(nameof(logger));
    }

    protected override async Task ExecuteAsync(CancellationToken stoppingToken)
@@ -100,14 +103,15 @@ public sealed class PackRunWorkerService : BackgroundService
        var updatedState = await ExecuteGraphAsync(context, graph, state, cancellationToken).ConfigureAwait(false);
        await stateStore.SaveAsync(updatedState, cancellationToken).ConfigureAwait(false);

        if (updatedState.Steps.Values.All(step => step.Status is PackRunStepExecutionStatus.Succeeded or PackRunStepExecutionStatus.Skipped))
        {
            logger.LogInformation("Run {RunId} finished successfully.", context.RunId);
        }
        else
        {
            logger.LogInformation("Run {RunId} paused with pending work.", context.RunId);
        }
        if (updatedState.Steps.Values.All(step => step.Status is PackRunStepExecutionStatus.Succeeded or PackRunStepExecutionStatus.Skipped))
        {
            logger.LogInformation("Run {RunId} finished successfully.", context.RunId);
            await artifactUploader.UploadAsync(context, updatedState, context.Plan.Outputs, cancellationToken).ConfigureAwait(false);
        }
        else
        {
            logger.LogInformation("Run {RunId} paused with pending work.", context.RunId);
        }
    }

    private async Task<PackRunState> CreateInitialStateAsync(
@@ -164,7 +168,14 @@ public sealed class PackRunWorkerService : BackgroundService
        }

        var failurePolicy = graph.FailurePolicy ?? PackRunExecutionGraph.DefaultFailurePolicy;
        var state = PackRunState.Create(context.RunId, context.Plan.Hash, failurePolicy, stepRecords, timestamp);
        var state = PackRunState.Create(
            context.RunId,
            context.Plan.Hash,
            context.Plan,
            failurePolicy,
            context.RequestedAt,
            stepRecords,
            timestamp);
        await stateStore.SaveAsync(state, cancellationToken).ConfigureAwait(false);
        return state;
    }
@@ -18,10 +18,11 @@

## Sprint 43 – Approvals, Notifications, Hardening
| ID | Status | Owner(s) | Depends on | Description | Exit Criteria |
|----|--------|----------|------------|-------------|---------------|
| TASKRUN-43-001 | DOING (2025-10-29) | Task Runner Guild | TASKRUN-42-001, NOTIFY-SVC-40-001 | Implement approvals workflow (resume after approval), notifications integration, remote artifact uploads, chaos resilience, secret injection, and audit logs. | Approvals/resume flow validated; notifications emitted; chaos tests documented; secrets redacted in logs; audit logs complete. |
> 2025-10-29: Starting approvals orchestration — defining persistence/workflow scaffolding, integrating plan insights for notifications, and staging resume hooks.
> 2025-10-29: Added approval coordinator + policy notification bridge with unit tests; ready to wire into worker execution/resume path.
|----|--------|----------|------------|-------------|---------------|
| TASKRUN-43-001 | DOING (2025-10-29) | Task Runner Guild | TASKRUN-42-001, NOTIFY-SVC-40-001 | Implement approvals workflow (resume after approval), notifications integration, remote artifact uploads, chaos resilience, secret injection, and audit logs. | Approvals/resume flow validated; notifications emitted; chaos tests documented; secrets redacted in logs; audit logs complete. |
> 2025-10-29: Starting approvals orchestration — defining persistence/workflow scaffolding, integrating plan insights for notifications, and staging resume hooks.
> 2025-10-29: Added approval coordinator + policy notification bridge with unit tests; ready to wire into worker execution/resume path.
> 2025-11-06: Added approval decision API with resume requeue, persisted plan snapshots, and artifact uploader hook (logging backend pending).

## Authority-Backed Scopes & Tenancy (Epic 14)
| ID | Status | Owner(s) | Depends on | Description | Exit Criteria |