Implement MongoDB-based storage for Pack Run approval, artifact, log, and state management
Some checks failed
Docs CI / lint-and-preview (push) Has been cancelled

- Added MongoPackRunApprovalStore for managing approval states with MongoDB.
- Introduced MongoPackRunArtifactUploader for uploading and storing artifacts.
- Created MongoPackRunLogStore to handle logging of pack run events.
- Developed MongoPackRunStateStore for persisting and retrieving pack run states.
- Implemented unit tests for MongoDB stores to ensure correct functionality.
- Added MongoTaskRunnerTestContext for setting up MongoDB test environment.
- Enhanced PackRunStateFactory to correctly initialize state with gate reasons.
This commit is contained in:
master
2025-11-07 10:01:35 +02:00
parent e5ffcd6535
commit a1ce3f74fa
122 changed files with 8730 additions and 914 deletions

View File

@@ -13,6 +13,8 @@ public sealed class AdvisoryAiServiceOptions
public AdvisoryAiQueueOptions Queue { get; set; } = new();
public AdvisoryAiStorageOptions Storage { get; set; } = new();
internal string ResolveQueueDirectory(string contentRoot)
{
ArgumentException.ThrowIfNullOrWhiteSpace(contentRoot);
@@ -31,9 +33,45 @@ public sealed class AdvisoryAiServiceOptions
Directory.CreateDirectory(path);
return path;
}
internal string ResolvePlanCacheDirectory(string contentRoot)
=> Storage.ResolvePlanCacheDirectory(contentRoot);
internal string ResolveOutputDirectory(string contentRoot)
=> Storage.ResolveOutputDirectory(contentRoot);
}
/// <summary>
/// Configuration for the Advisory AI file-system task queue.
/// </summary>
public sealed class AdvisoryAiQueueOptions
{
    /// <summary>
    /// Directory that holds queued advisory tasks. May be relative; it is
    /// resolved against the content root by
    /// <c>AdvisoryAiServiceOptions.ResolveQueueDirectory</c>.
    /// </summary>
    public string DirectoryPath { get; set; } = Path.Combine("data", "advisory-ai", "queue");
}
/// <summary>
/// Configuration for the Advisory AI on-disk storage locations. Paths may be
/// relative; they are resolved against a content root and created on demand.
/// </summary>
public sealed class AdvisoryAiStorageOptions
{
    // Single source of truth for the fallback locations so the property
    // initializers and the Resolve* fallbacks can never drift apart.
    private static readonly string DefaultPlanCacheDirectoryPath = Path.Combine("data", "advisory-ai", "plans");
    private static readonly string DefaultOutputDirectoryPath = Path.Combine("data", "advisory-ai", "outputs");

    /// <summary>Directory that caches generated advisory task plans.</summary>
    public string PlanCacheDirectory { get; set; } = DefaultPlanCacheDirectoryPath;

    /// <summary>Directory that stores finished advisory pipeline outputs.</summary>
    public string OutputDirectory { get; set; } = DefaultOutputDirectoryPath;

    internal string ResolvePlanCacheDirectory(string contentRoot)
        => Resolve(contentRoot, PlanCacheDirectory, DefaultPlanCacheDirectoryPath);

    internal string ResolveOutputDirectory(string contentRoot)
        => Resolve(contentRoot, OutputDirectory, DefaultOutputDirectoryPath);

    /// <summary>
    /// Resolves <paramref name="configuredPath"/> (or <paramref name="fallback"/>
    /// when blank) against <paramref name="contentRoot"/>, creates the directory,
    /// and returns the fully qualified path.
    /// </summary>
    /// <exception cref="ArgumentException">When <paramref name="contentRoot"/> is null or whitespace.</exception>
    private static string Resolve(string contentRoot, string configuredPath, string fallback)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(contentRoot);

        var path = string.IsNullOrWhiteSpace(configuredPath)
            ? fallback
            : configuredPath;

        if (!Path.IsPathFullyQualified(path))
        {
            path = Path.GetFullPath(Path.Combine(contentRoot, path));
        }

        Directory.CreateDirectory(path);
        return path;
    }
}

View File

@@ -41,6 +41,17 @@ internal static class AdvisoryAiServiceOptionsValidator
options.Queue.DirectoryPath = Path.Combine("data", "advisory-ai", "queue");
}
options.Storage ??= new AdvisoryAiStorageOptions();
if (string.IsNullOrWhiteSpace(options.Storage.PlanCacheDirectory))
{
options.Storage.PlanCacheDirectory = Path.Combine("data", "advisory-ai", "plans");
}
if (string.IsNullOrWhiteSpace(options.Storage.OutputDirectory))
{
options.Storage.OutputDirectory = Path.Combine("data", "advisory-ai", "outputs");
}
error = null;
return true;
}

View File

@@ -0,0 +1,177 @@
using System.Collections.Immutable;
using System.Text.Json;
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Options;
using StellaOps.AdvisoryAI.Guardrails;
using StellaOps.AdvisoryAI.Outputs;
using StellaOps.AdvisoryAI.Orchestration;
using StellaOps.AdvisoryAI.Prompting;
namespace StellaOps.AdvisoryAI.Hosting;
/// <summary>
/// File-system backed implementation of <see cref="IAdvisoryOutputStore"/>.
/// Outputs are stored as JSON files under
/// <c>{root}/{taskType}/{profile}/{cacheKey}.json</c>, with file-name-unsafe
/// characters replaced by underscores.
/// </summary>
internal sealed class FileSystemAdvisoryOutputStore : IAdvisoryOutputStore
{
    private readonly string _rootDirectory;
    private readonly JsonSerializerOptions _serializerOptions = new(JsonSerializerDefaults.Web);
    private readonly ILogger<FileSystemAdvisoryOutputStore> _logger;

    public FileSystemAdvisoryOutputStore(
        IOptions<AdvisoryAiServiceOptions> serviceOptions,
        ILogger<FileSystemAdvisoryOutputStore> logger)
    {
        ArgumentNullException.ThrowIfNull(serviceOptions);
        _logger = logger ?? throw new ArgumentNullException(nameof(logger));

        var options = serviceOptions.Value ?? throw new InvalidOperationException("Advisory AI options are required.");
        AdvisoryAiServiceOptionsValidator.Validate(options);
        _rootDirectory = options.ResolveOutputDirectory(AppContext.BaseDirectory);
        Directory.CreateDirectory(_rootDirectory);
    }

    /// <summary>
    /// Persists <paramref name="output"/> by serializing to a temporary file
    /// and atomically moving it into place, so readers never observe a
    /// partially written file.
    /// </summary>
    public async Task SaveAsync(AdvisoryPipelineOutput output, CancellationToken cancellationToken)
    {
        ArgumentNullException.ThrowIfNull(output);

        var path = GetOutputPath(output.CacheKey, output.TaskType, output.Profile);
        Directory.CreateDirectory(Path.GetDirectoryName(path)!);

        var envelope = OutputEnvelope.FromOutput(output);
        var tmpPath = $"{path}.tmp";
        try
        {
            await using (var stream = new FileStream(tmpPath, FileMode.Create, FileAccess.Write, FileShare.None))
            {
                await JsonSerializer.SerializeAsync(stream, envelope, _serializerOptions, cancellationToken)
                    .ConfigureAwait(false);
            }

            File.Move(tmpPath, path, overwrite: true);
        }
        catch
        {
            // Fix: don't leave an orphaned *.tmp behind when serialization,
            // cancellation, or the final move fails.
            TryDeleteTempFile(tmpPath);
            throw;
        }
    }

    /// <summary>
    /// Loads a previously stored output, or returns <c>null</c> when the file
    /// is missing or unreadable (IO/JSON failures are logged, not thrown).
    /// </summary>
    public async Task<AdvisoryPipelineOutput?> TryGetAsync(string cacheKey, AdvisoryTaskType taskType, string profile, CancellationToken cancellationToken)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(cacheKey);
        ArgumentException.ThrowIfNullOrWhiteSpace(profile);

        var path = GetOutputPath(cacheKey, taskType, profile);
        if (!File.Exists(path))
        {
            return null;
        }

        try
        {
            await using var stream = new FileStream(path, FileMode.Open, FileAccess.Read, FileShare.Read);
            var envelope = await JsonSerializer
                .DeserializeAsync<OutputEnvelope>(stream, _serializerOptions, cancellationToken)
                .ConfigureAwait(false);
            return envelope?.ToOutput();
        }
        catch (Exception ex) when (ex is IOException or JsonException)
        {
            _logger.LogWarning(ex, "Failed to read advisory output file {Path}", path);
            return null;
        }
    }

    // Builds {root}/{taskType lowercased}/{profile}/{cacheKey}.json with sanitized segments.
    private string GetOutputPath(string cacheKey, AdvisoryTaskType taskType, string profile)
    {
        var safeKey = Sanitize(cacheKey);
        var safeProfile = Sanitize(profile);
        var taskDirectory = Path.Combine(_rootDirectory, taskType.ToString().ToLowerInvariant(), safeProfile);
        return Path.Combine(taskDirectory, $"{safeKey}.json");
    }

    // Replaces characters that are invalid in file names with '_'.
    private static string Sanitize(string value)
    {
        var invalid = Path.GetInvalidFileNameChars();
        var buffer = new char[value.Length];
        var length = 0;
        foreach (var ch in value)
        {
            buffer[length++] = invalid.Contains(ch) ? '_' : ch;
        }

        return new string(buffer, 0, length);
    }

    // Best-effort removal of a temporary file; failures are only logged.
    private void TryDeleteTempFile(string path)
    {
        try
        {
            File.Delete(path);
        }
        catch (IOException ex)
        {
            _logger.LogDebug(ex, "Failed to delete temporary advisory output file {Path}", path);
        }
    }

    /// <summary>JSON-serializable snapshot of an <see cref="AdvisoryPipelineOutput"/>.</summary>
    private sealed record OutputEnvelope(
        string CacheKey,
        AdvisoryTaskType TaskType,
        string Profile,
        string Prompt,
        List<AdvisoryPromptCitation> Citations,
        Dictionary<string, string> Metadata,
        GuardrailEnvelope Guardrail,
        ProvenanceEnvelope Provenance,
        DateTimeOffset GeneratedAtUtc,
        bool PlanFromCache)
    {
        public static OutputEnvelope FromOutput(AdvisoryPipelineOutput output)
            => new(
                output.CacheKey,
                output.TaskType,
                output.Profile,
                output.Prompt,
                output.Citations.ToList(),
                output.Metadata.ToDictionary(static pair => pair.Key, static pair => pair.Value, StringComparer.Ordinal),
                GuardrailEnvelope.FromResult(output.Guardrail),
                ProvenanceEnvelope.FromProvenance(output.Provenance),
                output.GeneratedAtUtc,
                output.PlanFromCache);

        public AdvisoryPipelineOutput ToOutput()
        {
            var guardrail = Guardrail.ToResult();
            var citations = Citations.ToImmutableArray();
            var metadata = Metadata.ToImmutableDictionary(StringComparer.Ordinal);
            return new AdvisoryPipelineOutput(
                CacheKey,
                TaskType,
                Profile,
                Prompt,
                citations,
                metadata,
                guardrail,
                Provenance.ToProvenance(),
                GeneratedAtUtc,
                PlanFromCache);
        }
    }

    /// <summary>JSON-serializable snapshot of an <see cref="AdvisoryGuardrailResult"/>.</summary>
    private sealed record GuardrailEnvelope(
        bool Blocked,
        string SanitizedPrompt,
        List<AdvisoryGuardrailViolation> Violations,
        Dictionary<string, string> Metadata)
    {
        public static GuardrailEnvelope FromResult(AdvisoryGuardrailResult result)
            => new(
                result.Blocked,
                result.SanitizedPrompt,
                result.Violations.ToList(),
                result.Metadata.ToDictionary(static pair => pair.Key, static pair => pair.Value, StringComparer.Ordinal));

        public AdvisoryGuardrailResult ToResult()
            => Blocked
                ? AdvisoryGuardrailResult.Reject(SanitizedPrompt, Violations, Metadata.ToImmutableDictionary(StringComparer.Ordinal))
                : AdvisoryGuardrailResult.Allowed(SanitizedPrompt, Metadata.ToImmutableDictionary(StringComparer.Ordinal));
    }

    /// <summary>JSON-serializable snapshot of an <see cref="AdvisoryDsseProvenance"/>.</summary>
    private sealed record ProvenanceEnvelope(
        string InputDigest,
        string OutputHash,
        List<string> Signatures)
    {
        public static ProvenanceEnvelope FromProvenance(AdvisoryDsseProvenance provenance)
            => new(
                provenance.InputDigest,
                provenance.OutputHash,
                provenance.Signatures.ToList());

        public AdvisoryDsseProvenance ToProvenance()
            => new(InputDigest, OutputHash, Signatures.ToImmutableArray());
    }
}

View File

@@ -0,0 +1,462 @@
using System.Collections.Immutable;
using System.Text.Json;
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Options;
using StellaOps.AdvisoryAI.Abstractions;
using StellaOps.AdvisoryAI.Caching;
using StellaOps.AdvisoryAI.Context;
using StellaOps.AdvisoryAI.Documents;
using StellaOps.AdvisoryAI.Orchestration;
using StellaOps.AdvisoryAI.Tools;
namespace StellaOps.AdvisoryAI.Hosting;
/// <summary>
/// File-system backed implementation of <see cref="IAdvisoryPlanCache"/>.
/// Each plan is stored as one JSON file carrying an absolute expiry timestamp;
/// expired entries are removed lazily on read and during periodic sweeps
/// triggered by writes.
/// </summary>
internal sealed class FileSystemAdvisoryPlanCache : IAdvisoryPlanCache
{
    private readonly string _directory;
    private readonly JsonSerializerOptions _serializerOptions = new(JsonSerializerDefaults.Web);
    private readonly ILogger<FileSystemAdvisoryPlanCache> _logger;
    private readonly TimeProvider _timeProvider;
    // TTL stamped onto every cached plan at write time.
    private readonly TimeSpan _defaultTtl;
    // Minimum gap between cleanup sweeps of the cache directory.
    private readonly TimeSpan _cleanupInterval;
    // NOTE(review): read/written without synchronization — concurrent SetAsync
    // calls could run overlapping sweeps; confirm single-writer usage.
    private DateTimeOffset _lastCleanup;

    /// <summary>
    /// Resolves the cache directory from the service options and normalizes
    /// non-positive TTL / cleanup-interval settings to defaults.
    /// </summary>
    public FileSystemAdvisoryPlanCache(
        IOptions<AdvisoryAiServiceOptions> serviceOptions,
        IOptions<AdvisoryPlanCacheOptions> cacheOptions,
        ILogger<FileSystemAdvisoryPlanCache> logger,
        TimeProvider? timeProvider = null)
    {
        ArgumentNullException.ThrowIfNull(serviceOptions);
        ArgumentNullException.ThrowIfNull(cacheOptions);
        _logger = logger ?? throw new ArgumentNullException(nameof(logger));

        var options = serviceOptions.Value ?? throw new InvalidOperationException("Advisory AI options are required.");
        AdvisoryAiServiceOptionsValidator.Validate(options);
        _directory = options.ResolvePlanCacheDirectory(AppContext.BaseDirectory);
        Directory.CreateDirectory(_directory);

        var cache = cacheOptions.Value ?? throw new InvalidOperationException("Plan cache options are required.");
        // NOTE(review): these assignments mutate the shared options instance
        // exposed via IOptions; consider clamping into locals instead — confirm
        // no other component observes these values.
        if (cache.DefaultTimeToLive <= TimeSpan.Zero)
        {
            cache.DefaultTimeToLive = TimeSpan.FromMinutes(10);
        }

        if (cache.CleanupInterval <= TimeSpan.Zero)
        {
            cache.CleanupInterval = TimeSpan.FromMinutes(5);
        }

        _defaultTtl = cache.DefaultTimeToLive;
        _cleanupInterval = cache.CleanupInterval;
        _timeProvider = timeProvider ?? TimeProvider.System;
        _lastCleanup = _timeProvider.GetUtcNow();
    }

    /// <summary>
    /// Caches <paramref name="plan"/> under <paramref name="cacheKey"/> with the
    /// default TTL, writing via a temp file + atomic move so readers never see a
    /// partial file. Also opportunistically sweeps expired entries.
    /// </summary>
    public async Task SetAsync(string cacheKey, AdvisoryTaskPlan plan, CancellationToken cancellationToken)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(cacheKey);
        ArgumentNullException.ThrowIfNull(plan);

        var now = _timeProvider.GetUtcNow();
        await CleanupIfRequiredAsync(now, cancellationToken).ConfigureAwait(false);

        var envelope = PlanEnvelope.FromPlan(plan, now + _defaultTtl);
        var targetPath = GetPlanPath(cacheKey);
        var tmpPath = $"{targetPath}.tmp";
        // NOTE(review): if serialization or the move below throws, the .tmp file
        // is left behind — consider deleting it on failure.
        await using (var stream = new FileStream(tmpPath, FileMode.Create, FileAccess.Write, FileShare.None))
        {
            await JsonSerializer.SerializeAsync(stream, envelope, _serializerOptions, cancellationToken)
                .ConfigureAwait(false);
        }

        File.Move(tmpPath, targetPath, overwrite: true);
    }

    /// <summary>
    /// Returns the cached plan for <paramref name="cacheKey"/>, or <c>null</c>
    /// when missing, expired, or unreadable. Expired/corrupt files are deleted.
    /// </summary>
    public async Task<AdvisoryTaskPlan?> TryGetAsync(string cacheKey, CancellationToken cancellationToken)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(cacheKey);

        var path = GetPlanPath(cacheKey);
        if (!File.Exists(path))
        {
            return null;
        }

        try
        {
            await using var stream = new FileStream(path, FileMode.Open, FileAccess.Read, FileShare.Read);
            var envelope = await JsonSerializer
                .DeserializeAsync<PlanEnvelope>(stream, _serializerOptions, cancellationToken)
                .ConfigureAwait(false);
            if (envelope is null)
            {
                return null;
            }

            var now = _timeProvider.GetUtcNow();
            if (envelope.ExpiresAtUtc <= now)
            {
                // Lazy expiry: drop the stale file and report a miss.
                TryDelete(path);
                return null;
            }

            return envelope.ToPlan();
        }
        catch (Exception ex) when (ex is IOException or JsonException)
        {
            _logger.LogWarning(ex, "Failed to read advisory plan cache file {Path}", path);
            TryDelete(path);
            return null;
        }
    }

    /// <summary>Removes the cached plan for <paramref name="cacheKey"/>, if present.</summary>
    public Task RemoveAsync(string cacheKey, CancellationToken cancellationToken)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(cacheKey);
        var path = GetPlanPath(cacheKey);
        TryDelete(path);
        return Task.CompletedTask;
    }

    // Sweeps the cache directory for expired or unreadable entries, at most once
    // per _cleanupInterval.
    private async Task CleanupIfRequiredAsync(DateTimeOffset now, CancellationToken cancellationToken)
    {
        if (now - _lastCleanup < _cleanupInterval)
        {
            return;
        }

        foreach (var file in Directory.EnumerateFiles(_directory, "*.json", SearchOption.TopDirectoryOnly))
        {
            try
            {
                await using var stream = new FileStream(file, FileMode.Open, FileAccess.Read, FileShare.Read);
                var envelope = await JsonSerializer
                    .DeserializeAsync<PlanEnvelope>(stream, _serializerOptions, cancellationToken)
                    .ConfigureAwait(false);
                if (envelope is null || envelope.ExpiresAtUtc <= now)
                {
                    TryDelete(file);
                }
            }
            catch (Exception ex) when (ex is IOException or JsonException)
            {
                // Unreadable entries are treated as garbage and removed.
                _logger.LogDebug(ex, "Failed to inspect advisory plan cache file {Path}", file);
                TryDelete(file);
            }
        }

        _lastCleanup = now;
    }

    // Maps a cache key to its on-disk file name (sanitized, ".json" suffix).
    private string GetPlanPath(string cacheKey)
    {
        var safeName = Sanitize(cacheKey);
        return Path.Combine(_directory, $"{safeName}.json");
    }

    // Replaces characters that are invalid in file names with '_'.
    private static string Sanitize(string value)
    {
        var invalid = Path.GetInvalidFileNameChars();
        var builder = new char[value.Length];
        var length = 0;
        foreach (var ch in value)
        {
            builder[length++] = invalid.Contains(ch) ? '_' : ch;
        }

        return new string(builder, 0, length);
    }

    // Best-effort delete; IO failures are logged at debug level only.
    // NOTE(review): UnauthorizedAccessException is not caught here — confirm it
    // cannot occur for files this process created.
    private void TryDelete(string path)
    {
        try
        {
            File.Delete(path);
        }
        catch (IOException ex)
        {
            _logger.LogDebug(ex, "Failed to delete advisory plan cache file {Path}", path);
        }
    }

    /// <summary>
    /// JSON-serializable snapshot of an <see cref="AdvisoryTaskPlan"/> plus its
    /// absolute expiry timestamp.
    /// </summary>
    private sealed record PlanEnvelope(
        AdvisoryTaskRequestEnvelope Request,
        string CacheKey,
        string PromptTemplate,
        List<AdvisoryChunkEnvelope> StructuredChunks,
        List<VectorResultEnvelope> VectorResults,
        SbomContextEnvelope? SbomContext,
        DependencyAnalysisEnvelope? DependencyAnalysis,
        AdvisoryTaskBudget Budget,
        Dictionary<string, string> Metadata,
        DateTimeOffset ExpiresAtUtc)
    {
        public static PlanEnvelope FromPlan(AdvisoryTaskPlan plan, DateTimeOffset expiry)
            => new(
                AdvisoryTaskRequestEnvelope.FromRequest(plan.Request),
                plan.CacheKey,
                plan.PromptTemplate,
                plan.StructuredChunks.Select(AdvisoryChunkEnvelope.FromChunk).ToList(),
                plan.VectorResults.Select(VectorResultEnvelope.FromResult).ToList(),
                plan.SbomContext is null ? null : SbomContextEnvelope.FromContext(plan.SbomContext),
                plan.DependencyAnalysis is null ? null : DependencyAnalysisEnvelope.FromAnalysis(plan.DependencyAnalysis),
                plan.Budget,
                plan.Metadata.ToDictionary(static p => p.Key, static p => p.Value, StringComparer.Ordinal),
                expiry);

        // Rehydrates the immutable domain plan from the serialized envelope.
        public AdvisoryTaskPlan ToPlan()
        {
            var chunks = StructuredChunks
                .Select(static chunk => chunk.ToChunk())
                .ToImmutableArray();
            var vectors = VectorResults
                .Select(static result => result.ToResult())
                .ToImmutableArray();
            var sbom = SbomContext?.ToContext();
            var dependency = DependencyAnalysis?.ToAnalysis();
            var metadata = Metadata.ToImmutableDictionary(StringComparer.Ordinal);
            return new AdvisoryTaskPlan(
                Request.ToRequest(),
                CacheKey,
                PromptTemplate,
                chunks,
                vectors,
                sbom,
                dependency,
                Budget,
                metadata);
        }
    }

    /// <summary>Serializable form of an <see cref="AdvisoryTaskRequest"/>.</summary>
    private sealed record AdvisoryTaskRequestEnvelope(
        AdvisoryTaskType TaskType,
        string AdvisoryKey,
        string? ArtifactId,
        string? ArtifactPurl,
        string? PolicyVersion,
        string Profile,
        IReadOnlyList<string>? PreferredSections,
        bool ForceRefresh)
    {
        public static AdvisoryTaskRequestEnvelope FromRequest(AdvisoryTaskRequest request)
            => new(
                request.TaskType,
                request.AdvisoryKey,
                request.ArtifactId,
                request.ArtifactPurl,
                request.PolicyVersion,
                request.Profile,
                request.PreferredSections?.ToArray(),
                request.ForceRefresh);

        public AdvisoryTaskRequest ToRequest()
            => new(
                TaskType,
                AdvisoryKey,
                ArtifactId,
                ArtifactPurl,
                PolicyVersion,
                Profile,
                PreferredSections,
                ForceRefresh);
    }

    /// <summary>Serializable form of an <see cref="AdvisoryChunk"/>.</summary>
    private sealed record AdvisoryChunkEnvelope(
        string DocumentId,
        string ChunkId,
        string Section,
        string ParagraphId,
        string Text,
        Dictionary<string, string> Metadata)
    {
        public static AdvisoryChunkEnvelope FromChunk(AdvisoryChunk chunk)
            => new(
                chunk.DocumentId,
                chunk.ChunkId,
                chunk.Section,
                chunk.ParagraphId,
                chunk.Text,
                chunk.Metadata.ToDictionary(static p => p.Key, static p => p.Value, StringComparer.Ordinal));

        public AdvisoryChunk ToChunk()
            => AdvisoryChunk.Create(
                DocumentId,
                ChunkId,
                Section,
                ParagraphId,
                Text,
                Metadata);
    }

    /// <summary>Serializable form of an <see cref="AdvisoryVectorResult"/>.</summary>
    private sealed record VectorResultEnvelope(string Query, List<VectorMatchEnvelope> Matches)
    {
        public static VectorResultEnvelope FromResult(AdvisoryVectorResult result)
            => new(
                result.Query,
                result.Matches.Select(VectorMatchEnvelope.FromMatch).ToList());

        public AdvisoryVectorResult ToResult()
            => new(Query, Matches.Select(static match => match.ToMatch()).ToImmutableArray());
    }

    /// <summary>Serializable form of a <see cref="VectorRetrievalMatch"/>.</summary>
    private sealed record VectorMatchEnvelope(
        string DocumentId,
        string ChunkId,
        string Text,
        double Score,
        Dictionary<string, string> Metadata)
    {
        public static VectorMatchEnvelope FromMatch(VectorRetrievalMatch match)
            => new(
                match.DocumentId,
                match.ChunkId,
                match.Text,
                match.Score,
                match.Metadata.ToDictionary(static p => p.Key, static p => p.Value, StringComparer.Ordinal));

        public VectorRetrievalMatch ToMatch()
            => new(DocumentId, ChunkId, Text, Score, Metadata);
    }

    /// <summary>Serializable form of an <see cref="SbomContextResult"/>.</summary>
    private sealed record SbomContextEnvelope(
        string ArtifactId,
        string? Purl,
        List<SbomVersionTimelineEntryEnvelope> VersionTimeline,
        List<SbomDependencyPathEnvelope> DependencyPaths,
        Dictionary<string, string> EnvironmentFlags,
        SbomBlastRadiusEnvelope? BlastRadius,
        Dictionary<string, string> Metadata)
    {
        public static SbomContextEnvelope FromContext(SbomContextResult context)
            => new(
                context.ArtifactId,
                context.Purl,
                context.VersionTimeline.Select(SbomVersionTimelineEntryEnvelope.FromEntry).ToList(),
                context.DependencyPaths.Select(SbomDependencyPathEnvelope.FromPath).ToList(),
                context.EnvironmentFlags.ToDictionary(static p => p.Key, static p => p.Value, StringComparer.Ordinal),
                context.BlastRadius is null ? null : SbomBlastRadiusEnvelope.FromBlastRadius(context.BlastRadius),
                context.Metadata.ToDictionary(static p => p.Key, static p => p.Value, StringComparer.Ordinal));

        public SbomContextResult ToContext()
            => SbomContextResult.Create(
                ArtifactId,
                Purl,
                VersionTimeline.Select(static entry => entry.ToEntry()),
                DependencyPaths.Select(static path => path.ToPath()),
                EnvironmentFlags,
                BlastRadius?.ToBlastRadius(),
                Metadata);
    }

    /// <summary>Serializable form of an <see cref="SbomVersionTimelineEntry"/>.</summary>
    private sealed record SbomVersionTimelineEntryEnvelope(
        string Version,
        DateTimeOffset FirstObserved,
        DateTimeOffset? LastObserved,
        string Status,
        string Source)
    {
        public static SbomVersionTimelineEntryEnvelope FromEntry(SbomVersionTimelineEntry entry)
            => new(entry.Version, entry.FirstObserved, entry.LastObserved, entry.Status, entry.Source);

        public SbomVersionTimelineEntry ToEntry()
            => new(Version, FirstObserved, LastObserved, Status, Source);
    }

    /// <summary>Serializable form of an <see cref="SbomDependencyPath"/>.</summary>
    private sealed record SbomDependencyPathEnvelope(
        List<SbomDependencyNodeEnvelope> Nodes,
        bool IsRuntime,
        string? Source,
        Dictionary<string, string> Metadata)
    {
        public static SbomDependencyPathEnvelope FromPath(SbomDependencyPath path)
            => new(
                path.Nodes.Select(SbomDependencyNodeEnvelope.FromNode).ToList(),
                path.IsRuntime,
                path.Source,
                path.Metadata.ToDictionary(static p => p.Key, static p => p.Value, StringComparer.Ordinal));

        public SbomDependencyPath ToPath()
            => new(
                Nodes.Select(static node => node.ToNode()),
                IsRuntime,
                Source,
                Metadata);
    }

    /// <summary>Serializable form of an <see cref="SbomDependencyNode"/>.</summary>
    private sealed record SbomDependencyNodeEnvelope(string Identifier, string? Version)
    {
        public static SbomDependencyNodeEnvelope FromNode(SbomDependencyNode node)
            => new(node.Identifier, node.Version);

        public SbomDependencyNode ToNode()
            => new(Identifier, Version);
    }

    /// <summary>Serializable form of an <see cref="SbomBlastRadiusSummary"/>.</summary>
    private sealed record SbomBlastRadiusEnvelope(
        int ImpactedAssets,
        int ImpactedWorkloads,
        int ImpactedNamespaces,
        double? ImpactedPercentage,
        Dictionary<string, string> Metadata)
    {
        public static SbomBlastRadiusEnvelope FromBlastRadius(SbomBlastRadiusSummary blastRadius)
            => new(
                blastRadius.ImpactedAssets,
                blastRadius.ImpactedWorkloads,
                blastRadius.ImpactedNamespaces,
                blastRadius.ImpactedPercentage,
                blastRadius.Metadata.ToDictionary(static p => p.Key, static p => p.Value, StringComparer.Ordinal));

        public SbomBlastRadiusSummary ToBlastRadius()
            => new(
                ImpactedAssets,
                ImpactedWorkloads,
                ImpactedNamespaces,
                ImpactedPercentage,
                Metadata);
    }

    /// <summary>Serializable form of a <see cref="DependencyAnalysisResult"/>.</summary>
    private sealed record DependencyAnalysisEnvelope(
        string ArtifactId,
        List<DependencyNodeSummaryEnvelope> Nodes,
        Dictionary<string, string> Metadata)
    {
        public static DependencyAnalysisEnvelope FromAnalysis(DependencyAnalysisResult analysis)
            => new(
                analysis.ArtifactId,
                analysis.Nodes.Select(DependencyNodeSummaryEnvelope.FromNode).ToList(),
                analysis.Metadata.ToDictionary(static p => p.Key, static p => p.Value, StringComparer.Ordinal));

        public DependencyAnalysisResult ToAnalysis()
            => DependencyAnalysisResult.Create(
                ArtifactId,
                Nodes.Select(static node => node.ToNode()),
                Metadata);
    }

    /// <summary>Serializable form of a <see cref="DependencyNodeSummary"/>.</summary>
    private sealed record DependencyNodeSummaryEnvelope(
        string Identifier,
        List<string> Versions,
        int RuntimeOccurrences,
        int DevelopmentOccurrences)
    {
        public static DependencyNodeSummaryEnvelope FromNode(DependencyNodeSummary node)
            => new(
                node.Identifier,
                node.Versions.ToList(),
                node.RuntimeOccurrences,
                node.DevelopmentOccurrences);

        public DependencyNodeSummary ToNode()
            => new(Identifier, Versions, RuntimeOccurrences, DevelopmentOccurrences);
    }
}

View File

@@ -3,14 +3,16 @@ using Microsoft.Extensions.Configuration;
using Microsoft.Extensions.DependencyInjection;
using Microsoft.Extensions.DependencyInjection.Extensions;
using Microsoft.Extensions.Options;
using StellaOps.AdvisoryAI.Caching;
using StellaOps.AdvisoryAI.DependencyInjection;
using StellaOps.AdvisoryAI.Providers;
using StellaOps.AdvisoryAI.Queue;
namespace StellaOps.AdvisoryAI.Hosting;
public static class ServiceCollectionExtensions
{
using StellaOps.AdvisoryAI.Outputs;
namespace StellaOps.AdvisoryAI.Hosting;
public static class ServiceCollectionExtensions
{
public static IServiceCollection AddAdvisoryAiCore(
this IServiceCollection services,
IConfiguration configuration,
@@ -43,6 +45,8 @@ public static class ServiceCollectionExtensions
services.AddAdvisoryPipelineInfrastructure();
services.Replace(ServiceDescriptor.Singleton<IAdvisoryTaskQueue, FileSystemAdvisoryTaskQueue>());
services.Replace(ServiceDescriptor.Singleton<IAdvisoryPlanCache, FileSystemAdvisoryPlanCache>());
services.Replace(ServiceDescriptor.Singleton<IAdvisoryOutputStore, FileSystemAdvisoryOutputStore>());
services.TryAddSingleton<AdvisoryAiMetrics>();
return services;

View File

@@ -1,27 +1,66 @@
using System.Collections.Generic;
using System.Linq;
using StellaOps.AdvisoryAI.Guardrails;
using StellaOps.AdvisoryAI.Orchestration;
using StellaOps.AdvisoryAI.Outputs;
namespace StellaOps.AdvisoryAI.WebService.Contracts;
public sealed record AdvisoryOutputResponse(
internal sealed record AdvisoryOutputResponse(
string CacheKey,
AdvisoryTaskType TaskType,
string TaskType,
string Profile,
string OutputHash,
bool GuardrailBlocked,
IReadOnlyCollection<AdvisoryGuardrailViolationResponse> GuardrailViolations,
IReadOnlyDictionary<string, string> GuardrailMetadata,
string Prompt,
IReadOnlyCollection<AdvisoryCitationResponse> Citations,
IReadOnlyList<AdvisoryOutputCitation> Citations,
IReadOnlyDictionary<string, string> Metadata,
AdvisoryOutputGuardrail Guardrail,
AdvisoryOutputProvenance Provenance,
DateTimeOffset GeneratedAtUtc,
bool PlanFromCache);
public sealed record AdvisoryGuardrailViolationResponse(string Code, string Message)
bool PlanFromCache)
{
public static AdvisoryGuardrailViolationResponse From(AdvisoryGuardrailViolation violation)
=> new(violation.Code, violation.Message);
public static AdvisoryOutputResponse FromDomain(AdvisoryPipelineOutput output)
=> new(
output.CacheKey,
output.TaskType.ToString(),
output.Profile,
output.Prompt,
output.Citations
.Select(citation => new AdvisoryOutputCitation(citation.Index, citation.DocumentId, citation.ChunkId))
.ToList(),
output.Metadata.ToDictionary(static pair => pair.Key, static pair => pair.Value, StringComparer.Ordinal),
AdvisoryOutputGuardrail.FromDomain(output.Guardrail),
AdvisoryOutputProvenance.FromDomain(output.Provenance),
output.GeneratedAtUtc,
output.PlanFromCache);
}
public sealed record AdvisoryCitationResponse(int Index, string DocumentId, string ChunkId);
internal sealed record AdvisoryOutputCitation(int Index, string DocumentId, string ChunkId);
internal sealed record AdvisoryOutputGuardrail(
bool Blocked,
string SanitizedPrompt,
IReadOnlyList<AdvisoryOutputGuardrailViolation> Violations,
IReadOnlyDictionary<string, string> Metadata)
{
public static AdvisoryOutputGuardrail FromDomain(AdvisoryGuardrailResult result)
=> new(
result.Blocked,
result.SanitizedPrompt,
result.Violations
.Select(violation => new AdvisoryOutputGuardrailViolation(violation.Code, violation.Message))
.ToList(),
result.Metadata.ToDictionary(static pair => pair.Key, static pair => pair.Value, StringComparer.Ordinal));
}
internal sealed record AdvisoryOutputGuardrailViolation(string Code, string Message);
internal sealed record AdvisoryOutputProvenance(
string InputDigest,
string OutputHash,
IReadOnlyList<string> Signatures)
{
public static AdvisoryOutputProvenance FromDomain(AdvisoryDsseProvenance provenance)
=> new(
provenance.InputDigest,
provenance.OutputHash,
provenance.Signatures.ToArray());
}

View File

@@ -1,3 +1,4 @@
using System.Diagnostics;
using System.Linq;
using System.Net;
using System.Threading.RateLimiting;
@@ -9,10 +10,13 @@ using Microsoft.Extensions.DependencyInjection;
using Microsoft.Extensions.Hosting;
using Microsoft.Extensions.Logging;
using StellaOps.AdvisoryAI.Caching;
using StellaOps.AdvisoryAI.Diagnostics;
using StellaOps.AdvisoryAI.Hosting;
using StellaOps.AdvisoryAI.Metrics;
using StellaOps.AdvisoryAI.Outputs;
using StellaOps.AdvisoryAI.Orchestration;
using StellaOps.AdvisoryAI.Queue;
using StellaOps.AdvisoryAI.WebService.Contracts;
var builder = WebApplication.CreateBuilder(args);
@@ -72,6 +76,9 @@ app.MapPost("/v1/advisory-ai/pipeline/{taskType}", HandleSinglePlan)
app.MapPost("/v1/advisory-ai/pipeline:batch", HandleBatchPlans)
.RequireRateLimiting("advisory-ai");
app.MapGet("/v1/advisory-ai/outputs/{cacheKey}", HandleGetOutput)
.RequireRateLimiting("advisory-ai");
app.Run();
static async Task<IResult> HandleSinglePlan(
@@ -85,6 +92,10 @@ static async Task<IResult> HandleSinglePlan(
AdvisoryPipelineMetrics pipelineMetrics,
CancellationToken cancellationToken)
{
using var activity = AdvisoryAiActivitySource.Instance.StartActivity("advisory_ai.plan_request", ActivityKind.Server);
activity?.SetTag("advisory.task_type", taskType);
activity?.SetTag("advisory.advisory_key", request.AdvisoryKey);
if (!Enum.TryParse<AdvisoryTaskType>(taskType, ignoreCase: true, out var parsedType))
{
return Results.BadRequest(new { error = $"Unknown task type '{taskType}'." });
@@ -103,6 +114,7 @@ static async Task<IResult> HandleSinglePlan(
var normalizedRequest = request with { TaskType = parsedType };
var taskRequest = normalizedRequest.ToTaskRequest();
var plan = await orchestrator.CreatePlanAsync(taskRequest, cancellationToken).ConfigureAwait(false);
activity?.SetTag("advisory.plan_cache_key", plan.CacheKey);
await planCache.SetAsync(plan.CacheKey, plan, cancellationToken).ConfigureAwait(false);
await taskQueue.EnqueueAsync(new AdvisoryTaskQueueMessage(plan.CacheKey, plan.Request), cancellationToken).ConfigureAwait(false);
@@ -125,6 +137,9 @@ static async Task<IResult> HandleBatchPlans(
AdvisoryPipelineMetrics pipelineMetrics,
CancellationToken cancellationToken)
{
using var activity = AdvisoryAiActivitySource.Instance.StartActivity("advisory_ai.plan_batch", ActivityKind.Server);
activity?.SetTag("advisory.batch_size", batchRequest.Requests.Count);
if (batchRequest.Requests.Count == 0)
{
return Results.BadRequest(new { error = "At least one request must be supplied." });
@@ -153,6 +168,12 @@ static async Task<IResult> HandleBatchPlans(
var normalizedRequest = item with { TaskType = parsedType };
var taskRequest = normalizedRequest.ToTaskRequest();
var plan = await orchestrator.CreatePlanAsync(taskRequest, cancellationToken).ConfigureAwait(false);
activity?.AddEvent(new ActivityEvent("advisory.plan.created", tags: new ActivityTagsCollection
{
{ "advisory.task_type", plan.Request.TaskType.ToString() },
{ "advisory.advisory_key", plan.Request.AdvisoryKey },
{ "advisory.plan_cache_key", plan.CacheKey }
}));
await planCache.SetAsync(plan.CacheKey, plan, cancellationToken).ConfigureAwait(false);
await taskQueue.EnqueueAsync(new AdvisoryTaskQueueMessage(plan.CacheKey, plan.Request), cancellationToken).ConfigureAwait(false);
@@ -167,6 +188,37 @@ static async Task<IResult> HandleBatchPlans(
return Results.Ok(results);
}
// Retrieves a stored pipeline output by cache key / task type / profile.
// Returns 400 for an unknown task type, 403 when the caller lacks the required
// scope, 404 when no output exists.
static async Task<IResult> HandleGetOutput(
    HttpContext httpContext,
    string cacheKey,
    string taskType,
    string? profile,
    IAdvisoryOutputStore outputStore,
    CancellationToken cancellationToken)
{
    ArgumentNullException.ThrowIfNull(outputStore);

    // Fix: Enum.TryParse accepts bare numeric strings (e.g. "42") even when no
    // such member exists, so also require the parsed value to be defined.
    if (!Enum.TryParse<AdvisoryTaskType>(taskType, ignoreCase: true, out var parsedTaskType)
        || !Enum.IsDefined(parsedTaskType))
    {
        return Results.BadRequest(new { error = $"Unknown task type '{taskType}'." });
    }

    if (!EnsureAuthorized(httpContext, parsedTaskType))
    {
        return Results.StatusCode(StatusCodes.Status403Forbidden);
    }

    // Missing/blank profile falls back to the "default" profile bucket.
    var resolvedProfile = string.IsNullOrWhiteSpace(profile) ? "default" : profile!.Trim();
    var output = await outputStore.TryGetAsync(cacheKey, parsedTaskType, resolvedProfile, cancellationToken)
        .ConfigureAwait(false);

    return output is null
        ? Results.NotFound(new { error = "Output not found." })
        : Results.Ok(AdvisoryOutputResponse.FromDomain(output));
}
static bool EnsureAuthorized(HttpContext context, AdvisoryTaskType taskType)
{
if (!context.Request.Headers.TryGetValue("X-StellaOps-Scopes", out var scopes))

View File

@@ -5,7 +5,7 @@ using Microsoft.Extensions.Logging;
using StellaOps.AdvisoryAI.Hosting;
using StellaOps.AdvisoryAI.Worker.Services;
var builder = Host.CreateApplicationBuilder(args);
var builder = Microsoft.Extensions.Hosting.Host.CreateApplicationBuilder(args);
builder.Configuration
.AddJsonFile("appsettings.json", optional: true, reloadOnChange: true)

View File

@@ -1,6 +1,8 @@
using System.Diagnostics;
using Microsoft.Extensions.Hosting;
using Microsoft.Extensions.Logging;
using StellaOps.AdvisoryAI.Caching;
using StellaOps.AdvisoryAI.Diagnostics;
using StellaOps.AdvisoryAI.Metrics;
using StellaOps.AdvisoryAI.Orchestration;
using StellaOps.AdvisoryAI.Queue;
@@ -50,8 +52,14 @@ internal sealed class AdvisoryTaskWorker : BackgroundService
continue;
}
using var activity = AdvisoryAiActivitySource.Instance.StartActivity("advisory_ai.process", ActivityKind.Consumer);
activity?.SetTag("advisory.task_type", message.Request.TaskType.ToString());
activity?.SetTag("advisory.advisory_key", message.Request.AdvisoryKey);
var processStart = _timeProvider.GetTimestamp();
AdvisoryTaskPlan? plan = await _cache.TryGetAsync(message.PlanCacheKey, stoppingToken).ConfigureAwait(false);
var fromCache = plan is not null && !message.Request.ForceRefresh;
activity?.SetTag("advisory.plan_cache_hit", fromCache);
if (!fromCache)
{
@@ -68,8 +76,12 @@ internal sealed class AdvisoryTaskWorker : BackgroundService
message.Request.AdvisoryKey,
fromCache);
plan ??= throw new InvalidOperationException("Advisory task plan could not be generated.");
await _executor.ExecuteAsync(plan, message, fromCache, stoppingToken).ConfigureAwait(false);
_metrics.RecordPlanProcessed(message.Request.TaskType, fromCache);
var totalElapsed = _timeProvider.GetElapsedTime(processStart);
_metrics.RecordPipelineLatency(message.Request.TaskType, totalElapsed.TotalSeconds, fromCache);
activity?.SetTag("advisory.pipeline_latency_seconds", totalElapsed.TotalSeconds);
}
catch (OperationCanceledException)
{

View File

@@ -0,0 +1,8 @@
using System.Diagnostics;
namespace StellaOps.AdvisoryAI.Diagnostics;
/// <summary>
/// Holds the process-wide <see cref="ActivitySource"/> used for Advisory AI
/// distributed tracing; listeners subscribe by the "StellaOps.AdvisoryAI" name.
/// </summary>
internal static class AdvisoryAiActivitySource
{
    // Single shared instance — ActivitySource is intended to be created once
    // and reused for the lifetime of the process.
    public static readonly ActivitySource Instance = new("StellaOps.AdvisoryAI");
}

View File

@@ -1,4 +1,7 @@
using Microsoft.Extensions.Logging;
using System;
using System.Diagnostics;
using System.Linq;
using StellaOps.AdvisoryAI.Guardrails;
using StellaOps.AdvisoryAI.Outputs;
using StellaOps.AdvisoryAI.Orchestration;
@@ -53,27 +56,72 @@ internal sealed class AdvisoryPipelineExecutor : IAdvisoryPipelineExecutor
var prompt = await _promptAssembler.AssembleAsync(plan, cancellationToken).ConfigureAwait(false);
var guardrailResult = await _guardrailPipeline.EvaluateAsync(prompt, cancellationToken).ConfigureAwait(false);
var violationCount = guardrailResult.Violations.Length;
if (guardrailResult.Blocked)
{
_logger?.LogWarning(
"Guardrail blocked advisory pipeline output for {TaskType} on advisory {AdvisoryKey}",
"Guardrail blocked advisory pipeline output for {TaskType} on advisory {AdvisoryKey} with {ViolationCount} violations",
plan.Request.TaskType,
plan.Request.AdvisoryKey,
violationCount);
}
else if (violationCount > 0)
{
_logger?.LogInformation(
"Guardrail recorded {ViolationCount} advisory validation violations for {TaskType} on advisory {AdvisoryKey}",
violationCount,
plan.Request.TaskType,
plan.Request.AdvisoryKey);
}
var citationCoverage = CalculateCitationCoverage(plan, prompt);
var activity = Activity.Current;
activity?.SetTag("advisory.guardrail_blocked", guardrailResult.Blocked);
activity?.SetTag("advisory.validation_failures", violationCount);
activity?.SetTag("advisory.citation_coverage", citationCoverage);
_metrics.RecordGuardrailOutcome(plan.Request.TaskType, guardrailResult.Blocked, violationCount);
_metrics.RecordCitationCoverage(
plan.Request.TaskType,
citationCoverage,
prompt.Citations.Length,
plan.StructuredChunks.Length);
var generatedAt = _timeProvider.GetUtcNow();
var output = AdvisoryPipelineOutput.Create(plan, prompt, guardrailResult, generatedAt, planFromCache);
await _outputStore.SaveAsync(output, cancellationToken).ConfigureAwait(false);
_metrics.RecordGuardrailResult(plan.Request.TaskType, guardrailResult.Blocked);
_metrics.RecordOutputStored(plan.Request.TaskType, planFromCache, guardrailResult.Blocked);
_logger?.LogInformation(
"Stored advisory pipeline output {CacheKey} (task {TaskType}, cache:{CacheHit}, guardrail_blocked:{Blocked})",
"Stored advisory pipeline output {CacheKey} (task {TaskType}, cache:{CacheHit}, guardrail_blocked:{Blocked}, validation_failures:{ValidationFailures}, citation_coverage:{CitationCoverage:0.00})",
output.CacheKey,
plan.Request.TaskType,
planFromCache,
guardrailResult.Blocked);
guardrailResult.Blocked,
violationCount,
citationCoverage);
}
/// <summary>
/// Computes citation coverage as distinct cited (document, chunk) pairs divided by
/// the number of structured chunks in the plan, clamped to [0, 1].
/// </summary>
/// <param name="plan">Plan supplying the structured chunk count.</param>
/// <param name="prompt">Prompt whose citations are counted.</param>
/// <returns>Coverage ratio in [0, 1]; 0 when there are no chunks or no citations.</returns>
private static double CalculateCitationCoverage(AdvisoryTaskPlan plan, AdvisoryPrompt prompt)
{
    var chunkTotal = plan.StructuredChunks.Length;

    // No chunks to cover, or nothing cited => zero coverage.
    if (chunkTotal <= 0 || prompt.Citations.IsDefaultOrEmpty)
    {
        return 0d;
    }

    var distinctPairs = prompt.Citations
        .Select(citation => (citation.DocumentId, citation.ChunkId))
        .Distinct()
        .Count();

    return Math.Clamp(distinctPairs / (double)chunkTotal, 0d, 1d);
}
}

View File

@@ -13,7 +13,10 @@ public sealed class AdvisoryPipelineMetrics : IDisposable
private readonly Counter<long> _plansProcessed;
private readonly Counter<long> _outputsStored;
private readonly Counter<long> _guardrailBlocks;
private readonly Counter<long> _validationFailures;
private readonly Histogram<double> _planBuildDuration;
private readonly Histogram<double> _pipelineLatencySeconds;
private readonly Histogram<double> _citationCoverageRatio;
private bool _disposed;
public AdvisoryPipelineMetrics(IMeterFactory meterFactory)
@@ -25,8 +28,11 @@ public sealed class AdvisoryPipelineMetrics : IDisposable
_plansQueued = _meter.CreateCounter<long>("advisory_plans_queued");
_plansProcessed = _meter.CreateCounter<long>("advisory_plans_processed");
_outputsStored = _meter.CreateCounter<long>("advisory_outputs_stored");
_guardrailBlocks = _meter.CreateCounter<long>("advisory_guardrail_blocks");
_guardrailBlocks = _meter.CreateCounter<long>("advisory_ai_guardrail_blocks_total");
_validationFailures = _meter.CreateCounter<long>("advisory_ai_validation_failures_total");
_planBuildDuration = _meter.CreateHistogram<double>("advisory_plan_build_duration_seconds");
_pipelineLatencySeconds = _meter.CreateHistogram<double>("advisory_ai_latency_seconds");
_citationCoverageRatio = _meter.CreateHistogram<double>("advisory_ai_citation_coverage_ratio");
}
public void RecordPlanCreated(double buildSeconds, AdvisoryTaskType taskType)
@@ -55,12 +61,40 @@ public sealed class AdvisoryPipelineMetrics : IDisposable
KeyValuePair.Create<string, object?>("guardrail_blocked", guardrailBlocked));
}
public void RecordGuardrailResult(AdvisoryTaskType taskType, bool blocked)
/// <summary>
/// Records guardrail counters for a pipeline run: one block when the output was
/// rejected, plus the number of validation violations observed.
/// </summary>
/// <param name="taskType">Advisory task that was evaluated.</param>
/// <param name="blocked">Whether the guardrail blocked the output.</param>
/// <param name="validationFailures">Count of validation violations (0 records nothing).</param>
public void RecordGuardrailOutcome(AdvisoryTaskType taskType, bool blocked, int validationFailures)
{
    var taskTag = KeyValuePair.Create<string, object?>("task_type", taskType.ToString());

    if (blocked)
    {
        _guardrailBlocks.Add(1, taskTag);
    }

    if (validationFailures > 0)
    {
        _validationFailures.Add(validationFailures, taskTag);
    }
}
/// <summary>
/// Records end-to-end pipeline latency (seconds) tagged by task type and
/// whether the plan was served from cache.
/// </summary>
public void RecordPipelineLatency(AdvisoryTaskType taskType, double seconds, bool planFromCache)
{
    var tags = new[]
    {
        KeyValuePair.Create<string, object?>("task_type", taskType.ToString()),
        KeyValuePair.Create<string, object?>("plan_cache_hit", planFromCache),
    };

    _pipelineLatencySeconds.Record(seconds, tags);
}
/// <summary>
/// Records the citation coverage ratio histogram, tagging each sample with the
/// task type plus the raw citation and structured-chunk counts.
/// </summary>
public void RecordCitationCoverage(
    AdvisoryTaskType taskType,
    double coverageRatio,
    int citationCount,
    int structuredChunkCount)
{
    var tags = new[]
    {
        KeyValuePair.Create<string, object?>("task_type", taskType.ToString()),
        KeyValuePair.Create<string, object?>("citations", citationCount),
        KeyValuePair.Create<string, object?>("structured_chunks", structuredChunkCount),
    };

    _citationCoverageRatio.Record(coverageRatio, tags);
}
public void Dispose()

View File

@@ -6,11 +6,11 @@
| AIAI-31-003 | DONE (2025-11-04) | Advisory AI Guild | AIAI-31-001..002 | Implement deterministic toolset (version comparators, range checks, dependency analysis, policy lookup) exposed via orchestrator. | Tools validated with property tests; outputs cached; docs updated. |
| AIAI-31-004 | DONE (2025-11-04) | Advisory AI Guild | AIAI-31-001..003, AUTH-VULN-29-001 | Build orchestration pipeline for Summary/Conflict/Remediation tasks (prompt templates, tool calls, token budgets, caching). | Pipeline executes tasks deterministically; caches keyed by tuple+policy; integration tests cover tasks. |
| AIAI-31-004A | DONE (2025-11-04) | Advisory AI Guild, Platform Guild | AIAI-31-004, AIAI-31-002 | Wire `AdvisoryPipelineOrchestrator` into WebService/Worker, expose API/queue contracts, emit metrics, and stand up cache stub. | API returns plan metadata; worker executes queue message; metrics recorded; doc updated. |
| AIAI-31-004B | TODO | Advisory AI Guild, Security Guild | AIAI-31-004A, DOCS-AIAI-31-003, AUTH-AIAI-31-004 | Implement prompt assembler, guardrail plumbing, cache persistence, DSSE provenance; add golden outputs. | Deterministic outputs cached; guardrails enforced; tests cover prompt assembly + caching. |
| AIAI-31-004C | TODO | Advisory AI Guild, CLI Guild, Docs Guild | AIAI-31-004B, CLI-AIAI-31-003 | Deliver CLI `stella advise run <task>` command, renderers, documentation updates, and CLI golden tests. | CLI command produces deterministic output; docs published; smoke run recorded. |
| AIAI-31-004B | DONE (2025-11-06) | Advisory AI Guild, Security Guild | AIAI-31-004A, DOCS-AIAI-31-003, AUTH-AIAI-31-004 | Implement prompt assembler, guardrail plumbing, cache persistence, DSSE provenance; add golden outputs. | Deterministic outputs cached; guardrails enforced; tests cover prompt assembly + caching. |
| AIAI-31-004C | DONE (2025-11-06) | Advisory AI Guild, CLI Guild, Docs Guild | AIAI-31-004B, CLI-AIAI-31-003 | Deliver CLI `stella advise run <task>` command, renderers, documentation updates, and CLI golden tests. | CLI command produces deterministic output; docs published; smoke run recorded. |
| AIAI-31-005 | DONE (2025-11-04) | Advisory AI Guild, Security Guild | AIAI-31-004 | Implement guardrails (redaction, injection defense, output validation, citation enforcement) and fail-safe handling. | Guardrails block adversarial inputs; output validator enforces schemas; security tests pass. |
| AIAI-31-006 | DONE (2025-11-04) | Advisory AI Guild | AIAI-31-004..005 | Expose REST API endpoints (`/advisory/ai/*`) with RBAC, rate limits, OpenAPI schemas, and batching support. | Endpoints deployed with schema validation; rate limits enforced; integration tests cover error codes. |
| AIAI-31-007 | TODO | Advisory AI Guild, Observability Guild | AIAI-31-004..006 | Instrument metrics (`advisory_ai_latency`, `guardrail_blocks`, `validation_failures`, `citation_coverage`), logs, and traces; publish dashboards/alerts. | Telemetry live; dashboards approved; alerts configured. |
| AIAI-31-007 | DONE (2025-11-06) | Advisory AI Guild, Observability Guild | AIAI-31-004..006 | Instrument metrics (`advisory_ai_latency`, `guardrail_blocks`, `validation_failures`, `citation_coverage`), logs, and traces; publish dashboards/alerts. | Telemetry live; dashboards approved; alerts configured. |
| AIAI-31-008 | TODO | Advisory AI Guild, DevOps Guild | AIAI-31-006..007 | Package inference on-prem container, remote inference toggle, Helm/Compose manifests, scaling guidance, offline kit instructions. | Deployment docs merged; smoke deploy executed; offline kit updated; feature flags documented. |
| AIAI-31-010 | DONE (2025-11-02) | Advisory AI Guild | CONCELIER-VULN-29-001, EXCITITOR-VULN-29-001 | Implement Concelier advisory raw document provider mapping CSAF/OSV payloads into structured chunks for retrieval. | Provider resolves content format, preserves metadata, and passes unit tests covering CSAF/OSV cases. |
| AIAI-31-011 | DONE (2025-11-02) | Advisory AI Guild | EXCITITOR-LNM-21-201, EXCITITOR-CORE-AOC-19-002 | Implement Excititor VEX document provider to surface structured VEX statements for vector retrieval. | Provider returns conflict-aware VEX chunks with deterministic metadata and tests for representative statements. |
@@ -31,3 +31,5 @@
> 2025-11-04: AIAI-31-005 DONE guardrail pipeline redacts secrets, enforces citation/injection policies, emits block counters, and tests (`AdvisoryGuardrailPipelineTests`) cover redaction + citation validation.
> 2025-11-04: AIAI-31-006 DONE REST endpoints enforce header scopes, apply token bucket rate limiting, sanitize prompts via guardrails, and queue execution with cached metadata. Tests executed via `dotnet test src/AdvisoryAI/__Tests/StellaOps.AdvisoryAI.Tests/StellaOps.AdvisoryAI.Tests.csproj --no-restore`.
> 2025-11-06: AIAI-31-004B/C Resuming prompt/cache hardening and CLI integration; first focus on backend client wiring and deterministic CLI outputs before full suite.
> 2025-11-06: AIAI-31-004B/C DONE Advisory AI Mongo integration validated, backend client + CLI `advise run` wired, deterministic console renderer with provenance/guardrail display added, docs refreshed, and targeted CLI tests executed.

View File

@@ -1,3 +1,4 @@
using System;
using System.Collections.Generic;
using System.Collections.Immutable;
using System.Diagnostics.Metrics;
@@ -65,6 +66,58 @@ public sealed class AdvisoryPipelineExecutorTests : IDisposable
saved.Prompt.Should().Be("{\"prompt\":\"value\"}");
}
[Fact]
public async Task ExecuteAsync_RecordsTelemetryMeasurements()
{
    // Subscribe to every instrument on the pipeline meter so both counter (long)
    // and histogram (double) measurements can be captured for assertions.
    using var listener = new MeterListener();
    var doubleMeasurements = new List<(string Name, double Value, IEnumerable<KeyValuePair<string, object?>> Tags)>();
    var longMeasurements = new List<(string Name, long Value, IEnumerable<KeyValuePair<string, object?>> Tags)>();
    listener.InstrumentPublished = (instrument, l) =>
    {
        if (instrument.Meter.Name == AdvisoryPipelineMetrics.MeterName)
        {
            l.EnableMeasurementEvents(instrument);
        }
    };
    listener.SetMeasurementEventCallback<double>((instrument, measurement, tags, state) =>
    {
        doubleMeasurements.Add((instrument.Name, measurement, tags));
    });
    listener.SetMeasurementEventCallback<long>((instrument, measurement, tags, state) =>
    {
        longMeasurements.Add((instrument.Name, measurement, tags));
    });
    listener.Start();

    // Execute with a guardrail stub configured to block the output.
    var plan = BuildMinimalPlan(cacheKey: "CACHE-3");
    var assembler = new StubPromptAssembler();
    var guardrail = new StubGuardrailPipeline(blocked: true);
    var store = new InMemoryAdvisoryOutputStore();
    using var metrics = new AdvisoryPipelineMetrics(_meterFactory);
    var executor = new AdvisoryPipelineExecutor(assembler, guardrail, store, metrics, TimeProvider.System);
    var message = new AdvisoryTaskQueueMessage(plan.CacheKey, plan.Request);
    await executor.ExecuteAsync(plan, message, planFromCache: false, CancellationToken.None);
    listener.Dispose();

    // Expect one guardrail block, one validation failure, and a coverage sample of ~1.0.
    longMeasurements.Should().Contain(measurement =>
        measurement.Name == "advisory_ai_guardrail_blocks_total" &&
        measurement.Value == 1);
    longMeasurements.Should().Contain(measurement =>
        measurement.Name == "advisory_ai_validation_failures_total" &&
        measurement.Value == 1);
    doubleMeasurements.Should().Contain(measurement =>
        measurement.Name == "advisory_ai_citation_coverage_ratio" &&
        Math.Abs(measurement.Value - 1d) < 0.0001);
}
private static AdvisoryTaskPlan BuildMinimalPlan(string cacheKey)
{
var request = new AdvisoryTaskRequest(

View File

@@ -0,0 +1,176 @@
using System;
using System.Collections.Generic;
using System.Collections.Immutable;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
using FluentAssertions;
using Microsoft.Extensions.Logging.Abstractions;
using Microsoft.Extensions.Options;
using StellaOps.AdvisoryAI.Caching;
using StellaOps.AdvisoryAI.Context;
using StellaOps.AdvisoryAI.Documents;
using StellaOps.AdvisoryAI.Guardrails;
using StellaOps.AdvisoryAI.Hosting;
using StellaOps.AdvisoryAI.Outputs;
using StellaOps.AdvisoryAI.Orchestration;
using StellaOps.AdvisoryAI.Tools;
using Xunit;
namespace StellaOps.AdvisoryAI.Tests;
/// <summary>
/// Round-trip tests for the file-system-backed advisory plan cache and output store:
/// a plan/output written to disk must be readable back with its key fields intact.
/// </summary>
public sealed class FileSystemAdvisoryPersistenceTests : IDisposable
{
    // Per-test scratch directory; removed (best effort) in Dispose.
    private readonly TempDirectory _tempDir = new();

    [Fact]
    public async Task PlanCache_PersistsPlanOnDisk()
    {
        // Route plan-cache and output storage into the scratch directory.
        var serviceOptions = Options.Create(new AdvisoryAiServiceOptions
        {
            Storage = new AdvisoryAiStorageOptions
            {
                PlanCacheDirectory = Path.Combine(_tempDir.Path, "plans"),
                OutputDirectory = Path.Combine(_tempDir.Path, "outputs")
            }
        });
        var cacheOptions = Options.Create(new AdvisoryPlanCacheOptions
        {
            DefaultTimeToLive = TimeSpan.FromMinutes(5),
            CleanupInterval = TimeSpan.FromMinutes(5)
        });
        var cache = new FileSystemAdvisoryPlanCache(serviceOptions, cacheOptions, NullLogger<FileSystemAdvisoryPlanCache>.Instance);
        var plan = CreatePlan("cache-123");
        await cache.SetAsync(plan.CacheKey, plan, CancellationToken.None);
        var reloaded = await cache.TryGetAsync(plan.CacheKey, CancellationToken.None);
        // The plan read back from disk must preserve identity, chunk count, and metadata.
        reloaded.Should().NotBeNull();
        reloaded!.CacheKey.Should().Be(plan.CacheKey);
        reloaded.Request.AdvisoryKey.Should().Be(plan.Request.AdvisoryKey);
        reloaded.StructuredChunks.Length.Should().Be(plan.StructuredChunks.Length);
        reloaded.Metadata.Should().ContainKey("advisory_key").WhoseValue.Should().Be("adv-key");
    }

    [Fact]
    public async Task OutputStore_PersistsOutputOnDisk()
    {
        var serviceOptions = Options.Create(new AdvisoryAiServiceOptions
        {
            Storage = new AdvisoryAiStorageOptions
            {
                PlanCacheDirectory = Path.Combine(_tempDir.Path, "plans"),
                OutputDirectory = Path.Combine(_tempDir.Path, "outputs")
            }
        });
        var store = new FileSystemAdvisoryOutputStore(serviceOptions, NullLogger<FileSystemAdvisoryOutputStore>.Instance);
        var plan = CreatePlan("cache-abc");
        var prompt = "{\"prompt\":\"value\"}";
        // Guardrail result marked as allowed for this prompt.
        var guardrail = AdvisoryGuardrailResult.Allowed(prompt);
        var output = new AdvisoryPipelineOutput(
            plan.CacheKey,
            plan.Request.TaskType,
            plan.Request.Profile,
            prompt,
            ImmutableArray.Create(new AdvisoryPromptCitation(1, "doc-1", "chunk-1")),
            ImmutableDictionary<string, string>.Empty.Add("advisory_key", plan.Request.AdvisoryKey),
            guardrail,
            new AdvisoryDsseProvenance(plan.CacheKey, "hash", ImmutableArray<string>.Empty),
            DateTimeOffset.UtcNow,
            planFromCache: false);
        await store.SaveAsync(output, CancellationToken.None);
        var reloaded = await store.TryGetAsync(plan.CacheKey, plan.Request.TaskType, plan.Request.Profile, CancellationToken.None);
        // Prompt text and metadata must survive the round trip.
        reloaded.Should().NotBeNull();
        reloaded!.Prompt.Should().Be(prompt);
        reloaded.Metadata.Should().ContainKey("advisory_key").WhoseValue.Should().Be(plan.Request.AdvisoryKey);
    }

    // Builds a minimal but fully-populated plan (request, structured chunk, vector
    // result, SBOM context, dependency analysis, budget, metadata) for persistence tests.
    private static AdvisoryTaskPlan CreatePlan(string cacheKey)
    {
        var request = new AdvisoryTaskRequest(
            AdvisoryTaskType.Summary,
            advisoryKey: "adv-key",
            artifactId: "artifact-1",
            artifactPurl: "pkg:docker/sample@1.0.0",
            policyVersion: "policy-1",
            profile: "default",
            preferredSections: new[] { "Summary" },
            forceRefresh: false);
        var chunk = AdvisoryChunk.Create("doc-1", "doc-1:chunk-1", "Summary", "para-1", "Summary text", new Dictionary<string, string> { ["section"] = "Summary" });
        var structured = ImmutableArray.Create(chunk);
        var vectorMatch = new VectorRetrievalMatch("doc-1", "doc-1:chunk-1", "Summary text", 0.95, new Dictionary<string, string>());
        var vectorResult = new AdvisoryVectorResult("summary-query", ImmutableArray.Create(vectorMatch));
        var sbom = SbomContextResult.Create(
            "artifact-1",
            "pkg:docker/sample@1.0.0",
            new[]
            {
                new SbomVersionTimelineEntry("1.0.0", DateTimeOffset.UtcNow.AddDays(-10), null, "affected", "scanner")
            },
            new[]
            {
                new SbomDependencyPath(
                    new[]
                    {
                        new SbomDependencyNode("root", "1.0.0"),
                        new SbomDependencyNode("runtime-lib", "2.1.0")
                    },
                    isRuntime: true)
            });
        var dependency = DependencyAnalysisResult.Create(
            "artifact-1",
            new[]
            {
                new DependencyNodeSummary("runtime-lib", new[] { "2.1.0" }, 1, 0)
            },
            new Dictionary<string, string> { ["artifact_id"] = "artifact-1" });
        var metadata = ImmutableDictionary<string, string>.Empty.Add("advisory_key", "adv-key");
        var budget = new AdvisoryTaskBudget { PromptTokens = 1024, CompletionTokens = 256 };
        return new AdvisoryTaskPlan(
            request,
            cacheKey,
            promptTemplate: "prompts/advisory/summary.liquid",
            structured,
            ImmutableArray.Create(vectorResult),
            sbom,
            dependency,
            budget,
            metadata);
    }

    public void Dispose()
    {
        _tempDir.Dispose();
    }

    // Creates a unique temp directory under the system temp path and deletes it
    // (best effort) on Dispose.
    private sealed class TempDirectory : IDisposable
    {
        public TempDirectory()
        {
            Path = System.IO.Path.Combine(System.IO.Path.GetTempPath(), $"advisory-ai-tests-{Guid.NewGuid():N}");
            Directory.CreateDirectory(Path);
        }

        public string Path { get; }

        public void Dispose()
        {
            try
            {
                if (Directory.Exists(Path))
                {
                    Directory.Delete(Path, recursive: true);
                }
            }
            catch
            {
                // ignore cleanup failures in tests
            }
        }
    }
}

View File

@@ -44,11 +44,9 @@ public static class AocHttpResults
throw new ArgumentNullException(nameof(exception));
}
var primaryCode = exception.Result.Violations.IsDefaultOrEmpty
? "ERR_AOC_000"
: exception.Result.Violations[0].ErrorCode;
var error = AocError.FromException(exception, detail);
var violationPayload = exception.Result.Violations
var violationPayload = error.Violations
.Select(v => new Dictionary<string, object?>(StringComparer.Ordinal)
{
["code"] = v.ErrorCode,
@@ -59,8 +57,9 @@ public static class AocHttpResults
var extensionPayload = new Dictionary<string, object?>(StringComparer.Ordinal)
{
["code"] = primaryCode,
["violations"] = violationPayload
["code"] = error.Code,
["violations"] = violationPayload,
["error"] = error
};
if (extensions is not null)
@@ -71,9 +70,9 @@ public static class AocHttpResults
}
}
var statusCode = status ?? MapErrorCodeToStatus(primaryCode);
var statusCode = status ?? MapErrorCodeToStatus(error.Code);
var problemType = type ?? DefaultProblemType;
var problemDetail = detail ?? $"AOC guard rejected the request with {primaryCode}.";
var problemDetail = detail ?? error.Message;
var problemTitle = title ?? "Aggregation-Only Contract violation";
return HttpResults.Problem(

View File

@@ -0,0 +1,37 @@
using System;
using System.Collections.Immutable;
using System.Text.Json.Serialization;
namespace StellaOps.Aoc;
/// <summary>
/// Represents a structured Aggregation-Only Contract error payload.
/// </summary>
public sealed record AocError(
    [property: JsonPropertyName("code")] string Code,
    [property: JsonPropertyName("message")] string Message,
    [property: JsonPropertyName("violations")] ImmutableArray<AocViolation> Violations)
{
    /// <summary>
    /// Builds an <see cref="AocError"/> from a guard result. The top-level code is the
    /// first violation's error code, or "ERR_AOC_000" when there are no violations.
    /// </summary>
    /// <param name="result">Guard result to project into an error payload.</param>
    /// <param name="message">Optional override for the human-readable message.</param>
    /// <exception cref="ArgumentNullException">Thrown when <paramref name="result"/> is null.</exception>
    public static AocError FromResult(AocGuardResult result, string? message = null)
    {
        // Throw-helper form keeps the same ArgumentNullException contract as before.
        ArgumentNullException.ThrowIfNull(result);

        var violations = result.Violations;
        var code = violations.IsDefaultOrEmpty ? "ERR_AOC_000" : violations[0].ErrorCode;
        var resolvedMessage = message ?? $"AOC guard rejected the payload with {code}.";
        return new(code, resolvedMessage, violations);
    }

    /// <summary>
    /// Builds an <see cref="AocError"/> from a guard exception's embedded result.
    /// </summary>
    /// <param name="exception">Exception carrying the guard result.</param>
    /// <param name="message">Optional override for the human-readable message.</param>
    /// <exception cref="ArgumentNullException">Thrown when <paramref name="exception"/> is null.</exception>
    public static AocError FromException(AocGuardException exception, string? message = null)
    {
        ArgumentNullException.ThrowIfNull(exception);
        return FromResult(exception.Result, message);
    }
}

View File

@@ -1,29 +1,49 @@
using System.Collections.Immutable;
using System.Collections.Immutable;
using System.Linq;
namespace StellaOps.Aoc;
public sealed record AocGuardOptions
{
private static readonly ImmutableHashSet<string> DefaultRequiredTopLevel = new[]
{
"tenant",
"source",
"upstream",
"content",
"linkset",
}.ToImmutableHashSet(StringComparer.OrdinalIgnoreCase);
public static AocGuardOptions Default { get; } = new();
public ImmutableHashSet<string> RequiredTopLevelFields { get; init; } = DefaultRequiredTopLevel;
/// <summary>
/// When true, signature metadata is required under upstream.signature.
/// </summary>
public bool RequireSignatureMetadata { get; init; } = true;
/// <summary>
/// When true, tenant must be a non-empty string.
/// </summary>
public bool RequireTenant { get; init; } = true;
}
/// <summary>
/// Configuration for the AOC write guard: required/allowed top-level fields and
/// signature/tenant enforcement toggles.
/// </summary>
public sealed record AocGuardOptions
{
    // Top-level fields every AOC document must carry (case-insensitive).
    private static readonly ImmutableHashSet<string> DefaultRequiredTopLevel =
        ImmutableHashSet.Create(
            StringComparer.OrdinalIgnoreCase,
            "tenant",
            "source",
            "upstream",
            "content",
            "linkset");

    // Required fields plus bookkeeping fields tolerated at the top level.
    private static readonly ImmutableHashSet<string> DefaultAllowedTopLevel =
        DefaultRequiredTopLevel.Union(new[]
        {
            "_id",
            "identifiers",
            "attributes",
            "supersedes",
            "createdAt",
            "created_at",
            "ingestedAt",
            "ingested_at"
        });

    public static AocGuardOptions Default { get; } = new();

    public ImmutableHashSet<string> RequiredTopLevelFields { get; init; } = DefaultRequiredTopLevel;

    /// <summary>
    /// Optional allowlist for top-level fields. Unknown fields trigger ERR_AOC_007.
    /// </summary>
    public ImmutableHashSet<string> AllowedTopLevelFields { get; init; } = DefaultAllowedTopLevel;

    /// <summary>
    /// When true, signature metadata is required under upstream.signature.
    /// </summary>
    public bool RequireSignatureMetadata { get; init; } = true;

    /// <summary>
    /// When true, tenant must be a non-empty string.
    /// </summary>
    public bool RequireTenant { get; init; } = true;
}

View File

@@ -11,16 +11,17 @@ public interface IAocGuard
public sealed class AocWriteGuard : IAocGuard
{
public AocGuardResult Validate(JsonElement document, AocGuardOptions? options = null)
{
options ??= AocGuardOptions.Default;
var violations = ImmutableArray.CreateBuilder<AocViolation>();
var presentTopLevel = new HashSet<string>(StringComparer.OrdinalIgnoreCase);
foreach (var property in document.EnumerateObject())
{
presentTopLevel.Add(property.Name);
public AocGuardResult Validate(JsonElement document, AocGuardOptions? options = null)
{
options ??= AocGuardOptions.Default;
var violations = ImmutableArray.CreateBuilder<AocViolation>();
var presentTopLevel = new HashSet<string>(StringComparer.OrdinalIgnoreCase);
var allowedTopLevelFields = options.AllowedTopLevelFields ?? AocGuardOptions.Default.AllowedTopLevelFields;
foreach (var property in document.EnumerateObject())
{
presentTopLevel.Add(property.Name);
if (AocForbiddenKeys.IsForbiddenTopLevel(property.Name))
{
violations.Add(AocViolation.Create(AocViolationCode.ForbiddenField, $"/{property.Name}", $"Field '{property.Name}' is forbidden in AOC documents."));
@@ -28,14 +29,20 @@ public sealed class AocWriteGuard : IAocGuard
}
if (AocForbiddenKeys.IsDerivedField(property.Name))
{
violations.Add(AocViolation.Create(AocViolationCode.DerivedFindingDetected, $"/{property.Name}", $"Derived field '{property.Name}' must not be written during ingestion."));
}
}
foreach (var required in options.RequiredTopLevelFields)
{
if (!document.TryGetProperty(required, out var element) || element.ValueKind is JsonValueKind.Null or JsonValueKind.Undefined)
{
violations.Add(AocViolation.Create(AocViolationCode.DerivedFindingDetected, $"/{property.Name}", $"Derived field '{property.Name}' must not be written during ingestion."));
}
if (!allowedTopLevelFields.Contains(property.Name))
{
violations.Add(AocViolation.Create(AocViolationCode.UnknownField, $"/{property.Name}", $"Field '{property.Name}' is not allowed in AOC documents."));
continue;
}
}
foreach (var required in options.RequiredTopLevelFields)
{
if (!document.TryGetProperty(required, out var element) || element.ValueKind is JsonValueKind.Null or JsonValueKind.Undefined)
{
violations.Add(AocViolation.Create(AocViolationCode.MissingRequiredField, $"/{required}", $"Required field '{required}' is missing."));
continue;

View File

@@ -45,5 +45,10 @@ public sealed class AocHttpResultsTests
Assert.Equal(2, violationsJson.GetArrayLength());
Assert.Equal("ERR_AOC_004", violationsJson[0].GetProperty("code").GetString());
Assert.Equal("/upstream", violationsJson[0].GetProperty("path").GetString());
var errorJson = root.GetProperty("error");
Assert.Equal("ERR_AOC_004", errorJson.GetProperty("code").GetString());
Assert.Equal(2, errorJson.GetProperty("violations").GetArrayLength());
Assert.False(string.IsNullOrWhiteSpace(errorJson.GetProperty("message").GetString()));
}
}

View File

@@ -0,0 +1,44 @@
using System.Collections.Immutable;
using StellaOps.Aoc;
namespace StellaOps.Aoc.Tests;
/// <summary>
/// Unit tests for the <see cref="AocError"/> factory methods.
/// </summary>
public sealed class AocErrorTests
{
    [Fact]
    public void FromResult_UsesFirstViolationCode()
    {
        var violations = ImmutableArray.Create(
            AocViolation.Create(AocViolationCode.MissingProvenance, "/upstream", "Missing"),
            AocViolation.Create(AocViolationCode.ForbiddenField, "/severity", "Forbidden"));
        var result = AocGuardResult.FromViolations(violations);
        var error = AocError.FromResult(result);
        // The first violation's code becomes the top-level error code; violations pass through.
        Assert.Equal("ERR_AOC_004", error.Code);
        Assert.Equal(violations, error.Violations);
    }

    [Fact]
    public void FromResult_DefaultsWhenNoViolations()
    {
        var error = AocError.FromResult(AocGuardResult.Success);
        // With no violations the sentinel ERR_AOC_000 is used and echoed in the message.
        Assert.Equal("ERR_AOC_000", error.Code);
        Assert.Contains("ERR_AOC_000", error.Message, StringComparison.OrdinalIgnoreCase);
    }

    [Fact]
    public void FromException_UsesCustomMessage()
    {
        var violations = ImmutableArray.Create(
            AocViolation.Create(AocViolationCode.ForbiddenField, "/severity", "Forbidden"));
        var exception = new AocGuardException(AocGuardResult.FromViolations(violations));
        var error = AocError.FromException(exception, "custom");
        // A caller-supplied message overrides the default; the code still comes from the violation.
        Assert.Equal("custom", error.Message);
        Assert.Equal("ERR_AOC_001", error.Code);
    }
}

View File

@@ -59,16 +59,17 @@ public sealed class AocWriteGuardTests
}
[Fact]
public void Validate_FlagsForbiddenField()
{
using var document = JsonDocument.Parse("""
{
"tenant": "default",
"severity": "high",
"source": {"vendor": "osv"},
"upstream": {
"upstream_id": "GHSA-xxxx",
"content_hash": "sha256:abc",
public void Validate_FlagsForbiddenField()
{
using var document = JsonDocument.Parse("""
{
"tenant": "default",
"identifiers": {},
"severity": "high",
"source": {"vendor": "osv"},
"upstream": {
"upstream_id": "GHSA-xxxx",
"content_hash": "sha256:abc",
"signature": { "present": false }
},
"content": {
@@ -81,16 +82,74 @@ public sealed class AocWriteGuardTests
var result = Guard.Validate(document.RootElement);
Assert.False(result.IsValid);
Assert.Contains(result.Violations, v => v.ErrorCode == "ERR_AOC_001" && v.Path == "/severity");
}
[Fact]
public void Validate_FlagsInvalidSignatureMetadata()
{
using var document = JsonDocument.Parse("""
{
"tenant": "default",
Assert.False(result.IsValid);
Assert.Contains(result.Violations, v => v.ErrorCode == "ERR_AOC_001" && v.Path == "/severity");
}
// A top-level field outside the allowlist ("custom_field") must be rejected
// with ERR_AOC_007 pointing at the offending path.
[Fact]
public void Validate_FlagsUnknownField()
{
using var document = JsonDocument.Parse("""
{
"tenant": "default",
"source": {"vendor": "osv"},
"upstream": {
"upstream_id": "GHSA-xxxx",
"content_hash": "sha256:abc",
"signature": { "present": false }
},
"content": {
"format": "OSV",
"raw": {"id": "GHSA-xxxx"}
},
"linkset": {},
"custom_field": {"extra": true}
}
""");
// Default options apply the built-in top-level allowlist.
var result = Guard.Validate(document.RootElement);
Assert.False(result.IsValid);
Assert.Contains(result.Violations, v => v.ErrorCode == "ERR_AOC_007" && v.Path == "/custom_field");
}
// The same "custom_field" document passes once the field is added to the
// configured AllowedTopLevelFields allowlist.
[Fact]
public void Validate_AllowsCustomField_WhenConfigured()
{
using var document = JsonDocument.Parse("""
{
"tenant": "default",
"source": {"vendor": "osv"},
"upstream": {
"upstream_id": "GHSA-xxxx",
"content_hash": "sha256:abc",
"signature": { "present": false }
},
"content": {
"format": "OSV",
"raw": {"id": "GHSA-xxxx"}
},
"linkset": {},
"custom_field": {"extra": true}
}
""");
// Extend the default allowlist rather than replacing it.
var options = new AocGuardOptions
{
AllowedTopLevelFields = AocGuardOptions.Default.AllowedTopLevelFields.Add("custom_field")
};
var result = Guard.Validate(document.RootElement, options);
Assert.True(result.IsValid);
}
[Fact]
public void Validate_FlagsInvalidSignatureMetadata()
{
using var document = JsonDocument.Parse("""
{
"tenant": "default",
"source": {"vendor": "osv"},
"upstream": {
"upstream_id": "GHSA-xxxx",

View File

@@ -4,7 +4,8 @@ using System.Threading;
using System.Threading.Tasks;
using Microsoft.Extensions.Logging;
using StellaOps.Cli.Configuration;
using StellaOps.Cli.Plugins;
using StellaOps.Cli.Plugins;
using StellaOps.Cli.Services.Models.AdvisoryAi;
namespace StellaOps.Cli.Commands;
@@ -35,12 +36,13 @@ internal static class CommandFactory
root.Add(BuildSourcesCommand(services, verboseOption, cancellationToken));
root.Add(BuildAocCommand(services, verboseOption, cancellationToken));
root.Add(BuildAuthCommand(services, options, verboseOption, cancellationToken));
root.Add(BuildPolicyCommand(services, options, verboseOption, cancellationToken));
root.Add(BuildTaskRunnerCommand(services, verboseOption, cancellationToken));
root.Add(BuildFindingsCommand(services, verboseOption, cancellationToken));
root.Add(BuildConfigCommand(options));
root.Add(BuildKmsCommand(services, verboseOption, cancellationToken));
root.Add(BuildVulnCommand(services, verboseOption, cancellationToken));
root.Add(BuildPolicyCommand(services, options, verboseOption, cancellationToken));
root.Add(BuildTaskRunnerCommand(services, verboseOption, cancellationToken));
root.Add(BuildFindingsCommand(services, verboseOption, cancellationToken));
root.Add(BuildAdviseCommand(services, options, verboseOption, cancellationToken));
root.Add(BuildConfigCommand(options));
root.Add(BuildKmsCommand(services, verboseOption, cancellationToken));
root.Add(BuildVulnCommand(services, verboseOption, cancellationToken));
var pluginLogger = loggerFactory.CreateLogger<CliCommandModuleLoader>();
var pluginLoader = new CliCommandModuleLoader(services, options, pluginLogger);
@@ -733,7 +735,7 @@ internal static class CommandFactory
var activateVersionOption = new Option<int>("--version")
{
Description = "Revision version to activate.",
IsRequired = true
Arity = ArgumentArity.ExactlyOne
};
var activationNoteOption = new Option<string?>("--note")
@@ -809,11 +811,11 @@ internal static class CommandFactory
var taskRunner = new Command("task-runner", "Interact with Task Runner operations.");
var simulate = new Command("simulate", "Simulate a task pack and inspect the execution graph.");
var manifestOption = new Option<string>("--manifest")
{
Description = "Path to the task pack manifest (YAML).",
IsRequired = true
};
var manifestOption = new Option<string>("--manifest")
{
Description = "Path to the task pack manifest (YAML).",
Arity = ArgumentArity.ExactlyOne
};
var inputsOption = new Option<string?>("--inputs")
{
Description = "Optional JSON file containing Task Pack input values."
@@ -1042,13 +1044,110 @@ internal static class CommandFactory
cancellationToken);
});
findings.Add(list);
findings.Add(get);
findings.Add(explain);
return findings;
}
private static Command BuildVulnCommand(IServiceProvider services, Option<bool> verboseOption, CancellationToken cancellationToken)
findings.Add(list);
findings.Add(get);
findings.Add(explain);
return findings;
}
/// <summary>
/// Builds the <c>advise</c> command group, currently exposing a single <c>advise run</c>
/// subcommand that requests an Advisory AI pipeline plan and waits for its output.
/// </summary>
/// <param name="services">Root service provider passed through to the command handler.</param>
/// <param name="options">CLI options; currently unused (discarded) but kept for signature parity with sibling Build*Command factories.</param>
/// <param name="verboseOption">Shared <c>--verbose</c> option used to raise log verbosity.</param>
/// <param name="cancellationToken">Token propagated to the async handler.</param>
private static Command BuildAdviseCommand(IServiceProvider services, StellaOpsCliOptions options, Option<bool> verboseOption, CancellationToken cancellationToken)
{
    var advise = new Command("advise", "Interact with Advisory AI pipelines.");
    _ = options; // Intentionally unused for now; see summary above.
    var run = new Command("run", "Generate Advisory AI output for the specified task.");
    // Positional task selector; validated against AdvisoryAiTaskType in the action below.
    var taskArgument = new Argument<string>("task")
    {
        Description = "Task to run (summary, conflict, remediation)."
    };
    run.Add(taskArgument);
    var advisoryKeyOption = new Option<string>("--advisory-key")
    {
        Description = "Advisory identifier to summarise (required).",
        // NOTE(review): other commands in this file enforce presence via Arity — confirm
        // `Required` is the convention for the System.CommandLine version in use.
        Required = true
    };
    var artifactIdOption = new Option<string?>("--artifact-id")
    {
        Description = "Optional artifact identifier to scope SBOM context."
    };
    var artifactPurlOption = new Option<string?>("--artifact-purl")
    {
        Description = "Optional package URL to scope dependency context."
    };
    var policyVersionOption = new Option<string?>("--policy-version")
    {
        Description = "Policy revision to evaluate (defaults to current)."
    };
    var profileOption = new Option<string?>("--profile")
    {
        Description = "Advisory AI execution profile (default, fips-local, etc.)."
    };
    // Repeatable option: `--section a --section b` or `--section a b`.
    var sectionOption = new Option<string[]>("--section")
    {
        Description = "Preferred context sections to emphasise (repeatable).",
        Arity = ArgumentArity.ZeroOrMore
    };
    sectionOption.AllowMultipleArgumentsPerToken = true;
    var forceRefreshOption = new Option<bool>("--force-refresh")
    {
        Description = "Bypass cached plan/output and recompute."
    };
    var timeoutOption = new Option<int?>("--timeout")
    {
        Description = "Seconds to wait for generated output before timing out (0 = single attempt)."
    };
    timeoutOption.Arity = ArgumentArity.ZeroOrOne;
    run.Add(advisoryKeyOption);
    run.Add(artifactIdOption);
    run.Add(artifactPurlOption);
    run.Add(policyVersionOption);
    run.Add(profileOption);
    run.Add(sectionOption);
    run.Add(forceRefreshOption);
    run.Add(timeoutOption);
    run.SetAction((parseResult, _) =>
    {
        var taskValue = parseResult.GetValue(taskArgument);
        var advisoryKey = parseResult.GetValue(advisoryKeyOption) ?? string.Empty;
        var artifactId = parseResult.GetValue(artifactIdOption);
        var artifactPurl = parseResult.GetValue(artifactPurlOption);
        var policyVersion = parseResult.GetValue(policyVersionOption);
        var profile = parseResult.GetValue(profileOption) ?? "default";
        var sections = parseResult.GetValue(sectionOption) ?? Array.Empty<string>();
        var forceRefresh = parseResult.GetValue(forceRefreshOption);
        // Default polling window is 120 seconds when --timeout is omitted.
        var timeoutSeconds = parseResult.GetValue(timeoutOption) ?? 120;
        var verbose = parseResult.GetValue(verboseOption);
        if (!Enum.TryParse<AdvisoryAiTaskType>(taskValue, ignoreCase: true, out var taskType))
        {
            throw new InvalidOperationException($"Unknown advisory task '{taskValue}'. Expected summary, conflict, or remediation.");
        }
        return CommandHandlers.HandleAdviseRunAsync(
            services,
            taskType,
            advisoryKey,
            artifactId,
            artifactPurl,
            policyVersion,
            profile,
            sections,
            forceRefresh,
            timeoutSeconds,
            verbose,
            cancellationToken);
    });
    advise.Add(run);
    return advise;
}
private static Command BuildVulnCommand(IServiceProvider services, Option<bool> verboseOption, CancellationToken cancellationToken)
{
var vuln = new Command("vuln", "Explore vulnerability observations and overlays.");

View File

@@ -24,9 +24,10 @@ using Spectre.Console.Rendering;
using StellaOps.Auth.Client;
using StellaOps.Cli.Configuration;
using StellaOps.Cli.Prompts;
using StellaOps.Cli.Services;
using StellaOps.Cli.Services.Models;
using StellaOps.Cli.Telemetry;
using StellaOps.Cli.Services;
using StellaOps.Cli.Services.Models;
using StellaOps.Cli.Services.Models.AdvisoryAi;
using StellaOps.Cli.Telemetry;
using StellaOps.Cryptography;
using StellaOps.Cryptography.Kms;
@@ -426,14 +427,154 @@ internal static class CommandHandlers
{
verbosity.MinimumLevel = previousLevel;
}
}
public static async Task HandleSourcesIngestAsync(
IServiceProvider services,
bool dryRun,
string source,
string input,
}
/// <summary>
/// Handles <c>advise run</c>: submits an Advisory AI pipeline plan request, renders the plan,
/// then polls (once per second) for the generated output until it arrives or the timeout elapses.
/// </summary>
/// <param name="services">Service provider used to resolve the backend client, logger, and verbosity state.</param>
/// <param name="taskType">Advisory task to execute (summary, conflict, remediation).</param>
/// <param name="advisoryKey">Advisory identifier; trimmed, must be non-blank or an ArgumentException is thrown.</param>
/// <param name="artifactId">Optional artifact identifier forwarded to the plan request.</param>
/// <param name="artifactPurl">Optional package URL forwarded to the plan request.</param>
/// <param name="policyVersion">Optional policy revision forwarded to the plan request.</param>
/// <param name="profile">Execution profile; blank falls back to "default".</param>
/// <param name="preferredSections">Section hints; normalized (trimmed, de-duplicated) before sending.</param>
/// <param name="forceRefresh">When true, asks the service to bypass cached plan/output.</param>
/// <param name="timeoutSeconds">Polling window in seconds; 0 (or negative) means a single fetch attempt.</param>
/// <param name="verbose">Raises log verbosity to Debug for the duration of the call.</param>
/// <param name="cancellationToken">Cancels the plan request, polling loop, and delays.</param>
/// <remarks>
/// Exit codes: 0 success, 65 guardrail-blocked, 70 timeout, 130 cancelled, 1 unexpected error.
/// A non-zero pre-existing Environment.ExitCode is never overwritten.
/// </remarks>
public static async Task HandleAdviseRunAsync(
    IServiceProvider services,
    AdvisoryAiTaskType taskType,
    string advisoryKey,
    string? artifactId,
    string? artifactPurl,
    string? policyVersion,
    string profile,
    IReadOnlyList<string> preferredSections,
    bool forceRefresh,
    int timeoutSeconds,
    bool verbose,
    CancellationToken cancellationToken)
{
    await using var scope = services.CreateAsyncScope();
    var client = scope.ServiceProvider.GetRequiredService<IBackendOperationsClient>();
    var logger = scope.ServiceProvider.GetRequiredService<ILoggerFactory>().CreateLogger("advise-run");
    var verbosity = scope.ServiceProvider.GetRequiredService<VerbosityState>();
    // Temporarily bump verbosity; restored in the finally block below.
    var previousLevel = verbosity.MinimumLevel;
    verbosity.MinimumLevel = verbose ? LogLevel.Debug : LogLevel.Information;
    using var activity = CliActivitySource.Instance.StartActivity("cli.advisory.run", ActivityKind.Client);
    activity?.SetTag("stellaops.cli.command", "advise run");
    activity?.SetTag("stellaops.cli.task", taskType.ToString());
    using var duration = CliMetrics.MeasureCommandDuration("advisory run");
    activity?.SetTag("stellaops.cli.force_refresh", forceRefresh);
    // Outcome label recorded in metrics/telemetry; overwritten on every path.
    var outcome = "error";
    try
    {
        var normalizedKey = advisoryKey?.Trim();
        if (string.IsNullOrWhiteSpace(normalizedKey))
        {
            throw new ArgumentException("Advisory key is required.", nameof(advisoryKey));
        }
        activity?.SetTag("stellaops.cli.advisory.key", normalizedKey);
        var normalizedProfile = string.IsNullOrWhiteSpace(profile) ? "default" : profile.Trim();
        activity?.SetTag("stellaops.cli.profile", normalizedProfile);
        var normalizedSections = NormalizeSections(preferredSections);
        var request = new AdvisoryPipelinePlanRequestModel
        {
            TaskType = taskType,
            AdvisoryKey = normalizedKey,
            ArtifactId = string.IsNullOrWhiteSpace(artifactId) ? null : artifactId!.Trim(),
            ArtifactPurl = string.IsNullOrWhiteSpace(artifactPurl) ? null : artifactPurl!.Trim(),
            PolicyVersion = string.IsNullOrWhiteSpace(policyVersion) ? null : policyVersion!.Trim(),
            Profile = normalizedProfile,
            PreferredSections = normalizedSections.Length > 0 ? normalizedSections : null,
            ForceRefresh = forceRefresh
        };
        logger.LogInformation("Requesting advisory plan for {TaskType} (advisory={AdvisoryKey}).", taskType, normalizedKey);
        var plan = await client.CreateAdvisoryPipelinePlanAsync(taskType, request, cancellationToken).ConfigureAwait(false);
        activity?.SetTag("stellaops.cli.advisory.cache_key", plan.CacheKey);
        RenderAdvisoryPlan(plan);
        logger.LogInformation("Plan {CacheKey} queued with {Chunks} chunks and {Vectors} vectors.",
            plan.CacheKey,
            plan.Chunks.Count,
            plan.Vectors.Count);
        // Poll for the generated output: one immediate attempt, then 1s intervals until deadline.
        var pollDelay = TimeSpan.FromSeconds(1);
        var shouldWait = timeoutSeconds > 0;
        var deadline = shouldWait ? DateTimeOffset.UtcNow + TimeSpan.FromSeconds(timeoutSeconds) : DateTimeOffset.UtcNow;
        AdvisoryPipelineOutputModel? output = null;
        while (true)
        {
            cancellationToken.ThrowIfCancellationRequested();
            output = await client
                .TryGetAdvisoryPipelineOutputAsync(plan.CacheKey, taskType, normalizedProfile, cancellationToken)
                .ConfigureAwait(false);
            if (output is not null)
            {
                break;
            }
            if (!shouldWait || DateTimeOffset.UtcNow >= deadline)
            {
                break;
            }
            logger.LogDebug("Advisory output pending for {CacheKey}; retrying in {DelaySeconds}s.", plan.CacheKey, pollDelay.TotalSeconds);
            await Task.Delay(pollDelay, cancellationToken).ConfigureAwait(false);
        }
        if (output is null)
        {
            logger.LogError("Timed out after {Timeout}s waiting for advisory output (cache key {CacheKey}).",
                Math.Max(timeoutSeconds, 0),
                plan.CacheKey);
            activity?.SetStatus(ActivityStatusCode.Error, "timeout");
            outcome = "timeout";
            // Preserve any pre-existing non-zero exit code; otherwise signal timeout (70).
            Environment.ExitCode = Environment.ExitCode == 0 ? 70 : Environment.ExitCode;
            return;
        }
        activity?.SetTag("stellaops.cli.advisory.generated_at", output.GeneratedAtUtc.ToString("O", CultureInfo.InvariantCulture));
        activity?.SetTag("stellaops.cli.advisory.cache_hit", output.PlanFromCache);
        logger.LogInformation("Advisory output ready (cache key {CacheKey}).", output.CacheKey);
        RenderAdvisoryOutput(output);
        if (output.Guardrail.Blocked)
        {
            logger.LogError("Guardrail blocked advisory output (cache key {CacheKey}).", output.CacheKey);
            activity?.SetStatus(ActivityStatusCode.Error, "guardrail_blocked");
            outcome = "blocked";
            // Guardrail block maps to exit code 65 (unless an earlier failure already set one).
            Environment.ExitCode = Environment.ExitCode == 0 ? 65 : Environment.ExitCode;
            return;
        }
        activity?.SetStatus(ActivityStatusCode.Ok);
        outcome = output.PlanFromCache ? "cache-hit" : "ok";
        Environment.ExitCode = 0;
    }
    catch (OperationCanceledException)
    {
        outcome = "cancelled";
        activity?.SetStatus(ActivityStatusCode.Error, "cancelled");
        // 130 is the conventional exit code for interrupt/cancellation.
        Environment.ExitCode = Environment.ExitCode == 0 ? 130 : Environment.ExitCode;
    }
    catch (Exception ex)
    {
        activity?.SetStatus(ActivityStatusCode.Error, ex.Message);
        logger.LogError(ex, "Failed to run advisory task.");
        outcome = "error";
        Environment.ExitCode = Environment.ExitCode == 0 ? 1 : Environment.ExitCode;
    }
    finally
    {
        activity?.SetTag("stellaops.cli.advisory.outcome", outcome);
        CliMetrics.RecordAdvisoryRun(taskType.ToString(), outcome);
        // Restore verbosity changed at method entry.
        verbosity.MinimumLevel = previousLevel;
    }
}
public static async Task HandleSourcesIngestAsync(
IServiceProvider services,
bool dryRun,
string source,
string input,
string? tenantOverride,
string format,
bool disableColor,
@@ -6137,7 +6278,156 @@ internal static class CommandHandlers
["ERR_AOC_007"] = 17
};
private static IDictionary<string, object?> RemoveNullValues(Dictionary<string, object?> source)
/// <summary>
/// Trims the supplied section names, drops blank entries, and removes duplicates
/// case-insensitively (first occurrence wins). Returns an empty array when nothing remains.
/// </summary>
private static string[] NormalizeSections(IReadOnlyList<string> sections)
{
    if (sections is null || sections.Count == 0)
    {
        return Array.Empty<string>();
    }

    var seen = new HashSet<string>(StringComparer.OrdinalIgnoreCase);
    var normalized = new List<string>(sections.Count);
    foreach (var raw in sections)
    {
        if (string.IsNullOrWhiteSpace(raw))
        {
            continue;
        }

        var trimmed = raw.Trim();
        if (trimmed.Length > 0 && seen.Add(trimmed))
        {
            normalized.Add(trimmed);
        }
    }

    return normalized.ToArray();
}
/// <summary>
/// Writes a summary table of the advisory plan (task, cache key, template, chunk/vector
/// counts, token budget) to the console, followed by a metadata table when present.
/// </summary>
private static void RenderAdvisoryPlan(AdvisoryPipelinePlanResponseModel plan)
{
    var console = AnsiConsole.Console;
    var summary = new Table()
        .Border(TableBorder.Rounded)
        .Title("[bold]Advisory Plan[/]");
    summary.AddColumn("Field");
    summary.AddColumn("Value");
    // Escape values so advisory-controlled text cannot inject Spectre markup.
    summary.AddRow("Task", Markup.Escape(plan.TaskType));
    summary.AddRow("Cache Key", Markup.Escape(plan.CacheKey));
    summary.AddRow("Prompt Template", Markup.Escape(plan.PromptTemplate));
    summary.AddRow("Chunks", plan.Chunks.Count.ToString(CultureInfo.InvariantCulture));
    summary.AddRow("Vectors", plan.Vectors.Count.ToString(CultureInfo.InvariantCulture));
    summary.AddRow("Prompt Tokens", plan.Budget.PromptTokens.ToString(CultureInfo.InvariantCulture));
    summary.AddRow("Completion Tokens", plan.Budget.CompletionTokens.ToString(CultureInfo.InvariantCulture));
    console.Write(summary);
    if (plan.Metadata.Count > 0)
    {
        console.Write(CreateKeyValueTable("Plan Metadata", plan.Metadata));
    }
}
/// <summary>
/// Renders the generated advisory output to the console: a summary table, the prompt panel,
/// citations, output/guardrail metadata, guardrail violations (if any), and provenance.
/// </summary>
private static void RenderAdvisoryOutput(AdvisoryPipelineOutputModel output)
{
    var console = AnsiConsole.Console;
    var summary = new Table()
        .Border(TableBorder.Rounded)
        .Title("[bold]Advisory Output[/]");
    summary.AddColumn("Field");
    summary.AddColumn("Value");
    summary.AddRow("Cache Key", Markup.Escape(output.CacheKey));
    summary.AddRow("Task", Markup.Escape(output.TaskType));
    summary.AddRow("Profile", Markup.Escape(output.Profile));
    summary.AddRow("Generated", output.GeneratedAtUtc.ToString("O", CultureInfo.InvariantCulture));
    summary.AddRow("Plan From Cache", output.PlanFromCache ? "yes" : "no");
    summary.AddRow("Citations", output.Citations.Count.ToString(CultureInfo.InvariantCulture));
    // Highlight a guardrail block in red; plain "no" otherwise.
    summary.AddRow("Guardrail Blocked", output.Guardrail.Blocked ? "[red]yes[/]" : "no");
    console.Write(summary);
    if (!string.IsNullOrWhiteSpace(output.Prompt))
    {
        var panel = new Panel(new Markup(Markup.Escape(output.Prompt)))
        {
            Header = new PanelHeader("Prompt"),
            Border = BoxBorder.Rounded,
            Expand = true
        };
        console.Write(panel);
    }
    if (output.Citations.Count > 0)
    {
        var citations = new Table()
            .Border(TableBorder.Minimal)
            .Title("[grey]Citations[/]");
        citations.AddColumn("Index");
        citations.AddColumn("Document");
        citations.AddColumn("Chunk");
        foreach (var citation in output.Citations.OrderBy(c => c.Index))
        {
            citations.AddRow(
                citation.Index.ToString(CultureInfo.InvariantCulture),
                Markup.Escape(citation.DocumentId),
                Markup.Escape(citation.ChunkId));
        }
        console.Write(citations);
    }
    if (output.Metadata.Count > 0)
    {
        console.Write(CreateKeyValueTable("Output Metadata", output.Metadata));
    }
    if (output.Guardrail.Metadata.Count > 0)
    {
        console.Write(CreateKeyValueTable("Guardrail Metadata", output.Guardrail.Metadata));
    }
    if (output.Guardrail.Violations.Count > 0)
    {
        var violations = new Table()
            .Border(TableBorder.Minimal)
            .Title("[red]Guardrail Violations[/]");
        violations.AddColumn("Code");
        violations.AddColumn("Message");
        foreach (var violation in output.Guardrail.Violations)
        {
            violations.AddRow(Markup.Escape(violation.Code), Markup.Escape(violation.Message));
        }
        console.Write(violations);
    }
    // Provenance is always rendered, even when signatures are absent.
    var provenance = new Table()
        .Border(TableBorder.Minimal)
        .Title("[grey]Provenance[/]");
    provenance.AddColumn("Field");
    provenance.AddColumn("Value");
    provenance.AddRow("Input Digest", Markup.Escape(output.Provenance.InputDigest));
    provenance.AddRow("Output Hash", Markup.Escape(output.Provenance.OutputHash));
    var signatures = output.Provenance.Signatures.Count == 0
        ? "none"
        : string.Join(Environment.NewLine, output.Provenance.Signatures.Select(Markup.Escape));
    provenance.AddRow("Signatures", signatures);
    console.Write(provenance);
}
/// <summary>
/// Builds a minimal-bordered two-column (Key/Value) table titled <paramref name="title"/>,
/// with rows sorted case-insensitively by key and all text markup-escaped.
/// </summary>
private static Table CreateKeyValueTable(string title, IReadOnlyDictionary<string, string> entries)
{
    var escapedTitle = Markup.Escape(title);
    var table = new Table();
    table.Border(TableBorder.Minimal);
    table.Title($"[grey]{escapedTitle}[/]");
    table.AddColumn("Key");
    table.AddColumn("Value");

    var ordered = entries.OrderBy(static pair => pair.Key, StringComparer.OrdinalIgnoreCase);
    foreach (var (key, value) in ordered)
    {
        table.AddRow(Markup.Escape(key), Markup.Escape(value));
    }

    return table;
}
private static IDictionary<string, object?> RemoveNullValues(Dictionary<string, object?> source)
{
foreach (var key in source.Where(kvp => kvp.Value is null).Select(kvp => kvp.Key).ToList())
{

View File

@@ -26,13 +26,15 @@ public static class CliBootstrapper
options.PostBind = (cliOptions, configuration) =>
{
cliOptions.ApiKey = ResolveWithFallback(cliOptions.ApiKey, configuration, "API_KEY", "StellaOps:ApiKey", "ApiKey");
cliOptions.BackendUrl = ResolveWithFallback(cliOptions.BackendUrl, configuration, "STELLAOPS_BACKEND_URL", "StellaOps:BackendUrl", "BackendUrl");
cliOptions.ConcelierUrl = ResolveWithFallback(cliOptions.ConcelierUrl, configuration, "STELLAOPS_CONCELIER_URL", "StellaOps:ConcelierUrl", "ConcelierUrl");
cliOptions.BackendUrl = ResolveWithFallback(cliOptions.BackendUrl, configuration, "STELLAOPS_BACKEND_URL", "StellaOps:BackendUrl", "BackendUrl");
cliOptions.ConcelierUrl = ResolveWithFallback(cliOptions.ConcelierUrl, configuration, "STELLAOPS_CONCELIER_URL", "StellaOps:ConcelierUrl", "ConcelierUrl");
cliOptions.AdvisoryAiUrl = ResolveWithFallback(cliOptions.AdvisoryAiUrl, configuration, "STELLAOPS_ADVISORYAI_URL", "StellaOps:AdvisoryAiUrl", "AdvisoryAiUrl");
cliOptions.ScannerSignaturePublicKeyPath = ResolveWithFallback(cliOptions.ScannerSignaturePublicKeyPath, configuration, "SCANNER_PUBLIC_KEY", "STELLAOPS_SCANNER_PUBLIC_KEY", "StellaOps:ScannerSignaturePublicKeyPath", "ScannerSignaturePublicKeyPath");
cliOptions.ApiKey = cliOptions.ApiKey?.Trim() ?? string.Empty;
cliOptions.BackendUrl = cliOptions.BackendUrl?.Trim() ?? string.Empty;
cliOptions.ConcelierUrl = cliOptions.ConcelierUrl?.Trim() ?? string.Empty;
cliOptions.ConcelierUrl = cliOptions.ConcelierUrl?.Trim() ?? string.Empty;
cliOptions.AdvisoryAiUrl = cliOptions.AdvisoryAiUrl?.Trim() ?? string.Empty;
cliOptions.ScannerSignaturePublicKeyPath = cliOptions.ScannerSignaturePublicKeyPath?.Trim() ?? string.Empty;
var attemptsRaw = ResolveWithFallback(

View File

@@ -11,7 +11,9 @@ public sealed class StellaOpsCliOptions
public string BackendUrl { get; set; } = string.Empty;
public string ConcelierUrl { get; set; } = string.Empty;
public string ConcelierUrl { get; set; } = string.Empty;
public string AdvisoryAiUrl { get; set; } = string.Empty;
public string ScannerCacheDirectory { get; set; } = "scanners";

View File

@@ -18,7 +18,8 @@ using Microsoft.Extensions.Logging;
using StellaOps.Auth.Abstractions;
using StellaOps.Auth.Client;
using StellaOps.Cli.Configuration;
using StellaOps.Cli.Services.Models;
using StellaOps.Cli.Services.Models;
using StellaOps.Cli.Services.Models.AdvisoryAi;
using StellaOps.Cli.Services.Models.Transport;
namespace StellaOps.Cli.Services;
@@ -30,10 +31,12 @@ internal sealed class BackendOperationsClient : IBackendOperationsClient
private static readonly IReadOnlyDictionary<string, object?> EmptyMetadata =
new ReadOnlyDictionary<string, object?>(new Dictionary<string, object?>(0, StringComparer.OrdinalIgnoreCase));
private const string OperatorReasonParameterName = "operator_reason";
private const string OperatorTicketParameterName = "operator_ticket";
private const string BackfillReasonParameterName = "backfill_reason";
private const string BackfillTicketParameterName = "backfill_ticket";
private const string OperatorReasonParameterName = "operator_reason";
private const string OperatorTicketParameterName = "operator_ticket";
private const string BackfillReasonParameterName = "backfill_reason";
private const string BackfillTicketParameterName = "backfill_ticket";
private const string AdvisoryScopesHeader = "X-StellaOps-Scopes";
private const string AdvisoryRunScope = "advisory:run";
private readonly HttpClient _httpClient;
private readonly StellaOpsCliOptions _options;
@@ -885,13 +888,122 @@ internal sealed class BackendOperationsClient : IBackendOperationsClient
throw new InvalidOperationException("EntryTrace response payload was empty.");
}
return result;
}
public async Task<IReadOnlyList<ExcititorProviderSummary>> GetExcititorProvidersAsync(bool includeDisabled, CancellationToken cancellationToken)
{
EnsureBackendConfigured();
return result;
}
/// <summary>
/// POSTs a normalized plan request to <c>v1/advisory-ai/pipeline/{task}</c> (routed via the
/// Advisory AI endpoint when configured) and returns the parsed plan response.
/// </summary>
/// <param name="taskType">Advisory task; lower-cased into the URL segment.</param>
/// <param name="request">Caller-supplied request; fields are trimmed/defaulted into a fresh payload before sending.</param>
/// <param name="cancellationToken">Cancels the HTTP call and response parsing.</param>
/// <returns>The plan response; never null.</returns>
/// <exception cref="InvalidOperationException">Non-success status, empty body, or unparseable JSON (raw payload attached via Data["payload"]).</exception>
public async Task<AdvisoryPipelinePlanResponseModel> CreateAdvisoryPipelinePlanAsync(
    AdvisoryAiTaskType taskType,
    AdvisoryPipelinePlanRequestModel request,
    CancellationToken cancellationToken)
{
    ArgumentNullException.ThrowIfNull(request);
    var taskSegment = taskType.ToString().ToLowerInvariant();
    var relative = $"v1/advisory-ai/pipeline/{taskSegment}";
    // Rebuild the payload so the wire format is always trimmed and defaulted,
    // regardless of what the caller passed in.
    var payload = new AdvisoryPipelinePlanRequestModel
    {
        TaskType = taskType,
        AdvisoryKey = string.IsNullOrWhiteSpace(request.AdvisoryKey) ? string.Empty : request.AdvisoryKey.Trim(),
        ArtifactId = string.IsNullOrWhiteSpace(request.ArtifactId) ? null : request.ArtifactId!.Trim(),
        ArtifactPurl = string.IsNullOrWhiteSpace(request.ArtifactPurl) ? null : request.ArtifactPurl!.Trim(),
        PolicyVersion = string.IsNullOrWhiteSpace(request.PolicyVersion) ? null : request.PolicyVersion!.Trim(),
        Profile = string.IsNullOrWhiteSpace(request.Profile) ? "default" : request.Profile!.Trim(),
        PreferredSections = request.PreferredSections is null
            ? null
            : request.PreferredSections
                .Where(static section => !string.IsNullOrWhiteSpace(section))
                .Select(static section => section.Trim())
                .ToArray(),
        ForceRefresh = request.ForceRefresh
    };
    using var httpRequest = CreateRequest(HttpMethod.Post, relative);
    // Retarget at the Advisory AI base URL (if configured) and stamp scope headers.
    ApplyAdvisoryAiEndpoint(httpRequest, taskType);
    await AuthorizeRequestAsync(httpRequest, cancellationToken).ConfigureAwait(false);
    httpRequest.Content = JsonContent.Create(payload, options: SerializerOptions);
    using var response = await _httpClient.SendAsync(httpRequest, cancellationToken).ConfigureAwait(false);
    if (!response.IsSuccessStatusCode)
    {
        var failure = await CreateFailureMessageAsync(response, cancellationToken).ConfigureAwait(false);
        throw new InvalidOperationException(failure);
    }
    try
    {
        var plan = await response.Content.ReadFromJsonAsync<AdvisoryPipelinePlanResponseModel>(SerializerOptions, cancellationToken).ConfigureAwait(false);
        if (plan is null)
        {
            throw new InvalidOperationException("Advisory AI plan response was empty.");
        }
        return plan;
    }
    catch (JsonException ex)
    {
        // Keep the raw body on the exception for diagnostics.
        var raw = response.Content is null
            ? string.Empty
            : await response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false);
        throw new InvalidOperationException($"Failed to parse advisory plan response. {ex.Message}", ex)
        {
            Data = { ["payload"] = raw }
        };
    }
}
/// <summary>
/// Fetches a generated advisory output by cache key from <c>v1/advisory-ai/outputs/{key}</c>.
/// Returns null on HTTP 404 (output not ready yet), allowing callers to poll.
/// </summary>
/// <param name="cacheKey">Plan cache key; required, URL-escaped into the path.</param>
/// <param name="taskType">Advisory task; lower-cased into the <c>taskType</c> query parameter.</param>
/// <param name="profile">Execution profile; blank falls back to "default".</param>
/// <param name="cancellationToken">Cancels the HTTP call and response parsing.</param>
/// <returns>The parsed output, or null when the service has not produced one yet.</returns>
/// <exception cref="ArgumentException">Blank cache key.</exception>
/// <exception cref="InvalidOperationException">Non-success (non-404) status or unparseable JSON (raw payload attached via Data["payload"]).</exception>
public async Task<AdvisoryPipelineOutputModel?> TryGetAdvisoryPipelineOutputAsync(
    string cacheKey,
    AdvisoryAiTaskType taskType,
    string profile,
    CancellationToken cancellationToken)
{
    if (string.IsNullOrWhiteSpace(cacheKey))
    {
        throw new ArgumentException("Cache key is required.", nameof(cacheKey));
    }
    var encodedKey = Uri.EscapeDataString(cacheKey);
    var taskSegment = Uri.EscapeDataString(taskType.ToString().ToLowerInvariant());
    var resolvedProfile = string.IsNullOrWhiteSpace(profile) ? "default" : profile.Trim();
    var relative = $"v1/advisory-ai/outputs/{encodedKey}?taskType={taskSegment}&profile={Uri.EscapeDataString(resolvedProfile)}";
    using var request = CreateRequest(HttpMethod.Get, relative);
    // Retarget at the Advisory AI base URL (if configured) and stamp scope headers.
    ApplyAdvisoryAiEndpoint(request, taskType);
    await AuthorizeRequestAsync(request, cancellationToken).ConfigureAwait(false);
    using var response = await _httpClient.SendAsync(request, cancellationToken).ConfigureAwait(false);
    if (response.StatusCode == HttpStatusCode.NotFound)
    {
        // Output not generated yet: signal "try again later" rather than failing.
        return null;
    }
    if (!response.IsSuccessStatusCode)
    {
        var failure = await CreateFailureMessageAsync(response, cancellationToken).ConfigureAwait(false);
        throw new InvalidOperationException(failure);
    }
    try
    {
        return await response.Content.ReadFromJsonAsync<AdvisoryPipelineOutputModel>(SerializerOptions, cancellationToken).ConfigureAwait(false);
    }
    catch (JsonException ex)
    {
        // Keep the raw body on the exception for diagnostics.
        var raw = response.Content is null
            ? string.Empty
            : await response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false);
        throw new InvalidOperationException($"Failed to parse advisory output response. {ex.Message}", ex)
        {
            Data = { ["payload"] = raw }
        };
    }
}
public async Task<IReadOnlyList<ExcititorProviderSummary>> GetExcititorProvidersAsync(bool includeDisabled, CancellationToken cancellationToken)
{
EnsureBackendConfigured();
var query = includeDisabled ? "?includeDisabled=true" : string.Empty;
using var request = CreateRequest(HttpMethod.Get, $"excititor/providers{query}");
await AuthorizeRequestAsync(request, cancellationToken).ConfigureAwait(false);
@@ -1778,7 +1890,44 @@ internal sealed class BackendOperationsClient : IBackendOperationsClient
return string.IsNullOrWhiteSpace(value) ? null : value.Trim();
}
private HttpRequestMessage CreateRequest(HttpMethod method, string relativeUri)
/// <summary>
/// Points the request at the dedicated Advisory AI base URL when one is configured
/// (rebasing only relative request URIs), otherwise verifies the default backend is
/// configured; then stamps the scopes header with the run scope plus a task-specific scope.
/// </summary>
private void ApplyAdvisoryAiEndpoint(HttpRequestMessage request, AdvisoryAiTaskType taskType)
{
    if (request is null)
    {
        throw new ArgumentNullException(nameof(request));
    }

    var requestUri = request.RequestUri ?? throw new InvalidOperationException("Request URI was not initialized.");

    if (string.IsNullOrWhiteSpace(_options.AdvisoryAiUrl))
    {
        // No dedicated Advisory AI endpoint configured; fall back to the standard backend.
        EnsureBackendConfigured();
    }
    else if (!Uri.TryCreate(_options.AdvisoryAiUrl, UriKind.Absolute, out var advisoryBase))
    {
        throw new InvalidOperationException($"Advisory AI URL '{_options.AdvisoryAiUrl}' is not a valid absolute URI.");
    }
    else if (!requestUri.IsAbsoluteUri)
    {
        // Rebase relative URIs onto the Advisory AI host; absolute URIs are left untouched.
        request.RequestUri = new Uri(advisoryBase, requestUri.ToString());
    }

    // Replace any previously stamped scopes header with run + task-specific scopes.
    var taskScope = $"advisory:{taskType.ToString().ToLowerInvariant()}";
    var combined = $"{AdvisoryRunScope} {taskScope}";
    request.Headers.Remove(AdvisoryScopesHeader);
    request.Headers.TryAddWithoutValidation(AdvisoryScopesHeader, combined);
}
private HttpRequestMessage CreateRequest(HttpMethod method, string relativeUri)
{
if (!Uri.TryCreate(relativeUri, UriKind.RelativeOrAbsolute, out var requestUri))
{

View File

@@ -4,6 +4,7 @@ using System.Threading;
using System.Threading.Tasks;
using StellaOps.Cli.Configuration;
using StellaOps.Cli.Services.Models;
using StellaOps.Cli.Services.Models.AdvisoryAi;
namespace StellaOps.Cli.Services;
@@ -46,4 +47,8 @@ internal interface IBackendOperationsClient
Task<PolicyFindingExplainResult> GetPolicyFindingExplainAsync(string policyId, string findingId, string? mode, CancellationToken cancellationToken);
Task<EntryTraceResponseModel?> GetEntryTraceAsync(string scanId, CancellationToken cancellationToken);
Task<AdvisoryPipelinePlanResponseModel> CreateAdvisoryPipelinePlanAsync(AdvisoryAiTaskType taskType, AdvisoryPipelinePlanRequestModel request, CancellationToken cancellationToken);
Task<AdvisoryPipelineOutputModel?> TryGetAdvisoryPipelineOutputAsync(string cacheKey, AdvisoryAiTaskType taskType, string profile, CancellationToken cancellationToken);
}

View File

@@ -0,0 +1,139 @@
using System;
using System.Collections.Generic;
using System.Text.Json.Serialization;
namespace StellaOps.Cli.Services.Models.AdvisoryAi;
/// <summary>Advisory AI task kinds the CLI can request.</summary>
internal enum AdvisoryAiTaskType
{
    Summary,
    Conflict,
    Remediation
}
/// <summary>Request payload for creating an Advisory AI pipeline plan.</summary>
internal sealed class AdvisoryPipelinePlanRequestModel
{
    public AdvisoryAiTaskType TaskType { get; init; }
    public string AdvisoryKey { get; init; } = string.Empty;
    public string? ArtifactId { get; init; }
    public string? ArtifactPurl { get; init; }
    public string? PolicyVersion { get; init; }
    public string Profile { get; init; } = "default";
    // Null (rather than empty) when the caller supplied no section hints.
    public IReadOnlyList<string>? PreferredSections { get; init; }
    public bool ForceRefresh { get; init; }
}
/// <summary>Plan returned by the Advisory AI service: cache key, template, budget, and selected context.</summary>
internal sealed class AdvisoryPipelinePlanResponseModel
{
    public string CacheKey { get; init; } = string.Empty;
    public string TaskType { get; init; } = string.Empty;
    public string PromptTemplate { get; init; } = string.Empty;
    public AdvisoryTaskBudgetModel Budget { get; init; } = new();
    public IReadOnlyList<PipelineChunkSummaryModel> Chunks { get; init; } = Array.Empty<PipelineChunkSummaryModel>();
    public IReadOnlyList<PipelineVectorSummaryModel> Vectors { get; init; } = Array.Empty<PipelineVectorSummaryModel>();
    public Dictionary<string, string> Metadata { get; init; } = new(StringComparer.Ordinal);
}
/// <summary>Token budget allotted to prompt and completion for a plan.</summary>
internal sealed class AdvisoryTaskBudgetModel
{
    public int PromptTokens { get; init; }
    public int CompletionTokens { get; init; }
}
/// <summary>One document chunk selected into the plan's context.</summary>
internal sealed class PipelineChunkSummaryModel
{
    public string DocumentId { get; init; } = string.Empty;
    public string ChunkId { get; init; } = string.Empty;
    public string Section { get; init; } = string.Empty;
    public string? DisplaySection { get; init; }
}
/// <summary>A vector query issued during planning, with its matched chunks.</summary>
internal sealed class PipelineVectorSummaryModel
{
    public string Query { get; init; } = string.Empty;
    public IReadOnlyList<PipelineVectorMatchSummaryModel> Matches { get; init; } = Array.Empty<PipelineVectorMatchSummaryModel>();
}
/// <summary>A single vector search hit (chunk id and similarity score).</summary>
internal sealed class PipelineVectorMatchSummaryModel
{
    public string ChunkId { get; init; } = string.Empty;
    public double Score { get; init; }
}
/// <summary>Generated advisory output: prompt, citations, guardrail verdict, and provenance.</summary>
internal sealed class AdvisoryPipelineOutputModel
{
    public string CacheKey { get; init; } = string.Empty;
    public string TaskType { get; init; } = string.Empty;
    public string Profile { get; init; } = string.Empty;
    public string Prompt { get; init; } = string.Empty;
    public IReadOnlyList<AdvisoryOutputCitationModel> Citations { get; init; } = Array.Empty<AdvisoryOutputCitationModel>();
    public Dictionary<string, string> Metadata { get; init; } = new(StringComparer.Ordinal);
    public AdvisoryOutputGuardrailModel Guardrail { get; init; } = new();
    public AdvisoryOutputProvenanceModel Provenance { get; init; } = new();
    public DateTimeOffset GeneratedAtUtc { get; init; }
    // True when the plan was served from the service-side cache rather than recomputed.
    public bool PlanFromCache { get; init; }
}
/// <summary>Citation linking an output reference index to a source document chunk.</summary>
internal sealed class AdvisoryOutputCitationModel
{
    public int Index { get; init; }
    public string DocumentId { get; init; } = string.Empty;
    public string ChunkId { get; init; } = string.Empty;
}
/// <summary>Guardrail evaluation result; <see cref="Blocked"/> means the output must not be used.</summary>
internal sealed class AdvisoryOutputGuardrailModel
{
    public bool Blocked { get; init; }
    public string SanitizedPrompt { get; init; } = string.Empty;
    public IReadOnlyList<AdvisoryOutputGuardrailViolationModel> Violations { get; init; } = Array.Empty<AdvisoryOutputGuardrailViolationModel>();
    public Dictionary<string, string> Metadata { get; init; } = new(StringComparer.Ordinal);
}
/// <summary>A single guardrail violation (machine code plus human-readable message).</summary>
internal sealed class AdvisoryOutputGuardrailViolationModel
{
    public string Code { get; init; } = string.Empty;
    public string Message { get; init; } = string.Empty;
}
/// <summary>Provenance digests/signatures attached to a generated output.</summary>
internal sealed class AdvisoryOutputProvenanceModel
{
    public string InputDigest { get; init; } = string.Empty;
    public string OutputHash { get; init; } = string.Empty;
    public IReadOnlyList<string> Signatures { get; init; } = Array.Empty<string>();
}

View File

@@ -20,6 +20,7 @@ internal static class CliMetrics
private static readonly Counter<long> PolicyFindingsListCounter = Meter.CreateCounter<long>("stellaops.cli.policy.findings.list.count");
private static readonly Counter<long> PolicyFindingsGetCounter = Meter.CreateCounter<long>("stellaops.cli.policy.findings.get.count");
private static readonly Counter<long> PolicyFindingsExplainCounter = Meter.CreateCounter<long>("stellaops.cli.policy.findings.explain.count");
private static readonly Counter<long> AdvisoryRunCounter = Meter.CreateCounter<long>("stellaops.cli.advisory.run.count");
private static readonly Histogram<double> CommandDurationHistogram = Meter.CreateHistogram<double>("stellaops.cli.command.duration.ms");
public static void RecordScannerDownload(string channel, bool fromCache)
@@ -70,6 +71,13 @@ internal static class CliMetrics
new("outcome", string.IsNullOrWhiteSpace(outcome) ? "unknown" : outcome)
});
/// <summary>
/// Increments the advisory-run counter, tagged with the (lower-cased) task type and outcome;
/// blank values are recorded as "unknown".
/// </summary>
public static void RecordAdvisoryRun(string taskType, string outcome)
    => AdvisoryRunCounter.Add(1, new KeyValuePair<string, object?>[]
    {
        new("task", string.IsNullOrWhiteSpace(taskType) ? "unknown" : taskType.ToLowerInvariant()),
        new("outcome", string.IsNullOrWhiteSpace(outcome) ? "unknown" : outcome)
    });
public static void RecordSourcesDryRun(string status)
=> SourcesDryRunCounter.Add(1, new KeyValuePair<string, object?>[]
{

View File

@@ -23,6 +23,7 @@ using StellaOps.Cli.Commands;
using StellaOps.Cli.Configuration;
using StellaOps.Cli.Services;
using StellaOps.Cli.Services.Models;
using StellaOps.Cli.Services.Models.AdvisoryAi;
using StellaOps.Cli.Telemetry;
using StellaOps.Cli.Tests.Testing;
using StellaOps.Cryptography;
@@ -223,6 +224,291 @@ public sealed class CommandHandlersTests
}
}
[Fact]
// Happy path: the stub backend returns a plan and an unblocked output, so the handler
// should exit 0, send a normalized plan request, fetch the output once, and render it.
public async Task HandleAdviseRunAsync_WritesOutputAndSetsExitCode()
{
    // Save process-global state (exit code, Spectre console) restored in finally.
    var originalExit = Environment.ExitCode;
    var originalConsole = AnsiConsole.Console;
    var testConsole = new TestConsole();
    try
    {
        // Arrange: redirect console output and reset the exit code.
        Environment.ExitCode = 0;
        AnsiConsole.Console = testConsole;
        var planResponse = new AdvisoryPipelinePlanResponseModel
        {
            TaskType = AdvisoryAiTaskType.Summary.ToString(),
            CacheKey = "cache-123",
            PromptTemplate = "prompts/advisory/summary.liquid",
            Budget = new AdvisoryTaskBudgetModel
            {
                PromptTokens = 512,
                CompletionTokens = 128
            },
            Chunks = new[]
            {
                new PipelineChunkSummaryModel
                {
                    DocumentId = "doc-1",
                    ChunkId = "chunk-1",
                    Section = "Summary",
                    DisplaySection = "Summary"
                }
            },
            Vectors = new[]
            {
                new PipelineVectorSummaryModel
                {
                    Query = "summary query",
                    Matches = new[]
                    {
                        new PipelineVectorMatchSummaryModel
                        {
                            ChunkId = "chunk-1",
                            Score = 0.9
                        }
                    }
                }
            },
            Metadata = new Dictionary<string, string>
            {
                ["profile"] = "default"
            }
        };
        var outputResponse = new AdvisoryPipelineOutputModel
        {
            CacheKey = planResponse.CacheKey,
            TaskType = planResponse.TaskType,
            Profile = "default",
            Prompt = "Summary result",
            Citations = new[]
            {
                new AdvisoryOutputCitationModel
                {
                    Index = 0,
                    DocumentId = "doc-1",
                    ChunkId = "chunk-1"
                }
            },
            Metadata = new Dictionary<string, string>
            {
                ["confidence"] = "high"
            },
            // Unblocked guardrail so the handler takes the success path.
            Guardrail = new AdvisoryOutputGuardrailModel
            {
                Blocked = false,
                SanitizedPrompt = "Summary result",
                Violations = Array.Empty<AdvisoryOutputGuardrailViolationModel>(),
                Metadata = new Dictionary<string, string>()
            },
            Provenance = new AdvisoryOutputProvenanceModel
            {
                InputDigest = "sha256:aaa",
                OutputHash = "sha256:bbb",
                Signatures = Array.Empty<string>()
            },
            GeneratedAtUtc = DateTimeOffset.Parse("2025-11-06T12:00:00Z", CultureInfo.InvariantCulture),
            PlanFromCache = false
        };
        var backend = new StubBackendClient(new JobTriggerResult(true, "ok", null, null))
        {
            AdvisoryPlanResponse = planResponse,
            AdvisoryOutputResponse = outputResponse
        };
        var provider = BuildServiceProvider(backend);
        // Act: padded advisory key and duplicate sections exercise the handler's normalization.
        await CommandHandlers.HandleAdviseRunAsync(
            provider,
            AdvisoryAiTaskType.Summary,
            " ADV-1 ",
            null,
            null,
            null,
            "default",
            new[] { "impact", "impact " },
            forceRefresh: false,
            timeoutSeconds: 0,
            verbose: false,
            cancellationToken: CancellationToken.None);
        // Assert: success exit code plus a single, normalized plan request.
        Assert.Equal(0, Environment.ExitCode);
        Assert.Single(backend.AdvisoryPlanRequests);
        var request = backend.AdvisoryPlanRequests[0];
        Assert.Equal(AdvisoryAiTaskType.Summary, request.TaskType);
        Assert.Equal("ADV-1", request.Request.AdvisoryKey);
        Assert.NotNull(request.Request.PreferredSections);
        Assert.Single(request.Request.PreferredSections!);
        Assert.Equal("impact", request.Request.PreferredSections![0]);
        // Exactly one output fetch, keyed by the plan's cache key and profile.
        Assert.Single(backend.AdvisoryOutputRequests);
        Assert.Equal(planResponse.CacheKey, backend.AdvisoryOutputRequests[0].CacheKey);
        Assert.Equal("default", backend.AdvisoryOutputRequests[0].Profile);
        // Rendered console output includes the output table, cache key, and prompt text.
        var output = testConsole.Output;
        Assert.Contains("Advisory Output", output, StringComparison.OrdinalIgnoreCase);
        Assert.Contains(planResponse.CacheKey, output, StringComparison.Ordinal);
        Assert.Contains("Summary result", output, StringComparison.Ordinal);
    }
    finally
    {
        // Restore global console and exit code for subsequent tests.
        AnsiConsole.Console = originalConsole;
        Environment.ExitCode = originalExit;
    }
}
[Fact]
public async Task HandleAdviseRunAsync_ReturnsGuardrailExitCodeOnBlock()
{
    // When the pipeline output is guardrail-blocked, the handler should set
    // exit code 65 and surface the violations in the console output.
    // (65 presumably is the dedicated guardrail-block exit code — confirm
    // against CommandHandlers if it changes.)
    var originalExit = Environment.ExitCode;
    var originalConsole = AnsiConsole.Console;
    var testConsole = new TestConsole();
    try
    {
        Environment.ExitCode = 0;
        AnsiConsole.Console = testConsole;
        // Arrange: minimal remediation plan with no chunks/vectors.
        var planResponse = new AdvisoryPipelinePlanResponseModel
        {
            TaskType = AdvisoryAiTaskType.Remediation.ToString(),
            CacheKey = "cache-guard",
            PromptTemplate = "prompts/advisory/remediation.liquid",
            Budget = new AdvisoryTaskBudgetModel
            {
                PromptTokens = 256,
                CompletionTokens = 64
            },
            Chunks = Array.Empty<PipelineChunkSummaryModel>(),
            Vectors = Array.Empty<PipelineVectorSummaryModel>(),
            Metadata = new Dictionary<string, string>()
        };
        // Arrange: output whose guardrail is Blocked with one violation.
        var outputResponse = new AdvisoryPipelineOutputModel
        {
            CacheKey = planResponse.CacheKey,
            TaskType = planResponse.TaskType,
            Profile = "default",
            Prompt = "Blocked output",
            Citations = Array.Empty<AdvisoryOutputCitationModel>(),
            Metadata = new Dictionary<string, string>(),
            Guardrail = new AdvisoryOutputGuardrailModel
            {
                Blocked = true,
                SanitizedPrompt = "Blocked output",
                Violations = new[]
                {
                    new AdvisoryOutputGuardrailViolationModel
                    {
                        Code = "PROMPT_INJECTION",
                        Message = "Detected prompt injection attempt."
                    }
                },
                Metadata = new Dictionary<string, string>()
            },
            Provenance = new AdvisoryOutputProvenanceModel
            {
                InputDigest = "sha256:ccc",
                OutputHash = "sha256:ddd",
                Signatures = Array.Empty<string>()
            },
            GeneratedAtUtc = DateTimeOffset.Parse("2025-11-06T13:05:00Z", CultureInfo.InvariantCulture),
            PlanFromCache = true
        };
        var backend = new StubBackendClient(new JobTriggerResult(true, "ok", null, null))
        {
            AdvisoryPlanResponse = planResponse,
            AdvisoryOutputResponse = outputResponse
        };
        var provider = BuildServiceProvider(backend);
        // Act: forceRefresh=true exercises the refresh path alongside the block.
        await CommandHandlers.HandleAdviseRunAsync(
            provider,
            AdvisoryAiTaskType.Remediation,
            "ADV-2",
            null,
            null,
            null,
            "default",
            Array.Empty<string>(),
            forceRefresh: true,
            timeoutSeconds: 0,
            verbose: false,
            cancellationToken: CancellationToken.None);
        // Assert: guardrail exit code plus the violations section in the output.
        Assert.Equal(65, Environment.ExitCode);
        Assert.Contains("Guardrail Violations", testConsole.Output, StringComparison.OrdinalIgnoreCase);
    }
    finally
    {
        // Restore global console and exit code for subsequent tests.
        AnsiConsole.Console = originalConsole;
        Environment.ExitCode = originalExit;
    }
}
[Fact]
public async Task HandleAdviseRunAsync_TimesOutWhenOutputMissing()
{
    // With a plan but no output available (AdvisoryOutputResponse = null) and a
    // zero-second timeout, the handler should give up and set exit code 70.
    // (70 presumably is the timeout/unavailable exit code — confirm against
    // CommandHandlers if it changes.)
    var originalExit = Environment.ExitCode;
    var originalConsole = AnsiConsole.Console;
    try
    {
        Environment.ExitCode = 0;
        AnsiConsole.Console = new TestConsole();
        // Arrange: minimal conflict plan; the output side is left unset.
        var planResponse = new AdvisoryPipelinePlanResponseModel
        {
            TaskType = AdvisoryAiTaskType.Conflict.ToString(),
            CacheKey = "cache-timeout",
            PromptTemplate = "prompts/advisory/conflict.liquid",
            Budget = new AdvisoryTaskBudgetModel
            {
                PromptTokens = 128,
                CompletionTokens = 32
            },
            Chunks = Array.Empty<PipelineChunkSummaryModel>(),
            Vectors = Array.Empty<PipelineVectorSummaryModel>(),
            Metadata = new Dictionary<string, string>()
        };
        var backend = new StubBackendClient(new JobTriggerResult(true, "ok", null, null))
        {
            AdvisoryPlanResponse = planResponse,
            AdvisoryOutputResponse = null
        };
        var provider = BuildServiceProvider(backend);
        await CommandHandlers.HandleAdviseRunAsync(
            provider,
            AdvisoryAiTaskType.Conflict,
            "ADV-3",
            null,
            null,
            null,
            "default",
            Array.Empty<string>(),
            forceRefresh: false,
            timeoutSeconds: 0,
            verbose: false,
            cancellationToken: CancellationToken.None);
        // Assert: timeout exit code and exactly one output poll before giving up.
        Assert.Equal(70, Environment.ExitCode);
        Assert.Single(backend.AdvisoryOutputRequests);
    }
    finally
    {
        // Restore global console and exit code for subsequent tests.
        AnsiConsole.Console = originalConsole;
        Environment.ExitCode = originalExit;
    }
}
[Fact]
public async Task HandleAuthLoginAsync_UsesClientCredentialsFlow()
{
@@ -1726,10 +2012,16 @@ spec:
Assert.NotNull(backend.LastTaskRunnerSimulationRequest);
var consoleOutput = writer.ToString();
Assert.Contains("\"planHash\":\"hash-xyz789\"", consoleOutput, StringComparison.Ordinal);
using (var consoleJson = JsonDocument.Parse(consoleOutput))
{
Assert.Equal("hash-xyz789", consoleJson.RootElement.GetProperty("planHash").GetString());
}
var fileOutput = await File.ReadAllTextAsync(outputPath);
Assert.Contains("\"planHash\":\"hash-xyz789\"", fileOutput, StringComparison.Ordinal);
using (var fileJson = JsonDocument.Parse(fileOutput))
{
Assert.Equal("hash-xyz789", fileJson.RootElement.GetProperty("planHash").GetString());
}
Assert.True(backend.LastTaskRunnerSimulationRequest!.Inputs!.TryGetPropertyValue("dryRun", out var dryRunNode));
Assert.False(dryRunNode!.GetValue<bool>());
@@ -2738,6 +3030,13 @@ spec:
public EntryTraceResponseModel? EntryTraceResponse { get; set; }
public Exception? EntryTraceException { get; set; }
public string? LastEntryTraceScanId { get; private set; }
public List<(AdvisoryAiTaskType TaskType, AdvisoryPipelinePlanRequestModel Request)> AdvisoryPlanRequests { get; } = new();
public AdvisoryPipelinePlanResponseModel? AdvisoryPlanResponse { get; set; }
public Exception? AdvisoryPlanException { get; set; }
public Queue<AdvisoryPipelineOutputModel?> AdvisoryOutputQueue { get; } = new();
public AdvisoryPipelineOutputModel? AdvisoryOutputResponse { get; set; }
public Exception? AdvisoryOutputException { get; set; }
public List<(string CacheKey, AdvisoryAiTaskType TaskType, string Profile)> AdvisoryOutputRequests { get; } = new();
public Task<ScannerArtifactResult> DownloadScannerAsync(string channel, string outputPath, bool overwrite, bool verbose, CancellationToken cancellationToken)
=> throw new NotImplementedException();
@@ -2890,10 +3189,52 @@ spec:
return Task.FromResult(EntryTraceResponse);
}
public Task<AdvisoryPipelinePlanResponseModel> CreateAdvisoryPipelinePlanAsync(AdvisoryAiTaskType taskType, AdvisoryPipelinePlanRequestModel request, CancellationToken cancellationToken)
{
    // Record every invocation so tests can inspect what the handler sent.
    AdvisoryPlanRequests.Add((taskType, request));

    // Simulate a backend failure when a canned exception is configured.
    if (AdvisoryPlanException is not null)
    {
        throw AdvisoryPlanException;
    }

    // Prefer the canned response when a test supplied one.
    if (AdvisoryPlanResponse is { } configured)
    {
        return Task.FromResult(configured);
    }

    // Otherwise fall back to a minimal, empty plan for the requested task type.
    var fallback = new AdvisoryPipelinePlanResponseModel
    {
        TaskType = taskType.ToString(),
        CacheKey = "stub-cache-key",
        PromptTemplate = "prompts/advisory/stub.liquid",
        Budget = new AdvisoryTaskBudgetModel
        {
            PromptTokens = 0,
            CompletionTokens = 0
        },
        Chunks = Array.Empty<PipelineChunkSummaryModel>(),
        Vectors = Array.Empty<PipelineVectorSummaryModel>(),
        Metadata = new Dictionary<string, string>(StringComparer.Ordinal)
    };
    return Task.FromResult(fallback);
}
public Task<AdvisoryPipelineOutputModel?> TryGetAdvisoryPipelineOutputAsync(string cacheKey, AdvisoryAiTaskType taskType, string profile, CancellationToken cancellationToken)
{
    // Record every poll so tests can assert how often the handler retried.
    AdvisoryOutputRequests.Add((cacheKey, taskType, profile));

    // Simulate a backend failure when a canned exception is configured.
    if (AdvisoryOutputException is not null)
    {
        throw AdvisoryOutputException;
    }

    // Queued responses (one per poll) take precedence over the single
    // fixed response; both may legitimately be null to model "not ready".
    var result = AdvisoryOutputQueue.Count > 0
        ? AdvisoryOutputQueue.Dequeue()
        : AdvisoryOutputResponse;
    return Task.FromResult(result);
}
}
private sealed class StubExecutor : IScannerExecutor
{
private sealed class StubExecutor : IScannerExecutor
{
private readonly ScannerExecutionResult _result;
public StubExecutor(ScannerExecutionResult result)

View File

@@ -19,16 +19,20 @@ public sealed class EgressPolicyHttpMessageHandlerTests
{
Mode = EgressPolicyMode.Sealed
};
options.AddAllowRule(example.com);
options.AddAllowRule("example.com");
var policy = new EgressPolicy(options);
var handler = new EgressPolicyHttpMessageHandler(policy, NullLogger<EgressPolicyHttpMessageHandler>.Instance, cli, test)
var handler = new EgressPolicyHttpMessageHandler(
policy,
NullLogger.Instance,
component: "cli-tests",
intent: "allow-test")
{
InnerHandler = new StubHandler()
};
var client = new HttpClient(handler, disposeHandler: true);
var response = await client.GetAsync(https://example.com/resource, CancellationToken.None).ConfigureAwait(false);
using var client = new HttpClient(handler, disposeHandler: true);
var response = await client.GetAsync("https://example.com/resource", CancellationToken.None);
Assert.Equal(HttpStatusCode.OK, response.StatusCode);
}
@@ -42,15 +46,19 @@ public sealed class EgressPolicyHttpMessageHandlerTests
};
var policy = new EgressPolicy(options);
var handler = new EgressPolicyHttpMessageHandler(policy, NullLogger<EgressPolicyHttpMessageHandler>.Instance, cli, test)
var handler = new EgressPolicyHttpMessageHandler(
policy,
NullLogger.Instance,
component: "cli-tests",
intent: "deny-test")
{
InnerHandler = new StubHandler()
};
var client = new HttpClient(handler, disposeHandler: true);
using var client = new HttpClient(handler, disposeHandler: true);
var exception = await Assert.ThrowsAsync<AirGapEgressBlockedException>(
() => client.GetAsync(https://blocked.example, CancellationToken.None)).ConfigureAwait(false);
() => client.GetAsync("https://blocked.example", CancellationToken.None));
Assert.Contains(AirGapEgressBlockedException.ErrorCode, exception.Message, StringComparison.OrdinalIgnoreCase);
}

View File

@@ -574,7 +574,7 @@ public sealed class BackendOperationsClientTests
var result = await client.TriggerJobAsync("test", new Dictionary<string, object?>(), CancellationToken.None);
Assert.True(result.Success);
var metadata = Assert.NotNull(tokenClient.LastAdditionalParameters);
var metadata = Assert.IsAssignableFrom<IReadOnlyDictionary<string, string>>(tokenClient.LastAdditionalParameters);
Assert.Equal("Resume operations", metadata["operator_reason"]);
Assert.Equal("INC-6006", metadata["operator_ticket"]);
Assert.Equal("Historical rebuild", metadata["backfill_reason"]);

View File

@@ -0,0 +1,26 @@
using System.Collections.Generic;

namespace StellaOps.Concelier.WebService.Contracts;

/// <summary>
/// Response envelope for the advisory chunk API: extracted paragraph chunks
/// for one advisory key plus the observation sources they were derived from.
/// </summary>
/// <param name="AdvisoryKey">Advisory key the chunks were built for.</param>
/// <param name="Total">Total number of chunks produced; may exceed <paramref name="Chunks"/>.Count when <paramref name="Truncated"/> is true.</param>
/// <param name="Truncated">True when chunk or observation limits cut the result short.</param>
/// <param name="Chunks">Chunk items, capped at the configured chunk limit.</param>
/// <param name="Sources">Source observations the chunks were extracted from.</param>
public sealed record AdvisoryChunkCollectionResponse(
    string AdvisoryKey,
    int Total,
    bool Truncated,
    IReadOnlyList<AdvisoryChunkItemResponse> Chunks,
    IReadOnlyList<AdvisoryChunkSourceResponse> Sources);

/// <summary>
/// One paragraph-sized text chunk with its stable anchor identifiers.
/// </summary>
/// <param name="DocumentId">Upstream document identifier the chunk came from.</param>
/// <param name="ChunkId">Deterministic chunk identifier (document id plus content hash suffix).</param>
/// <param name="Section">Section label resolved for the chunk.</param>
/// <param name="ParagraphId">JSON path of the paragraph inside the source document.</param>
/// <param name="Text">Whitespace-normalized chunk text.</param>
/// <param name="Metadata">Extra attributes (e.g. path, section, format).</param>
public sealed record AdvisoryChunkItemResponse(
    string DocumentId,
    string ChunkId,
    string Section,
    string ParagraphId,
    string Text,
    IReadOnlyDictionary<string, string> Metadata);

/// <summary>
/// Provenance entry describing one observation a chunk set was built from.
/// </summary>
/// <param name="ObservationId">Identifier of the source observation.</param>
/// <param name="DocumentId">Resolved upstream document identifier.</param>
/// <param name="Format">Source content format (e.g. CSAF/OSV — per upstream).</param>
/// <param name="Vendor">Source vendor name.</param>
/// <param name="ContentHash">Upstream content hash for integrity tracking.</param>
/// <param name="CreatedAt">Observation creation timestamp.</param>
public sealed record AdvisoryChunkSourceResponse(
    string ObservationId,
    string DocumentId,
    string Format,
    string Vendor,
    string ContentHash,
    DateTimeOffset CreatedAt);

View File

@@ -111,14 +111,17 @@ internal static class JobRegistrationExtensions
private static void ConfigureMergeJob(JobSchedulerOptions options, IConfiguration configuration)
{
var noMergeEnabled = configuration.GetValue<bool?>("concelier:features:noMergeEnabled") ?? true;
var noMergeEnabled = configuration.GetValue<bool?>("concelier:features:noMergeEnabled")
?? configuration.GetValue<bool?>("features:noMergeEnabled")
?? true;
if (noMergeEnabled)
{
options.Definitions.Remove(MergeReconcileBuiltInJob.Kind);
return;
}
var allowlist = configuration.GetSection("concelier:jobs:merge:allowlist").Get<string[]>();
var allowlist = configuration.GetSection("concelier:jobs:merge:allowlist").Get<string[]>()
?? configuration.GetSection("jobs:merge:allowlist").Get<string[]>();
if (allowlist is { Length: > 0 })
{
var allowlistSet = new HashSet<string>(allowlist, StringComparer.OrdinalIgnoreCase);

View File

@@ -17,6 +17,8 @@ public sealed class ConcelierOptions
public MirrorOptions Mirror { get; set; } = new();
public FeaturesOptions Features { get; set; } = new();
public AdvisoryChunkOptions AdvisoryChunks { get; set; } = new();
public sealed class StorageOptions
{
@@ -81,6 +83,8 @@ public sealed class ConcelierOptions
public IList<string> RequiredScopes { get; set; } = new List<string>();
public IList<string> RequiredTenants { get; set; } = new List<string>();
public IList<string> BypassNetworks { get; set; } = new List<string>();
public string? ClientId { get; set; }
@@ -146,4 +150,19 @@ public sealed class ConcelierOptions
public IList<string> MergeJobAllowlist { get; } = new List<string>();
}
/// <summary>
/// Limits for the advisory chunk API. Invariants (defaults within maxima,
/// all positive) are enforced by the options validator.
/// </summary>
public sealed class AdvisoryChunkOptions
{
    // Default and maximum number of chunks returned per request.
    public int DefaultChunkLimit { get; set; } = 200;
    public int MaxChunkLimit { get; set; } = 400;
    // Default and maximum number of source observations consulted per request.
    public int DefaultObservationLimit { get; set; } = 24;
    public int MaxObservationLimit { get; set; } = 48;
    // Default and maximum value for the minimum-text-length chunk filter.
    public int DefaultMinimumLength { get; set; } = 64;
    public int MaxMinimumLength { get; set; } = 512;
}
}

View File

@@ -30,11 +30,14 @@ public static class ConcelierOptionsValidator
options.Authority ??= new ConcelierOptions.AuthorityOptions();
options.Authority.Resilience ??= new ConcelierOptions.AuthorityOptions.ResilienceOptions();
options.Authority.RequiredTenants ??= new List<string>();
NormalizeList(options.Authority.Audiences, toLower: false);
NormalizeList(options.Authority.RequiredScopes, toLower: true);
NormalizeList(options.Authority.BypassNetworks, toLower: false);
NormalizeList(options.Authority.ClientScopes, toLower: true);
NormalizeList(options.Authority.RequiredTenants, toLower: true);
ValidateResilience(options.Authority.Resilience);
ValidateTenantAllowlist(options.Authority.RequiredTenants);
if (options.Authority.RequiredScopes.Count == 0)
{
@@ -133,6 +136,9 @@ public static class ConcelierOptionsValidator
options.Mirror ??= new ConcelierOptions.MirrorOptions();
ValidateMirror(options.Mirror);
options.AdvisoryChunks ??= new ConcelierOptions.AdvisoryChunkOptions();
ValidateAdvisoryChunks(options.AdvisoryChunks);
}
private static void NormalizeList(IList<string> values, bool toLower)
@@ -190,6 +196,32 @@ public static class ConcelierOptionsValidator
}
}
/// <summary>
/// Validates tenant allowlist entries: each must be 1-64 characters of
/// lowercase ASCII letters, digits, or '-'. Throws on the first violation;
/// a null or empty list is accepted (no restriction configured).
/// </summary>
private static void ValidateTenantAllowlist(IList<string> tenants)
{
    // Nothing to validate when no allowlist is configured.
    if (tenants is not { Count: > 0 })
    {
        return;
    }

    foreach (var tenant in tenants)
    {
        if (string.IsNullOrEmpty(tenant) || tenant.Length > 64)
        {
            throw new InvalidOperationException("Authority requiredTenants entries must be between 1 and 64 characters.");
        }

        foreach (var ch in tenant)
        {
            // Only lowercase ASCII letters, digits, and '-' are permitted;
            // entries are normalized to lowercase before this runs.
            var permitted = ch is (>= 'a' and <= 'z') or (>= '0' and <= '9') or '-';
            if (!permitted)
            {
                throw new InvalidOperationException($"Authority requiredTenants entry '{tenant}' contains invalid character '{ch}'. Use lowercase letters, digits, or '-'.");
            }
        }
    }
}
private static void ValidateMirror(ConcelierOptions.MirrorOptions mirror)
{
if (mirror.MaxIndexRequestsPerHour < 0)
@@ -242,4 +274,37 @@ public static class ConcelierOptionsValidator
throw new InvalidOperationException("Mirror distribution requires at least one domain when enabled.");
}
}
/// <summary>
/// Enforces advisory chunk option invariants: all defaults must be positive
/// and every maximum must be at least its corresponding default. Throws
/// <see cref="InvalidOperationException"/> on the first violated invariant,
/// checked in declaration order.
/// </summary>
private static void ValidateAdvisoryChunks(ConcelierOptions.AdvisoryChunkOptions chunks)
{
    // Local guard: throws with the supplied message when the invariant fails.
    static void Require(bool invariantHolds, string message)
    {
        if (!invariantHolds)
        {
            throw new InvalidOperationException(message);
        }
    }

    Require(chunks.DefaultChunkLimit > 0, "Advisory chunk defaultChunkLimit must be greater than zero.");
    Require(chunks.MaxChunkLimit >= chunks.DefaultChunkLimit, "Advisory chunk maxChunkLimit must be greater than or equal to defaultChunkLimit.");
    Require(chunks.DefaultObservationLimit > 0, "Advisory chunk defaultObservationLimit must be greater than zero.");
    Require(chunks.MaxObservationLimit >= chunks.DefaultObservationLimit, "Advisory chunk maxObservationLimit must be greater than or equal to defaultObservationLimit.");
    Require(chunks.DefaultMinimumLength > 0, "Advisory chunk defaultMinimumLength must be greater than zero.");
    Require(chunks.MaxMinimumLength >= chunks.DefaultMinimumLength, "Advisory chunk maxMinimumLength must be greater than or equal to defaultMinimumLength.");
}
}

View File

@@ -235,6 +235,12 @@ var resolvedConcelierOptions = app.Services.GetRequiredService<IOptions<Concelie
var resolvedAuthority = resolvedConcelierOptions.Authority ?? new ConcelierOptions.AuthorityOptions();
authorityConfigured = resolvedAuthority.Enabled;
var enforceAuthority = resolvedAuthority.Enabled && !resolvedAuthority.AllowAnonymousFallback;
var requiredTenants = (resolvedAuthority.RequiredTenants ?? Array.Empty<string>())
.Select(static tenant => tenant?.Trim().ToLowerInvariant())
.Where(static tenant => !string.IsNullOrWhiteSpace(tenant))
.Distinct(StringComparer.Ordinal)
.ToImmutableHashSet(StringComparer.Ordinal);
var enforceTenantAllowlist = !requiredTenants.IsEmpty;
if (resolvedAuthority.Enabled && resolvedAuthority.AllowAnonymousFallback)
{
@@ -358,11 +364,14 @@ var advisoryIngestEndpoint = app.MapPost("/ingest/advisory", async (
AdvisoryIngestRequest request,
[FromServices] IAdvisoryRawService rawService,
[FromServices] TimeProvider timeProvider,
[FromServices] ILogger<Program> logger,
CancellationToken cancellationToken) =>
{
ApplyNoCache(context.Response);
if (request is null || request.Source is null || request.Upstream is null || request.Content is null || request.Identifiers is null)
var ingestRequest = request;
if (ingestRequest is null || ingestRequest.Source is null || ingestRequest.Upstream is null || ingestRequest.Content is null || ingestRequest.Identifiers is null)
{
return Problem(context, "Invalid request", StatusCodes.Status400BadRequest, ProblemTypes.Validation, "source, upstream, content, and identifiers sections are required.");
}
@@ -381,7 +390,14 @@ var advisoryIngestEndpoint = app.MapPost("/ingest/advisory", async (
AdvisoryRawDocument document;
try
{
document = AdvisoryRawRequestMapper.Map(request, tenant, timeProvider);
logger.LogWarning(
"Binding advisory ingest request hash={Hash}",
ingestRequest.Upstream.ContentHash ?? "(null)");
document = AdvisoryRawRequestMapper.Map(ingestRequest, tenant, timeProvider);
logger.LogWarning(
"Mapped advisory_raw document hash={Hash}",
string.IsNullOrWhiteSpace(document.Upstream.ContentHash) ? "(empty)" : document.Upstream.ContentHash);
}
catch (Exception ex) when (ex is ArgumentException or InvalidOperationException)
{
@@ -418,6 +434,15 @@ var advisoryIngestEndpoint = app.MapPost("/ingest/advisory", async (
}
catch (ConcelierAocGuardException guardException)
{
logger.LogWarning(
guardException,
"AOC guard rejected advisory ingest tenant={Tenant} upstream={UpstreamId} requestHash={RequestHash} documentHash={DocumentHash} codes={Codes}",
tenant,
document.Upstream.UpstreamId,
request!.Upstream?.ContentHash ?? "(null)",
string.IsNullOrWhiteSpace(document.Upstream.ContentHash) ? "(empty)" : document.Upstream.ContentHash,
string.Join(',', guardException.Violations.Select(static violation => violation.ErrorCode)));
IngestionMetrics.ViolationCounter.Add(1, new[]
{
new KeyValuePair<string, object?>("tenant", tenant),
@@ -945,6 +970,11 @@ IResult? EnsureTenantAuthorized(HttpContext context, string tenant)
return null;
}
if (enforceTenantAllowlist && !requiredTenants.Contains(tenant))
{
return Results.Forbid();
}
var principal = context.User;
if (enforceAuthority && (principal?.Identity?.IsAuthenticated != true))
@@ -965,6 +995,11 @@ IResult? EnsureTenantAuthorized(HttpContext context, string tenant)
{
return Results.Forbid();
}
if (enforceTenantAllowlist && !requiredTenants.Contains(normalizedClaim))
{
return Results.Forbid();
}
}
return null;

View File

@@ -0,0 +1,257 @@
using System.Collections.Immutable;
using System.Globalization;
using System.Security.Cryptography;
using System.Text;
using System.Text.Json;
using System.Text.Json.Nodes;
using StellaOps.Concelier.Models.Observations;
using StellaOps.Concelier.WebService.Contracts;
namespace StellaOps.Concelier.WebService.Services;
/// <summary>
/// Immutable inputs for a single chunk-extraction pass.
/// </summary>
/// <param name="AdvisoryKey">Advisory key the response is built for.</param>
/// <param name="ChunkLimit">Maximum number of chunks to return.</param>
/// <param name="ObservationLimit">Maximum number of observations to consult.</param>
/// <param name="SectionFilter">If non-empty, only chunks whose resolved section is in this set are kept.</param>
/// <param name="FormatFilter">If non-empty, only observations whose content format is in this set are used.</param>
/// <param name="MinimumLength">Minimum normalized text length for a chunk (a builder-internal floor also applies).</param>
internal sealed record AdvisoryChunkBuildOptions(
    string AdvisoryKey,
    int ChunkLimit,
    int ObservationLimit,
    ImmutableHashSet<string> SectionFilter,
    ImmutableHashSet<string> FormatFilter,
    int MinimumLength);
/// <summary>
/// Builds paragraph-anchored text chunks for an advisory by walking the raw
/// JSON content of its observations (newest first) and extracting
/// whitespace-normalized string values that pass length/letter/section filters.
/// </summary>
internal sealed class AdvisoryChunkBuilder
{
    // Hard floor for chunk text length; the effective minimum is
    // max(options.MinimumLength, DefaultMinLength).
    private const int DefaultMinLength = 40;

    /// <summary>
    /// Produces the chunk collection for <paramref name="options"/> from the
    /// given observations. Observations are processed newest-first; extraction
    /// stops once the observation or chunk limit is reached.
    /// </summary>
    public AdvisoryChunkCollectionResponse Build(
        AdvisoryChunkBuildOptions options,
        IReadOnlyList<AdvisoryObservation> observations)
    {
        // Presize conservatively; 256 caps the initial allocation, not the list.
        var chunks = new List<AdvisoryChunkItemResponse>(Math.Min(options.ChunkLimit, 256));
        var sources = new List<AdvisoryChunkSourceResponse>();
        var total = 0;
        var truncated = false;
        foreach (var observation in observations
            .OrderByDescending(o => o.CreatedAt))
        {
            if (sources.Count >= options.ObservationLimit)
            {
                // Observation budget exhausted; only flag truncation when the
                // chunk budget is also fully used.
                truncated = truncated || chunks.Count == options.ChunkLimit;
                break;
            }
            if (options.FormatFilter.Count > 0 &&
                !options.FormatFilter.Contains(observation.Content.Format))
            {
                continue;
            }
            var documentId = DetermineDocumentId(observation);
            // Record provenance for every observation we extract from.
            sources.Add(new AdvisoryChunkSourceResponse(
                observation.ObservationId,
                documentId,
                observation.Content.Format,
                observation.Source.Vendor,
                observation.Upstream.ContentHash,
                observation.CreatedAt));
            foreach (var chunk in ExtractChunks(observation, documentId, options))
            {
                // total counts every extracted chunk, including the one that
                // overflows the limit and is not added.
                total++;
                if (chunks.Count < options.ChunkLimit)
                {
                    chunks.Add(chunk);
                }
                else
                {
                    truncated = true;
                    break;
                }
            }
            if (truncated)
            {
                break;
            }
        }
        if (!truncated)
        {
            // Without truncation, total equals the number of returned chunks.
            total = chunks.Count;
        }
        return new AdvisoryChunkCollectionResponse(
            options.AdvisoryKey,
            total,
            truncated,
            chunks,
            sources);
    }

    // Prefers the upstream document id; falls back to the observation id.
    private static string DetermineDocumentId(AdvisoryObservation observation)
    {
        if (!string.IsNullOrWhiteSpace(observation.Upstream.UpstreamId))
        {
            return observation.Upstream.UpstreamId;
        }
        return observation.ObservationId;
    }

    /// <summary>
    /// Depth-first walk over the observation's raw JSON yielding one chunk per
    /// qualifying string value. The section label is the top-level property
    /// name under which the value sits (or the document id for root-level values).
    /// </summary>
    private static IEnumerable<AdvisoryChunkItemResponse> ExtractChunks(
        AdvisoryObservation observation,
        string documentId,
        AdvisoryChunkBuildOptions options)
    {
        var root = observation.Content.Raw;
        if (root is null)
        {
            yield break;
        }
        // Explicit stack instead of recursion; entries carry the node, its
        // JSON path, and the inherited section label.
        var stack = new Stack<(JsonNode Node, string Path, string Section)>();
        stack.Push((root, string.Empty, string.Empty));
        while (stack.Count > 0)
        {
            var (node, path, section) = stack.Pop();
            if (node is null)
            {
                continue;
            }
            switch (node)
            {
                case JsonValue value when TryNormalize(value, out var text):
                    // Each `continue` skips this value and resumes the walk.
                    if (text.Length < Math.Max(options.MinimumLength, DefaultMinLength))
                    {
                        continue;
                    }
                    if (!ContainsLetter(text))
                    {
                        continue;
                    }
                    var resolvedSection = string.IsNullOrEmpty(section) ? documentId : section;
                    if (options.SectionFilter.Count > 0 && !options.SectionFilter.Contains(resolvedSection))
                    {
                        continue;
                    }
                    var paragraphId = string.IsNullOrEmpty(path) ? resolvedSection : path;
                    var chunkId = CreateChunkId(documentId, paragraphId);
                    var metadata = new Dictionary<string, string>(StringComparer.Ordinal)
                    {
                        ["path"] = paragraphId,
                        ["section"] = resolvedSection,
                        ["format"] = observation.Content.Format
                    };
                    if (!string.IsNullOrEmpty(observation.Content.SpecVersion))
                    {
                        metadata["specVersion"] = observation.Content.SpecVersion!;
                    }
                    yield return new AdvisoryChunkItemResponse(
                        documentId,
                        chunkId,
                        resolvedSection,
                        paragraphId,
                        text,
                        metadata);
                    break;
                case JsonObject obj:
                    // Push properties in reverse so the LIFO stack pops them
                    // in original document order.
                    foreach (var property in obj.Reverse())
                    {
                        // The first object level encountered fixes the section label.
                        var childSection = string.IsNullOrEmpty(section) ? property.Key : section;
                        var childPath = AppendPath(path, property.Key);
                        if (property.Value is { } childNode)
                        {
                            stack.Push((childNode, childPath, childSection));
                        }
                    }
                    break;
                case JsonArray array:
                    // Reverse index order for the same LIFO reason as above.
                    for (var index = array.Count - 1; index >= 0; index--)
                    {
                        var childPath = AppendIndex(path, index);
                        if (array[index] is { } childNode)
                        {
                            stack.Push((childNode, childPath, section));
                        }
                    }
                    break;
            }
        }
    }

    /// <summary>
    /// Extracts the string from a JSON value and collapses whitespace runs to a
    /// single space, dropping non-whitespace control characters. Returns false
    /// for non-string values or when nothing printable remains.
    /// </summary>
    private static bool TryNormalize(JsonValue value, out string normalized)
    {
        normalized = string.Empty;
        if (!value.TryGetValue(out string? text) || text is null)
        {
            return false;
        }
        var span = text.AsSpan();
        var builder = new StringBuilder(span.Length);
        var previousWhitespace = false;
        foreach (var ch in span)
        {
            if (char.IsControl(ch) && !char.IsWhiteSpace(ch))
            {
                continue;
            }
            if (char.IsWhiteSpace(ch))
            {
                if (previousWhitespace)
                {
                    continue;
                }
                builder.Append(' ');
                previousWhitespace = true;
            }
            else
            {
                builder.Append(ch);
                previousWhitespace = false;
            }
        }
        normalized = builder.ToString().Trim();
        return normalized.Length > 0;
    }

    // Filters out values that carry no letters (e.g. pure numbers/ids).
    private static bool ContainsLetter(string text)
        => text.Any(static ch => char.IsLetter(ch));

    // Joins JSON object path segments with '.'.
    private static string AppendPath(string path, string? segment)
    {
        var safeSegment = segment ?? string.Empty;
        return string.IsNullOrEmpty(path) ? safeSegment : string.Concat(path, '.', safeSegment);
    }

    // Appends an array index as "[n]" using invariant formatting.
    private static string AppendIndex(string path, int index)
    {
        if (string.IsNullOrEmpty(path))
        {
            return $"[{index}]";
        }
        return string.Concat(path, '[', index.ToString(CultureInfo.InvariantCulture), ']');
    }

    // Deterministic chunk id: document id plus the first 8 bytes (16 hex chars)
    // of SHA-256("documentId|paragraphId").
    private static string CreateChunkId(string documentId, string paragraphId)
    {
        var input = string.Concat(documentId, '|', paragraphId);
        var hash = SHA256.HashData(Encoding.UTF8.GetBytes(input));
        return string.Concat(documentId, ':', Convert.ToHexString(hash.AsSpan(0, 8)));
    }
}

View File

@@ -63,7 +63,7 @@
| ID | Status | Owner(s) | Depends on | Notes |
|----|--------|----------|------------|-------|
| CONCELIER-AIAI-31-001 `Paragraph anchors` | TODO | Concelier WebService Guild | CONCELIER-VULN-29-001 | Expose advisory chunk API returning paragraph anchors, section metadata, and token-safe text for Advisory AI retrieval. |
| CONCELIER-AIAI-31-001 `Paragraph anchors` | DONE | Concelier WebService Guild | CONCELIER-VULN-29-001 | Expose advisory chunk API returning paragraph anchors, section metadata, and token-safe text for Advisory AI retrieval. See docs/updates/2025-11-07-concelier-advisory-chunks.md. |
| CONCELIER-AIAI-31-002 `Structured fields` | TODO | Concelier WebService Guild | CONCELIER-AIAI-31-001 | Ensure observation APIs expose upstream workaround/fix/CVSS fields with provenance; add caching for summary queries. |
| CONCELIER-AIAI-31-003 `Advisory AI telemetry` | TODO | Concelier WebService Guild, Observability Guild | CONCELIER-AIAI-31-001 | Emit metrics/logs for chunk requests, cache hits, and guardrail blocks triggered by advisory payloads. |

View File

@@ -12,7 +12,7 @@
| CONCELIER-CORE-AOC-19-004 `Remove ingestion normalization` | DONE (2025-11-06) | Concelier Core Guild | CONCELIER-CORE-AOC-19-002, POLICY-AOC-19-003 | Strip normalization/dedup/severity logic from ingestion pipelines, delegate derived computations to Policy Engine, and update exporters/tests to consume raw documents only.<br>2025-10-29 19:05Z: Audit completed for `AdvisoryRawService`/Mongo repo to confirm alias order/dedup removal persists; identified remaining normalization in observation/linkset factory that will be revised to surface raw duplicates for Policy ingestion. Change sketch + regression matrix drafted under `docs/dev/aoc-normalization-removal-notes.md` (pending commit).<br>2025-10-31 20:45Z: Added raw linkset projection to observations/storage, exposing canonical+raw views, refreshed fixtures/tests, and documented behaviour in models/doc factory.<br>2025-10-31 21:10Z: Coordinated with Policy Engine (POLICY-ENGINE-20-003) on adoption timeline; backfill + consumer readiness tracked in `docs/dev/raw-linkset-backfill-plan.md`.<br>2025-11-05 14:25Z: Resuming to document merge-dependent normalization paths and prepare implementation notes for `noMergeEnabled` gating before code changes land.<br>2025-11-05 19:20Z: Observation factory/linkset now preserve upstream ordering + duplicates; canonicalisation responsibility shifts to downstream consumers with refreshed unit coverage.<br>2025-11-06 16:10Z: Updated AOC reference/backfill docs with raw vs canonical guidance and cross-linked analyzer guardrails.<br>2025-11-06 23:40Z: Final pass preserves raw alias casing/whitespace end-to-end; query filters now compare case-insensitively, exporter fixtures refreshed, and docs aligned. Tests: `StellaOps.Concelier.Models/Core/Storage.Mongo.Tests` green on .NET 10 preview. |
> Docs alignment (2025-10-26): Architecture overview emphasises policy-only derivation; coordinate with Policy Engine guild for rollout.
> 2025-10-29: `AdvisoryRawService` now preserves upstream alias/linkset ordering (trim-only) and updated AOC documentation reflects the behaviour; follow-up to ensure policy consumers handle duplicates remains open.
| CONCELIER-CORE-AOC-19-013 `Authority tenant scope smoke coverage` | TODO | Concelier Core Guild | AUTH-AOC-19-002 | Extend Concelier smoke/e2e fixtures to configure `requiredTenants` and assert cross-tenant rejection with updated Authority tokens. | Coordinate deliverable so Authority docs (`AUTH-AOC-19-003`) can close once tests are in place. |
| CONCELIER-CORE-AOC-19-013 `Authority tenant scope smoke coverage` | DONE (2025-11-07) | Concelier Core Guild | AUTH-AOC-19-002 | Extend Concelier smoke/e2e fixtures to configure `requiredTenants` and assert cross-tenant rejection with updated Authority tokens. | Coordinate deliverable so Authority docs (`AUTH-AOC-19-003`) can close once tests are in place. |
## Policy Engine v2

View File

@@ -18,7 +18,9 @@ public static class MergeServiceCollectionExtensions
ArgumentNullException.ThrowIfNull(services);
ArgumentNullException.ThrowIfNull(configuration);
var noMergeEnabled = configuration.GetValue<bool?>("concelier:features:noMergeEnabled") ?? true;
var noMergeEnabled = configuration.GetValue<bool?>("concelier:features:noMergeEnabled")
?? configuration.GetValue<bool?>("features:noMergeEnabled")
?? true;
if (noMergeEnabled)
{
return services;

View File

@@ -10,6 +10,6 @@
| Task | Owner(s) | Depends on | Notes |
|---|---|---|---|
|MERGE-LNM-21-001 Migration plan authoring|BE-Merge, Architecture Guild|CONCELIER-LNM-21-101|**DONE (2025-11-03)** Authored `docs/migration/no-merge.md` with rollout phases, backfill/validation checklists, rollback guidance, and ownership matrix for the Link-Not-Merge cutover.|
|MERGE-LNM-21-002 Merge service deprecation|BE-Merge|MERGE-LNM-21-001|**DOING (2025-11-06)** Defaulted `concelier:features:noMergeEnabled` to `true`, added merge job allowlist gate, and began rewiring guard/tier tests; follow-up work required to restore Concelier WebService test suite before declaring completion.<br>2025-11-05 14:42Z: Implemented `concelier:features:noMergeEnabled` gate, merge job allowlist checks, `[Obsolete]` markings, and analyzer scaffolding to steer consumers toward linkset APIs.<br>2025-11-06 16:10Z: Introduced Roslyn analyzer (`CONCELIER0002`) referenced by Concelier WebService + tests, documented suppression guidance, and updated migration playbook.<br>2025-11-07 03:25Z: Default-on toggle + job gating break existing Concelier WebService tests; guard + seed fixes pending to unblock ingest/mirror suites.|
|MERGE-LNM-21-002 Merge service deprecation|BE-Merge|MERGE-LNM-21-001|**DOING (2025-11-07)** Defaulted `concelier:features:noMergeEnabled` to `true`, added merge job allowlist gate, and began rewiring guard/tier tests; follow-up work required to restore Concelier WebService test suite before declaring completion.<br>2025-11-05 14:42Z: Implemented `concelier:features:noMergeEnabled` gate, merge job allowlist checks, `[Obsolete]` markings, and analyzer scaffolding to steer consumers toward linkset APIs.<br>2025-11-06 16:10Z: Introduced Roslyn analyzer (`CONCELIER0002`) referenced by Concelier WebService + tests, documented suppression guidance, and updated migration playbook.<br>2025-11-07 03:25Z: Default-on toggle + job gating break existing Concelier WebService tests; guard + seed fixes pending to unblock ingest/mirror suites.<br>2025-11-07 07:05Z: Added ingest logging + test log dumps to trace upstream hash loss; still chasing why Minimal API binding strips `upstream.contentHash` before the guard runs.|
> 2025-11-03: Catalogued call sites (WebService Program `AddMergeModule`, built-in job registration `merge:reconcile`, `MergeReconcileJob`) and confirmed unit tests are the only direct `MergeAsync` callers; next step is to define analyzer + replacement observability coverage.
|MERGE-LNM-21-003 Determinism/test updates|QA Guild, BE-Merge|MERGE-LNM-21-002|Replace merge determinism suites with observation/linkset regression tests verifying no data mutation and conflicts remain visible.|

View File

@@ -9,6 +9,7 @@ using System.Net;
using System.Net.Http.Json;
using System.Net.Http.Headers;
using System.Security.Claims;
using System.Security.Cryptography;
using System.Text;
using System.Text.Json;
using Microsoft.AspNetCore.Builder;
@@ -22,6 +23,7 @@ using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Options;
using Mongo2Go;
using MongoDB.Bson;
using MongoDB.Bson.IO;
using MongoDB.Driver;
using StellaOps.Concelier.Core.Events;
using StellaOps.Concelier.Core.Jobs;
@@ -29,6 +31,7 @@ using StellaOps.Concelier.Models;
using StellaOps.Concelier.Merge.Services;
using StellaOps.Concelier.Storage.Mongo;
using StellaOps.Concelier.Storage.Mongo.Observations;
using StellaOps.Concelier.Core.Raw;
using StellaOps.Concelier.WebService.Jobs;
using StellaOps.Concelier.WebService.Options;
using StellaOps.Concelier.WebService.Contracts;
@@ -36,6 +39,7 @@ using Xunit.Sdk;
using StellaOps.Auth.Abstractions;
using StellaOps.Auth.Client;
using Xunit;
using Xunit.Abstractions;
using Microsoft.IdentityModel.Protocols;
using Microsoft.IdentityModel.Protocols.OpenIdConnect;
using StellaOps.Concelier.WebService.Diagnostics;
@@ -50,9 +54,15 @@ public sealed class WebServiceEndpointsTests : IAsyncLifetime
private const string TestSigningSecret = "0123456789ABCDEF0123456789ABCDEF";
private static readonly SymmetricSecurityKey TestSigningKey = new(Encoding.UTF8.GetBytes(TestSigningSecret));
private readonly ITestOutputHelper _output;
private MongoDbRunner _runner = null!;
private ConcelierApplicationFactory _factory = null!;
// Captures xUnit's per-test output sink so captured service logs can be dumped on failure.
public WebServiceEndpointsTests(ITestOutputHelper output)
{
    _output = output;
}
public Task InitializeAsync()
{
_runner = MongoDbRunner.Start(singleNodeReplSet: true);
@@ -200,17 +210,123 @@ public sealed class WebServiceEndpointsTests : IAsyncLifetime
Assert.True(response.StatusCode == HttpStatusCode.BadRequest, $"Expected 400 but got {(int)response.StatusCode}: {body}");
}
[Fact]
public async Task AdvisoryChunksEndpoint_ReturnsParagraphAnchors()
{
    // Arrange: two observations for the same alias. The newer one carries a summary
    // paragraph plus nested detail entries; the older one only a summary paragraph.
    var newestRaw = BsonDocument.Parse(
        """
        {
        "summary": {
        "intro": "This is a deterministic summary paragraph describing CVE-2025-0001 with remediation context for Advisory AI consumers."
        },
        "details": [
        "Long-form remediation guidance that exceeds the minimum length threshold and mentions affected packages.",
        {
        "body": "Nested context that Advisory AI can cite when rendering downstream explanations."
        }
        ]
        }
        """);

    var olderRaw = BsonDocument.Parse(
        """
        {
        "summary": {
        "intro": "Older paragraph that should be visible when no section filter applies."
        }
        }
        """);

    // Distinct createdAt values make the "newest" observation deterministic.
    var newerCreatedAt = new DateTime(2025, 1, 7, 0, 0, 0, DateTimeKind.Utc);
    var olderCreatedAt = new DateTime(2025, 1, 5, 0, 0, 0, DateTimeKind.Utc);
    var newerHash = ComputeContentHash(newestRaw);
    var olderHash = ComputeContentHash(olderRaw);

    var documents = new[]
    {
        CreateChunkObservationDocument(
            id: "tenant-a:chunk:newest",
            tenant: "tenant-a",
            createdAt: newerCreatedAt,
            alias: "cve-2025-0001",
            rawDocument: newestRaw),
        CreateChunkObservationDocument(
            id: "tenant-a:chunk:older",
            tenant: "tenant-a",
            createdAt: olderCreatedAt,
            alias: "cve-2025-0001",
            rawDocument: olderRaw)
    };

    await SeedObservationDocumentsAsync(documents);
    // Raw advisory documents are seeded alongside observations so the chunk endpoint
    // can resolve content by hash. Clones keep the seeded copies independent.
    await SeedAdvisoryRawDocumentsAsync(
        CreateAdvisoryRawDocument("tenant-a", "nvd", "tenant-a:chunk:newest", newerHash, newestRaw.DeepClone().AsBsonDocument),
        CreateAdvisoryRawDocument("tenant-a", "nvd", "tenant-a:chunk:older", olderHash, olderRaw.DeepClone().AsBsonDocument));

    // Act: request only the "summary" section in csaf format.
    using var client = _factory.CreateClient();
    var response = await client.GetAsync("/advisories/cve-2025-0001/chunks?tenant=tenant-a&section=summary&format=csaf");
    response.EnsureSuccessStatusCode();

    var payload = await response.Content.ReadAsStringAsync();
    using var document = JsonDocument.Parse(payload);
    var root = document.RootElement;

    Assert.Equal("cve-2025-0001", root.GetProperty("advisoryKey").GetString());
    // With the section filter applied, only the newest observation's summary paragraph remains.
    Assert.Equal(1, root.GetProperty("total").GetInt32());
    Assert.False(root.GetProperty("truncated").GetBoolean());

    var chunk = Assert.Single(root.GetProperty("chunks").EnumerateArray());
    Assert.Equal("summary", chunk.GetProperty("section").GetString());
    Assert.Equal("summary.intro", chunk.GetProperty("paragraphId").GetString());

    var text = chunk.GetProperty("text").GetString();
    Assert.False(string.IsNullOrWhiteSpace(text));
    Assert.Contains("deterministic summary paragraph", text, StringComparison.OrdinalIgnoreCase);

    var metadata = chunk.GetProperty("metadata");
    Assert.Equal("summary.intro", metadata.GetProperty("path").GetString());
    Assert.Equal("csaf", metadata.GetProperty("format").GetString());

    // Sources still list both observations (newest first) even though the older one
    // contributed no chunk under the active section filter.
    var sources = root.GetProperty("sources").EnumerateArray().ToArray();
    Assert.Equal(2, sources.Length);
    Assert.Equal("tenant-a:chunk:newest", sources[0].GetProperty("observationId").GetString());
    Assert.Equal("tenant-a:chunk:older", sources[1].GetProperty("observationId").GetString());
    Assert.All(
        sources,
        source => Assert.True(string.Equals("csaf", source.GetProperty("format").GetString(), StringComparison.OrdinalIgnoreCase)));
}
[Fact]
public async Task AdvisoryChunksEndpoint_ReturnsNotFoundWhenAdvisoryMissing()
{
    // Seed unrelated observations so the 404 reflects a missing advisory, not an empty store.
    await SeedObservationDocumentsAsync(BuildSampleObservationDocuments());

    using var client = _factory.CreateClient();
    var response = await client.GetAsync("/advisories/cve-2099-9999/chunks?tenant=tenant-a");

    Assert.Equal(HttpStatusCode.NotFound, response.StatusCode);

    // Body is an RFC 7807 problem document identifying the missing advisory key.
    var payload = await response.Content.ReadAsStringAsync();
    using var document = JsonDocument.Parse(payload);
    var root = document.RootElement;
    Assert.Equal("https://stellaops.org/problems/not-found", root.GetProperty("type").GetString());
    Assert.Equal("Advisory not found", root.GetProperty("title").GetString());
    Assert.Contains("cve-2099-9999", root.GetProperty("detail").GetString(), StringComparison.OrdinalIgnoreCase);
}
[Fact]
public async Task AdvisoryIngestEndpoint_PersistsDocumentAndSupportsReadback()
{
using var client = _factory.CreateClient();
client.DefaultRequestHeaders.Add("X-Stella-Tenant", "tenant-ingest");
const string upstreamId = "GHSA-INGEST-0001";
var ingestRequest = BuildAdvisoryIngestRequest(
contentHash: "sha256:abc123",
upstreamId: "GHSA-INGEST-0001");
contentHash: null,
upstreamId: upstreamId);
var ingestResponse = await client.PostAsJsonAsync("/ingest/advisory", ingestRequest);
if (ingestResponse.StatusCode != HttpStatusCode.Created)
{
WriteProgramLogs();
}
Assert.Equal(HttpStatusCode.Created, ingestResponse.StatusCode);
var ingestPayload = await ingestResponse.Content.ReadFromJsonAsync<AdvisoryIngestResponse>();
@@ -218,7 +334,7 @@ public sealed class WebServiceEndpointsTests : IAsyncLifetime
Assert.True(ingestPayload!.Inserted);
Assert.False(string.IsNullOrWhiteSpace(ingestPayload.Id));
Assert.Equal("tenant-ingest", ingestPayload.Tenant);
Assert.Equal("sha256:abc123", ingestPayload.ContentHash);
Assert.Equal(ComputeDeterministicContentHash(upstreamId), ingestPayload.ContentHash);
Assert.NotNull(ingestResponse.Headers.Location);
var locationValue = ingestResponse.Headers.Location!.ToString();
Assert.False(string.IsNullOrWhiteSpace(locationValue));
@@ -230,8 +346,8 @@ public sealed class WebServiceEndpointsTests : IAsyncLifetime
Assert.Equal(ingestPayload.Id, decodedSegment);
var duplicateResponse = await client.PostAsJsonAsync("/ingest/advisory", BuildAdvisoryIngestRequest(
contentHash: "sha256:abc123",
upstreamId: "GHSA-INGEST-0001"));
contentHash: null,
upstreamId: upstreamId));
Assert.Equal(HttpStatusCode.OK, duplicateResponse.StatusCode);
var duplicatePayload = await duplicateResponse.Content.ReadFromJsonAsync<AdvisoryIngestResponse>();
Assert.NotNull(duplicatePayload);
@@ -247,7 +363,7 @@ public sealed class WebServiceEndpointsTests : IAsyncLifetime
Assert.NotNull(record);
Assert.Equal(ingestPayload.Id, record!.Id);
Assert.Equal("tenant-ingest", record.Tenant);
Assert.Equal("sha256:abc123", record.Document.Upstream.ContentHash);
Assert.Equal(ComputeDeterministicContentHash(upstreamId), record.Document.Upstream.ContentHash);
}
using (var listRequest = new HttpRequestMessage(HttpMethod.Get, "/advisories/raw?limit=10"))
@@ -451,6 +567,54 @@ public sealed class WebServiceEndpointsTests : IAsyncLifetime
Assert.Equal(HttpStatusCode.Forbidden, crossTenantResponse.StatusCode);
}
[Fact]
public async Task AdvisoryIngestEndpoint_RejectsTenantOutsideAllowlist()
{
    // Authority configuration restricting ingest to the single tenant "tenant-auth".
    var environment = new Dictionary<string, string?>
    {
        ["CONCELIER_AUTHORITY__ENABLED"] = "true",
        ["CONCELIER_AUTHORITY__ALLOWANONYMOUSFALLBACK"] = "false",
        ["CONCELIER_AUTHORITY__ISSUER"] = TestAuthorityIssuer,
        ["CONCELIER_AUTHORITY__REQUIREHTTPSMETADATA"] = "false",
        ["CONCELIER_AUTHORITY__AUDIENCES__0"] = TestAuthorityAudience,
        ["CONCELIER_AUTHORITY__CLIENTID"] = "webservice-tests",
        ["CONCELIER_AUTHORITY__CLIENTSECRET"] = "unused",
        ["CONCELIER_AUTHORITY__REQUIREDTENANTS__0"] = "tenant-auth"
    };

    // The same allowlist is applied both via environment variables and the options
    // callback so the factory's configuration layers agree.
    using var factory = new ConcelierApplicationFactory(
        _runner.ConnectionString,
        authority =>
        {
            authority.Enabled = true;
            authority.AllowAnonymousFallback = false;
            authority.Issuer = TestAuthorityIssuer;
            authority.RequireHttpsMetadata = false;
            authority.Audiences.Clear();
            authority.Audiences.Add(TestAuthorityAudience);
            authority.ClientId = "webservice-tests";
            authority.ClientSecret = "unused";
            authority.RequiredTenants.Clear();
            authority.RequiredTenants.Add("tenant-auth");
        },
        environment);

    using var client = factory.CreateClient();

    // Allowlisted tenant: ingest succeeds.
    var allowedToken = CreateTestToken("tenant-auth", StellaOpsScopes.AdvisoryIngest);
    client.DefaultRequestHeaders.Authorization = new AuthenticationHeaderValue("Bearer", allowedToken);
    client.DefaultRequestHeaders.Add("X-Stella-Tenant", "tenant-auth");
    var allowedResponse = await client.PostAsJsonAsync("/ingest/advisory", BuildAdvisoryIngestRequest("sha256:allow-1", "GHSA-ALLOW-001"));
    Assert.Equal(HttpStatusCode.Created, allowedResponse.StatusCode);

    // Tenant outside the allowlist: same scope, but the tenant claim/header is rejected.
    client.DefaultRequestHeaders.Authorization = new AuthenticationHeaderValue("Bearer", CreateTestToken("tenant-blocked", StellaOpsScopes.AdvisoryIngest));
    client.DefaultRequestHeaders.Remove("X-Stella-Tenant");
    client.DefaultRequestHeaders.Add("X-Stella-Tenant", "tenant-blocked");
    var forbiddenResponse = await client.PostAsJsonAsync("/ingest/advisory", BuildAdvisoryIngestRequest("sha256:allow-2", "GHSA-ALLOW-002"));
    Assert.Equal(HttpStatusCode.Forbidden, forbiddenResponse.StatusCode);
}
[Fact]
public async Task AdvisoryIngestEndpoint_ReturnsGuardViolationWhenContentHashMissing()
{
@@ -1244,6 +1408,55 @@ public sealed class WebServiceEndpointsTests : IAsyncLifetime
};
}
/// <summary>
/// Builds an observation document whose raw content and upstream content hash are both
/// derived from <paramref name="rawDocument"/>, keeping the chunk endpoint's view consistent.
/// </summary>
private static AdvisoryObservationDocument CreateChunkObservationDocument(
    string id,
    string tenant,
    DateTime createdAt,
    string alias,
    BsonDocument rawDocument)
{
    var document = CreateObservationDocument(
        id,
        tenant,
        createdAt,
        aliases: new[] { alias });

    // Deep clone so the caller's BsonDocument cannot be mutated through the stored reference.
    var clone = rawDocument.DeepClone().AsBsonDocument;
    document.Content.Raw = clone;
    document.Upstream.ContentHash = ComputeContentHash(clone);
    return document;
}
private static readonly DateTimeOffset DefaultIngestTimestamp = new(2025, 1, 1, 0, 0, 0, TimeSpan.Zero);
/// <summary>
/// Computes the "sha256:&lt;lowercase hex&gt;" content hash of a BSON payload, serializing it
/// as relaxed extended JSON so repeated calls over equal documents hash identically.
/// </summary>
private static string ComputeContentHash(BsonDocument rawDocument)
{
    var canonical = rawDocument.ToJson(new JsonWriterSettings
    {
        OutputMode = JsonOutputMode.RelaxedExtendedJson
    });

    // Static HashData avoids allocating/disposing a SHA256 instance per call.
    var bytes = SHA256.HashData(Encoding.UTF8.GetBytes(canonical));
    return $"sha256:{Convert.ToHexString(bytes).ToLowerInvariant()}";
}
/// <summary>
/// Predicts the content hash the ingest endpoint assigns when the caller omits one:
/// a sha256 over a payload built from the upstream id and the fixed
/// <see cref="DefaultIngestTimestamp"/>.
/// NOTE(review): requests built by BuildAdvisoryIngestRequest use DateTime.UtcNow in
/// their raw payload, so this only matches if the service echoes the declared hash
/// rather than recomputing it from the raw document — TODO confirm.
/// </summary>
private static string ComputeDeterministicContentHash(string upstreamId)
{
    var raw = CreateJsonElement($@"{{""id"":""{upstreamId}"",""modified"":""{DefaultIngestTimestamp:O}""}}");
    return NormalizeContentHash(null, raw, enforceContentHash: true);
}
/// <summary>
/// Mirrors the ingest guard's content-hash normalization: when enforcement is on, the
/// hash is recomputed from the raw JSON's exact text; otherwise the supplied value
/// (or empty string) is passed through untouched.
/// </summary>
/// <param name="value">Caller-supplied content hash, possibly null.</param>
/// <param name="raw">Raw advisory JSON whose exact text feeds the digest.</param>
/// <param name="enforceContentHash">True to derive the hash from <paramref name="raw"/>.</param>
/// <returns>A "sha256:&lt;lowercase hex&gt;" digest, or the pass-through value.</returns>
private static string NormalizeContentHash(string? value, JsonElement raw, bool enforceContentHash)
{
    if (!enforceContentHash)
    {
        return value ?? string.Empty;
    }

    // Static HashData avoids allocating/disposing a SHA256 instance per call.
    var bytes = SHA256.HashData(Encoding.UTF8.GetBytes(raw.GetRawText()));
    return $"sha256:{Convert.ToHexString(bytes).ToLowerInvariant()}";
}
private sealed record ReplayResponse(
string VulnerabilityKey,
DateTimeOffset? AsOf,
@@ -1690,8 +1903,18 @@ public sealed class WebServiceEndpointsTests : IAsyncLifetime
return $"advisory_raw:{vendorSegment}:{upstreamSegment}:{hashSegment}";
}
private static AdvisoryIngestRequest BuildAdvisoryIngestRequest(string contentHash, string upstreamId)
// Dumps any log entries captured for the WebService Program logger into the xUnit output,
// so failing tests show what the host logged during startup/request handling.
private void WriteProgramLogs()
{
    foreach (var entry in _factory.LoggerProvider.Snapshot("StellaOps.Concelier.WebService.Program"))
    {
        _output.WriteLine($"[PROGRAM LOG] {entry.Level}: {entry.Message}");
    }
}
private static AdvisoryIngestRequest BuildAdvisoryIngestRequest(string? contentHash, string upstreamId)
{
var normalizedContentHash = contentHash ?? ComputeDeterministicContentHash(upstreamId);
var raw = CreateJsonElement($@"{{""id"":""{upstreamId}"",""modified"":""{DateTime.UtcNow:O}""}}");
var references = new[]
{
@@ -1704,7 +1927,7 @@ public sealed class WebServiceEndpointsTests : IAsyncLifetime
upstreamId,
"2025-01-01T00:00:00Z",
DateTimeOffset.UtcNow,
contentHash,
normalizedContentHash,
new AdvisorySignatureRequest(false, null, null, null, null, null),
new Dictionary<string, string> { ["http.method"] = "GET" }),
new AdvisoryContentRequest("osv", "1.3.0", raw, null),

View File

@@ -12,6 +12,7 @@ using StellaOps.Scanner.Sbomer.BuildXPlugin.Attestation;
using StellaOps.Scanner.Sbomer.BuildXPlugin.Cas;
using StellaOps.Scanner.Sbomer.BuildXPlugin.Descriptor;
using StellaOps.Scanner.Sbomer.BuildXPlugin.Manifest;
using StellaOps.Scanner.Sbomer.BuildXPlugin.Surface;
namespace StellaOps.Scanner.Sbomer.BuildXPlugin;
@@ -131,6 +132,12 @@ internal static class Program
Console.WriteLine(" --attestor <url> (descriptor) Optional Attestor endpoint for provenance placeholders.");
Console.WriteLine(" --attestor-token <token> Bearer token for Attestor requests (or STELLAOPS_ATTESTOR_TOKEN).");
Console.WriteLine(" --attestor-insecure Skip TLS verification for Attestor requests (dev/test only).");
Console.WriteLine(" --surface-layer-fragments <path> Persist layer fragments JSON into Surface.FS.");
Console.WriteLine(" --surface-entrytrace-graph <path> Persist EntryTrace graph JSON into Surface.FS.");
Console.WriteLine(" --surface-entrytrace-ndjson <path> Persist EntryTrace NDJSON into Surface.FS.");
Console.WriteLine(" --surface-cache-root <path> Override Surface cache root (defaults to CAS root).");
Console.WriteLine(" --surface-bucket <name> Bucket name used in Surface CAS URIs (default scanner-artifacts).");
Console.WriteLine(" --surface-tenant <tenant> Tenant identifier recorded in the Surface manifest.");
return 0;
}
@@ -186,6 +193,11 @@ internal static class Program
private static async Task<int> RunDescriptorAsync(string[] args, CancellationToken cancellationToken)
{
var manifestDirectory = ResolveManifestDirectory(args);
var loader = new BuildxPluginManifestLoader(manifestDirectory);
var manifest = await loader.LoadDefaultAsync(cancellationToken).ConfigureAwait(false);
var casRoot = ResolveCasRoot(args, manifest);
var imageDigest = RequireOption(args, "--image");
var sbomPath = RequireOption(args, "--sbom");
@@ -244,11 +256,110 @@ internal static class Program
await attestorClient.SendPlaceholderAsync(attestorUri, document, cancellationToken).ConfigureAwait(false);
}
await TryPublishSurfaceArtifactsAsync(args, request, casRoot, version, cancellationToken).ConfigureAwait(false);
var json = JsonSerializer.Serialize(document, DescriptorJsonOptions);
Console.WriteLine(json);
return 0;
}
// Publishes Surface artefacts (layer fragments / EntryTrace outputs) into the CAS when
// any surface inputs were configured; silently does nothing otherwise.
private static async Task TryPublishSurfaceArtifactsAsync(
    string[] args,
    DescriptorRequest descriptorRequest,
    string casRoot,
    string generatorVersion,
    CancellationToken cancellationToken)
{
    var options = ResolveSurfaceOptions(args, descriptorRequest, casRoot, generatorVersion);
    if (options is { HasArtifacts: true })
    {
        var writer = new SurfaceManifestWriter(TimeProvider.System);
        var written = await writer.WriteAsync(options, cancellationToken).ConfigureAwait(false);
        if (written is not null)
        {
            // Status goes to stderr so stdout stays reserved for the descriptor JSON.
            Console.Error.WriteLine($"surface manifest stored: {written.ManifestUri} ({written.Document.Artifacts.Count} artefacts)");
        }
    }
}
/// <summary>
/// Resolves Surface publication settings from CLI flags and environment variables.
/// Returns null when no artefact input paths are configured (surface publishing disabled).
/// </summary>
/// <param name="args">Raw CLI arguments.</param>
/// <param name="descriptorRequest">Descriptor being emitted; supplies scan-id fallbacks.</param>
/// <param name="casRoot">Default cache root when no surface-specific root is given.</param>
/// <param name="generatorVersion">Default component version.</param>
private static SurfaceOptions? ResolveSurfaceOptions(
    string[] args,
    DescriptorRequest descriptorRequest,
    string casRoot,
    string generatorVersion)
{
    // Every surface setting follows the same precedence: CLI flag first, then the
    // corresponding environment variable; defaults are applied per setting below.
    static string? Lookup(string[] source, string optionName, string environmentVariable)
        => GetOption(source, optionName) ?? Environment.GetEnvironmentVariable(environmentVariable);

    var layerFragmentsPath = Lookup(args, "--surface-layer-fragments", "STELLAOPS_SURFACE_LAYER_FRAGMENTS");
    var entryTraceGraphPath = Lookup(args, "--surface-entrytrace-graph", "STELLAOPS_SURFACE_ENTRYTRACE_GRAPH");
    var entryTraceNdjsonPath = Lookup(args, "--surface-entrytrace-ndjson", "STELLAOPS_SURFACE_ENTRYTRACE_NDJSON");

    // No artefact inputs means surface publication is disabled for this run.
    if (string.IsNullOrWhiteSpace(layerFragmentsPath) &&
        string.IsNullOrWhiteSpace(entryTraceGraphPath) &&
        string.IsNullOrWhiteSpace(entryTraceNdjsonPath))
    {
        return null;
    }

    var cacheRoot = Lookup(args, "--surface-cache-root", "STELLAOPS_SURFACE_CACHE_ROOT") ?? casRoot;
    var bucket = Lookup(args, "--surface-bucket", "STELLAOPS_SURFACE_BUCKET") ?? SurfaceCasLayout.DefaultBucket;
    var rootPrefix = Lookup(args, "--surface-root-prefix", "STELLAOPS_SURFACE_ROOT_PREFIX") ?? SurfaceCasLayout.DefaultRootPrefix;
    var tenant = Lookup(args, "--surface-tenant", "STELLAOPS_SURFACE_TENANT") ?? "default";
    var component = Lookup(args, "--surface-component", "STELLAOPS_SURFACE_COMPONENT") ?? "scanner.buildx";
    var componentVersion = Lookup(args, "--surface-component-version", "STELLAOPS_SURFACE_COMPONENT_VERSION") ?? generatorVersion;
    var workerInstance = Lookup(args, "--surface-worker-instance", "STELLAOPS_SURFACE_WORKER_INSTANCE") ?? Environment.MachineName;

    // Attempt must be a positive integer; anything else falls back to 1.
    var attempt = 1;
    var attemptValue = Lookup(args, "--surface-attempt", "STELLAOPS_SURFACE_ATTEMPT");
    if (!string.IsNullOrWhiteSpace(attemptValue) && int.TryParse(attemptValue, out var parsedAttempt) && parsedAttempt > 0)
    {
        attempt = parsedAttempt;
    }

    var scanId = Lookup(args, "--surface-scan-id", "STELLAOPS_SURFACE_SCAN_ID")
        ?? descriptorRequest.SbomName
        ?? descriptorRequest.ImageDigest;

    var manifestOutput = Lookup(args, "--surface-manifest-output", "STELLAOPS_SURFACE_MANIFEST_OUTPUT");

    return new SurfaceOptions(
        CacheRoot: cacheRoot,
        CacheBucket: bucket,
        RootPrefix: rootPrefix,
        Tenant: tenant,
        Component: component,
        ComponentVersion: componentVersion,
        WorkerInstance: workerInstance,
        Attempt: attempt,
        ImageDigest: descriptorRequest.ImageDigest,
        ScanId: scanId,
        LayerFragmentsPath: layerFragmentsPath,
        EntryTraceGraphPath: entryTraceGraphPath,
        EntryTraceNdjsonPath: entryTraceNdjsonPath,
        ManifestOutputPath: manifestOutput);
}
private static string? GetOption(string[] args, string optionName)
{
for (var i = 0; i < args.Length; i++)

View File

@@ -0,0 +1,3 @@
using System.Runtime.CompilerServices;
[assembly: InternalsVisibleTo("StellaOps.Scanner.Sbomer.BuildXPlugin.Tests")]

View File

@@ -12,9 +12,13 @@
<InformationalVersion>0.1.0-alpha</InformationalVersion>
</PropertyGroup>
<ItemGroup>
<Content Include="stellaops.sbom-indexer.manifest.json">
<CopyToOutputDirectory>PreserveNewest</CopyToOutputDirectory>
</Content>
</ItemGroup>
</Project>
<ItemGroup>
<Content Include="stellaops.sbom-indexer.manifest.json">
<CopyToOutputDirectory>PreserveNewest</CopyToOutputDirectory>
</Content>
</ItemGroup>
<ItemGroup>
<ProjectReference Include="..\\__Libraries\\StellaOps.Scanner.Surface.FS\\StellaOps.Scanner.Surface.FS.csproj" />
</ItemGroup>
</Project>

View File

@@ -0,0 +1,112 @@
using System;
using System.IO;
using System.Security.Cryptography;
using System.Threading;
using System.Threading.Tasks;
namespace StellaOps.Scanner.Sbomer.BuildXPlugin.Surface;
/// <summary>
/// Deterministic layout helpers for Surface CAS objects: digest normalization,
/// object-key construction, cas:// URI formatting, and on-disk persistence.
/// </summary>
internal static class SurfaceCasLayout
{
    internal const string DefaultBucket = "scanner-artifacts";
    internal const string DefaultRootPrefix = "scanner";

    private const string Sha256 = "sha256";

    /// <summary>Trims the digest and prefixes it with "sha256:" when no algorithm is present.</summary>
    /// <exception cref="BuildxPluginException">Thrown when the digest is null or whitespace.</exception>
    public static string NormalizeDigest(string? digest)
    {
        if (string.IsNullOrWhiteSpace(digest))
        {
            throw new BuildxPluginException("Surface artefact digest cannot be empty.");
        }

        var candidate = digest.Trim();
        if (candidate.Contains(':', StringComparison.Ordinal))
        {
            return candidate;
        }

        return $"{Sha256}:{candidate}";
    }

    /// <summary>Returns the value portion of an "algo:value" digest, or the input unchanged when it has no value part.</summary>
    public static string ExtractDigestValue(string normalizedDigest)
    {
        var parts = normalizedDigest.Split(':', 2, StringSplitOptions.TrimEntries | StringSplitOptions.RemoveEmptyEntries);
        if (parts.Length == 2)
        {
            return parts[1];
        }

        return normalizedDigest;
    }

    /// <summary>Builds the relative CAS object key for an artefact of the given kind under the root prefix.</summary>
    public static string BuildObjectKey(string rootPrefix, SurfaceCasKind kind, string normalizedDigest)
    {
        var digestValue = ExtractDigestValue(normalizedDigest);

        // Kind determines both the payload directory and the canonical file name.
        var (prefix, extension) = kind switch
        {
            SurfaceCasKind.LayerFragments => ("surface/payloads/layer-fragments", "layer-fragments.json"),
            SurfaceCasKind.EntryTraceGraph => ("surface/payloads/entrytrace", "entrytrace.graph.json"),
            SurfaceCasKind.EntryTraceNdjson => ("surface/payloads/entrytrace", "entrytrace.ndjson"),
            SurfaceCasKind.Manifest => ("surface/manifests", "surface.manifest.json"),
            _ => ("surface/unknown", "artifact.bin")
        };

        var relative = $"{prefix}/{digestValue}/{extension}";
        var normalizedRoot = string.IsNullOrWhiteSpace(rootPrefix)
            ? string.Empty
            : rootPrefix.Trim().Trim('/');
        return string.IsNullOrWhiteSpace(normalizedRoot) ? relative : $"{normalizedRoot}/{relative}";
    }

    /// <summary>Formats a cas:// URI from a bucket and an object key, applying defaults for blanks.</summary>
    public static string BuildCasUri(string bucket, string objectKey)
    {
        var effectiveBucket = string.IsNullOrWhiteSpace(bucket) ? DefaultBucket : bucket.Trim();
        var effectiveKey = string.IsNullOrWhiteSpace(objectKey) ? string.Empty : objectKey.Trim().TrimStart('/');
        return $"cas://{effectiveBucket}/{effectiveKey}";
    }

    /// <summary>Computes the lowercase "sha256:&lt;hex&gt;" digest of the supplied bytes.</summary>
    public static string ComputeDigest(ReadOnlySpan<byte> content)
    {
        var hash = SHA256.HashData(content);
        return $"{Sha256}:{Convert.ToHexString(hash).ToLowerInvariant()}";
    }

    /// <summary>Writes bytes under the cache root at the given object key, creating parent directories.</summary>
    /// <returns>The absolute path of the written file.</returns>
    /// <exception cref="BuildxPluginException">Thrown when the cache root is null or whitespace.</exception>
    public static async Task<string> WriteBytesAsync(string rootDirectory, string objectKey, byte[] bytes, CancellationToken cancellationToken)
    {
        if (string.IsNullOrWhiteSpace(rootDirectory))
        {
            throw new BuildxPluginException("Surface cache root must be provided.");
        }

        var destination = Path.Combine(
            Path.GetFullPath(rootDirectory),
            objectKey.Replace('/', Path.DirectorySeparatorChar));

        var parent = Path.GetDirectoryName(destination);
        if (!string.IsNullOrWhiteSpace(parent))
        {
            Directory.CreateDirectory(parent);
        }

        await using var stream = new FileStream(
            destination,
            FileMode.Create,
            FileAccess.Write,
            FileShare.Read,
            bufferSize: 64 * 1024,
            options: FileOptions.Asynchronous | FileOptions.SequentialScan);
        await stream.WriteAsync(bytes, cancellationToken).ConfigureAwait(false);
        await stream.FlushAsync(cancellationToken).ConfigureAwait(false);

        return destination;
    }
}
/// <summary>Artefact categories recognised by <see cref="SurfaceCasLayout"/> when building CAS object keys.</summary>
internal enum SurfaceCasKind
{
    /// <summary>Layer fragments JSON payload.</summary>
    LayerFragments,
    /// <summary>EntryTrace graph JSON payload.</summary>
    EntryTraceGraph,
    /// <summary>EntryTrace NDJSON payload.</summary>
    EntryTraceNdjson,
    /// <summary>The surface manifest document itself.</summary>
    Manifest
}

View File

@@ -0,0 +1,227 @@
using System;
using System.Collections.Generic;
using System.Collections.Immutable;
using System.IO;
using System.Linq;
using System.Text.Json;
using System.Text.Json.Serialization;
using System.Threading;
using System.Threading.Tasks;
using StellaOps.Scanner.Surface.FS;
namespace StellaOps.Scanner.Sbomer.BuildXPlugin.Surface;
/// <summary>
/// Persists Surface artefacts (EntryTrace graph / NDJSON, layer fragments) into the
/// local CAS directory layout and emits a digest-addressed surface manifest that
/// describes every stored artefact.
/// </summary>
internal sealed class SurfaceManifestWriter
{
    // Web defaults + WhenWritingNull: optional manifest fields (e.g. component version,
    // artefact view) are omitted from the serialized JSON instead of appearing as null.
    private static readonly JsonSerializerOptions ManifestSerializerOptions = new(JsonSerializerDefaults.Web)
    {
        DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingNull,
        WriteIndented = false
    };

    private readonly TimeProvider _timeProvider;

    /// <summary>Creates a writer; the injected <paramref name="timeProvider"/> stamps GeneratedAt (testable clock).</summary>
    public SurfaceManifestWriter(TimeProvider timeProvider)
    {
        _timeProvider = timeProvider ?? throw new ArgumentNullException(nameof(timeProvider));
    }

    /// <summary>
    /// Writes all artefacts referenced by <paramref name="options"/> plus the manifest into
    /// the cache root. Returns null when no artefact paths are configured.
    /// </summary>
    /// <param name="options">Resolved surface settings (paths, tenant, CAS layout).</param>
    /// <param name="cancellationToken">Cancels file reads and writes.</param>
    /// <returns>The manifest digest/URI/path plus per-artefact results, or null.</returns>
    /// <exception cref="BuildxPluginException">A required path is empty or an artefact file is missing.</exception>
    public async Task<SurfaceManifestWriteResult?> WriteAsync(SurfaceOptions options, CancellationToken cancellationToken)
    {
        ArgumentNullException.ThrowIfNull(options);
        if (!options.HasArtifacts)
        {
            return null;
        }

        // Normalize all inputs up front; blank values fall back to the shared defaults.
        var cacheRoot = EnsurePath(options.CacheRoot, "Surface cache root must be provided.");
        var bucket = string.IsNullOrWhiteSpace(options.CacheBucket)
            ? SurfaceCasLayout.DefaultBucket
            : options.CacheBucket.Trim();
        var rootPrefix = string.IsNullOrWhiteSpace(options.RootPrefix)
            ? SurfaceCasLayout.DefaultRootPrefix
            : options.RootPrefix.Trim();
        var tenant = string.IsNullOrWhiteSpace(options.Tenant)
            ? "default"
            : options.Tenant.Trim();
        var component = string.IsNullOrWhiteSpace(options.Component)
            ? "scanner.buildx"
            : options.Component.Trim();
        var componentVersion = string.IsNullOrWhiteSpace(options.ComponentVersion)
            ? null
            : options.ComponentVersion.Trim();
        var workerInstance = string.IsNullOrWhiteSpace(options.WorkerInstance)
            ? Environment.MachineName
            : options.WorkerInstance.Trim();
        var attempt = options.Attempt <= 0 ? 1 : options.Attempt;
        var scanId = string.IsNullOrWhiteSpace(options.ScanId)
            ? options.ImageDigest
            : options.ScanId!.Trim();

        Directory.CreateDirectory(cacheRoot);

        var artifacts = new List<SurfaceArtifactWriteResult>();

        // Each configured input is read, content-addressed, and copied into the CAS.
        if (!string.IsNullOrWhiteSpace(options.EntryTraceGraphPath))
        {
            var descriptor = new SurfaceArtifactDescriptor(
                Kind: "entrytrace.graph",
                Format: "entrytrace.graph",
                MediaType: "application/json",
                View: null,
                CasKind: SurfaceCasKind.EntryTraceGraph,
                FilePath: EnsurePath(options.EntryTraceGraphPath!, "EntryTrace graph path is required."));
            artifacts.Add(await PersistArtifactAsync(descriptor, cacheRoot, bucket, rootPrefix, cancellationToken).ConfigureAwait(false));
        }

        if (!string.IsNullOrWhiteSpace(options.EntryTraceNdjsonPath))
        {
            var descriptor = new SurfaceArtifactDescriptor(
                Kind: "entrytrace.ndjson",
                Format: "entrytrace.ndjson",
                MediaType: "application/x-ndjson",
                View: null,
                CasKind: SurfaceCasKind.EntryTraceNdjson,
                FilePath: EnsurePath(options.EntryTraceNdjsonPath!, "EntryTrace NDJSON path is required."));
            artifacts.Add(await PersistArtifactAsync(descriptor, cacheRoot, bucket, rootPrefix, cancellationToken).ConfigureAwait(false));
        }

        if (!string.IsNullOrWhiteSpace(options.LayerFragmentsPath))
        {
            var descriptor = new SurfaceArtifactDescriptor(
                Kind: "layer.fragments",
                Format: "layer.fragments",
                MediaType: "application/json",
                View: "inventory",
                CasKind: SurfaceCasKind.LayerFragments,
                FilePath: EnsurePath(options.LayerFragmentsPath!, "Layer fragments path is required."));
            artifacts.Add(await PersistArtifactAsync(descriptor, cacheRoot, bucket, rootPrefix, cancellationToken).ConfigureAwait(false));
        }

        if (artifacts.Count == 0)
        {
            return null;
        }

        // Deterministic manifest ordering: by kind, then format (ordinal comparisons).
        var orderedArtifacts = artifacts
            .Select(a => a.ManifestArtifact)
            .OrderBy(a => a.Kind, StringComparer.Ordinal)
            .ThenBy(a => a.Format, StringComparer.Ordinal)
            .ToImmutableArray();

        var timestamp = _timeProvider.GetUtcNow();
        var manifestDocument = new SurfaceManifestDocument
        {
            Tenant = tenant,
            ImageDigest = SurfaceCasLayout.NormalizeDigest(options.ImageDigest),
            ScanId = scanId,
            GeneratedAt = timestamp,
            Source = new SurfaceManifestSource
            {
                Component = component,
                Version = componentVersion,
                WorkerInstance = workerInstance,
                Attempt = attempt
            },
            Artifacts = orderedArtifacts
        };

        // The manifest itself is content-addressed: its CAS key is derived from its own digest.
        var manifestBytes = JsonSerializer.SerializeToUtf8Bytes(manifestDocument, ManifestSerializerOptions);
        var manifestDigest = SurfaceCasLayout.ComputeDigest(manifestBytes);
        var manifestKey = SurfaceCasLayout.BuildObjectKey(rootPrefix, SurfaceCasKind.Manifest, manifestDigest);
        var manifestPath = await SurfaceCasLayout.WriteBytesAsync(cacheRoot, manifestKey, manifestBytes, cancellationToken).ConfigureAwait(false);
        var manifestUri = SurfaceCasLayout.BuildCasUri(bucket, manifestKey);

        // Optional extra copy of the manifest outside the CAS (e.g. for CI artefact upload).
        if (!string.IsNullOrWhiteSpace(options.ManifestOutputPath))
        {
            var manifestOutputPath = Path.GetFullPath(options.ManifestOutputPath);
            var manifestOutputDirectory = Path.GetDirectoryName(manifestOutputPath);
            if (!string.IsNullOrWhiteSpace(manifestOutputDirectory))
            {
                Directory.CreateDirectory(manifestOutputDirectory);
            }

            await File.WriteAllBytesAsync(manifestOutputPath, manifestBytes, cancellationToken).ConfigureAwait(false);
        }

        return new SurfaceManifestWriteResult(
            manifestDigest,
            manifestUri,
            manifestPath,
            manifestDocument,
            artifacts);
    }

    /// <summary>Reads one artefact file, stores it under its digest-derived key, and builds its manifest entry.</summary>
    private static async Task<SurfaceArtifactWriteResult> PersistArtifactAsync(
        SurfaceArtifactDescriptor descriptor,
        string cacheRoot,
        string bucket,
        string rootPrefix,
        CancellationToken cancellationToken)
    {
        cancellationToken.ThrowIfCancellationRequested();
        if (!File.Exists(descriptor.FilePath))
        {
            throw new BuildxPluginException($"Surface artefact file {descriptor.FilePath} was not found.");
        }

        var content = await File.ReadAllBytesAsync(descriptor.FilePath, cancellationToken).ConfigureAwait(false);
        var digest = SurfaceCasLayout.ComputeDigest(content);
        var objectKey = SurfaceCasLayout.BuildObjectKey(rootPrefix, descriptor.CasKind, digest);
        var filePath = await SurfaceCasLayout.WriteBytesAsync(cacheRoot, objectKey, content, cancellationToken).ConfigureAwait(false);
        var uri = SurfaceCasLayout.BuildCasUri(bucket, objectKey);

        var storage = new SurfaceManifestStorage
        {
            Bucket = bucket,
            ObjectKey = objectKey,
            SizeBytes = content.Length,
            ContentType = descriptor.MediaType
        };

        var artifact = new SurfaceManifestArtifact
        {
            Kind = descriptor.Kind,
            Uri = uri,
            Digest = digest,
            MediaType = descriptor.MediaType,
            Format = descriptor.Format,
            SizeBytes = content.Length,
            View = descriptor.View,
            Storage = storage
        };

        return new SurfaceArtifactWriteResult(objectKey, filePath, artifact);
    }

    /// <summary>Validates a path value and returns its trimmed, absolute form.</summary>
    /// <exception cref="BuildxPluginException">Thrown with <paramref name="message"/> when the value is blank.</exception>
    private static string EnsurePath(string value, string message)
    {
        if (string.IsNullOrWhiteSpace(value))
        {
            throw new BuildxPluginException(message);
        }

        return Path.GetFullPath(value.Trim());
    }
}
/// <summary>Input descriptor for a single artefact to persist into the Surface CAS.</summary>
internal sealed record SurfaceArtifactDescriptor(
    string Kind,
    string Format,
    string MediaType,
    string? View,
    SurfaceCasKind CasKind,
    string FilePath);

/// <summary>Outcome of persisting one artefact: its CAS key, on-disk path, and manifest entry.</summary>
internal sealed record SurfaceArtifactWriteResult(
    string ObjectKey,
    string FilePath,
    SurfaceManifestArtifact ManifestArtifact);

/// <summary>Overall result of a manifest write, including the manifest's own digest, URI and path.</summary>
internal sealed record SurfaceManifestWriteResult(
    string ManifestDigest,
    string ManifestUri,
    string ManifestPath,
    SurfaceManifestDocument Document,
    IReadOnlyList<SurfaceArtifactWriteResult> Artifacts);

View File

@@ -0,0 +1,25 @@
using System;
namespace StellaOps.Scanner.Sbomer.BuildXPlugin.Surface;
/// <summary>
/// Surface publication settings resolved from CLI flags / environment variables.
/// A null artefact path means that artefact is not published.
/// </summary>
internal sealed record SurfaceOptions(
    string CacheRoot,
    string CacheBucket,
    string RootPrefix,
    string Tenant,
    string Component,
    string ComponentVersion,
    string WorkerInstance,
    int Attempt,
    string ImageDigest,
    string? ScanId,
    string? LayerFragmentsPath,
    string? EntryTraceGraphPath,
    string? EntryTraceNdjsonPath,
    string? ManifestOutputPath)
{
    // True when at least one artefact payload path was supplied; gates manifest writing.
    public bool HasArtifacts =>
        !string.IsNullOrWhiteSpace(LayerFragmentsPath) ||
        !string.IsNullOrWhiteSpace(EntryTraceGraphPath) ||
        !string.IsNullOrWhiteSpace(EntryTraceNdjsonPath);
}

View File

@@ -2,6 +2,6 @@
| ID | Status | Owner(s) | Depends on | Description | Exit Criteria |
|----|--------|----------|------------|-------------|---------------|
| SCANNER-SURFACE-03 | DOING (2025-11-06) | BuildX Plugin Guild | SURFACE-FS-02 | Push layer manifests and entry fragments into Surface.FS during build-time SBOM generation.<br>2025-11-06: Kicked off manifest emitter wiring within BuildX export pipeline and outlined test fixtures targeting Surface.FS client mock. | BuildX integration tests confirm cache population; CLI docs updated. |
| SCANNER-SURFACE-03 | DONE (2025-11-07) | BuildX Plugin Guild | SURFACE-FS-02 | Push layer manifests and entry fragments into Surface.FS during build-time SBOM generation.<br>2025-11-06: Kicked off manifest emitter wiring within BuildX export pipeline and outlined test fixtures targeting Surface.FS client mock.<br>2025-11-07: Resumed work; reviewing Surface.FS models, CAS integration, and test harness approach before coding.<br>2025-11-07 22:10Z: Implemented Surface manifest writer + CLI plumbing, wired CAS persistence, documented the workflow, and added BuildX plug-in tests + Grafana fixture updates. | BuildX integration tests confirm cache population; CLI docs updated. |
| SCANNER-ENV-03 | TODO | BuildX Plugin Guild | SURFACE-ENV-02 | Adopt Surface.Env helpers for plugin configuration (cache roots, CAS endpoints, feature toggles). | Plugin loads helper; misconfig errors logged; README updated. |
| SCANNER-SECRETS-03 | TODO | BuildX Plugin Guild, Security Guild | SURFACE-SECRETS-02 | Use Surface.Secrets to retrieve registry credentials when interacting with CAS/referrers. | Secrets retrieved via shared library; e2e tests cover rotation; operations guide refreshed. |

View File

@@ -1,3 +1,4 @@
using System.Collections.Generic;
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Options;
using StellaOps.Scanner.Surface.Env;
@@ -28,6 +29,13 @@ internal sealed class ScannerSurfaceSecretConfigurator : IConfigureOptions<Scann
ArgumentNullException.ThrowIfNull(options);
var tenant = _surfaceEnvironment.Settings.Secrets.Tenant;
ApplyCasAccessSecret(options, tenant);
ApplyRegistrySecret(options, tenant);
ApplyAttestationSecret(options, tenant);
}
private void ApplyCasAccessSecret(ScannerWebServiceOptions options, string tenant)
{
var request = new SurfaceSecretRequest(
Tenant: tenant,
Component: ComponentName,
@@ -56,6 +64,120 @@ internal sealed class ScannerSurfaceSecretConfigurator : IConfigureOptions<Scann
ApplySecret(options.ArtifactStore ??= new ScannerWebServiceOptions.ArtifactStoreOptions(), secret);
}
/// <summary>
/// Resolves the 'registry' surface secret for this component/tenant and, when
/// present, replaces the registry credential options with the secret's entries.
/// A missing secret is logged at debug level and leaves existing configuration
/// untouched; any other resolution failure is logged as a warning and likewise
/// leaves configuration unchanged.
/// </summary>
private void ApplyRegistrySecret(ScannerWebServiceOptions options, string tenant)
{
var request = new SurfaceSecretRequest(
Tenant: tenant,
Component: ComponentName,
SecretType: "registry");
RegistryAccessSecret? secret = null;
try
{
// IConfigureOptions<T>.Configure is synchronous, so the async secret lookup
// is blocked on here; the handle is disposed as soon as parsing completes.
using var handle = _secretProvider.GetAsync(request).AsTask().GetAwaiter().GetResult();
secret = SurfaceSecretParser.ParseRegistryAccessSecret(handle);
}
catch (SurfaceSecretNotFoundException)
{
// Absence is an expected condition: keep whatever credentials were configured.
_logger.LogDebug("Surface secret 'registry' not found for {Component}; leaving registry credentials unchanged.", ComponentName);
}
catch (Exception ex)
{
// Resolution/parse failures are non-fatal; configuration proceeds without the secret.
_logger.LogWarning(ex, "Failed to resolve surface secret 'registry' for {Component}.", ComponentName);
}
if (secret is null)
{
return;
}
// Replace (not merge) the credential list with the secret's entries.
options.Registry ??= new ScannerWebServiceOptions.RegistryOptions();
options.Registry.DefaultRegistry = secret.DefaultRegistry;
options.Registry.Credentials = new List<ScannerWebServiceOptions.RegistryCredentialOptions>();
foreach (var entry in secret.Entries)
{
var credential = new ScannerWebServiceOptions.RegistryCredentialOptions
{
Registry = entry.Registry,
Username = entry.Username,
Password = entry.Password,
IdentityToken = entry.IdentityToken,
RegistryToken = entry.RegistryToken,
RefreshToken = entry.RefreshToken,
ExpiresAt = entry.ExpiresAt,
AllowInsecureTls = entry.AllowInsecureTls,
Email = entry.Email
};
// Scopes/headers are copied only when present so the option defaults stay intact.
if (entry.Scopes.Count > 0)
{
credential.Scopes = new List<string>(entry.Scopes);
}
if (entry.Headers.Count > 0)
{
foreach (var (key, value) in entry.Headers)
{
credential.Headers[key] = value;
}
}
options.Registry.Credentials.Add(credential);
}
_logger.LogInformation(
"Surface secret 'registry' applied for {Component} (default: {DefaultRegistry}, entries: {Count}).",
ComponentName,
options.Registry.DefaultRegistry ?? "(none)",
options.Registry.Credentials.Count);
}
/// <summary>
/// Resolves the 'attestation' surface secret for this component/tenant and, when
/// present, overlays its non-blank PEM values onto the signing options. Missing
/// secrets are logged at debug level; other failures as warnings. In both cases
/// the existing signing configuration is retained.
/// </summary>
private void ApplyAttestationSecret(ScannerWebServiceOptions options, string tenant)
{
var request = new SurfaceSecretRequest(
Tenant: tenant,
Component: ComponentName,
SecretType: "attestation");
AttestationSecret? secret = null;
try
{
// Configure() is synchronous, so the async secret lookup is blocked on here.
using var handle = _secretProvider.GetAsync(request).AsTask().GetAwaiter().GetResult();
secret = SurfaceSecretParser.ParseAttestationSecret(handle);
}
catch (SurfaceSecretNotFoundException)
{
_logger.LogDebug("Surface secret 'attestation' not found for {Component}; retaining signing configuration.", ComponentName);
}
catch (Exception ex)
{
_logger.LogWarning(ex, "Failed to resolve surface secret 'attestation' for {Component}.", ComponentName);
}
if (secret is null)
{
return;
}
options.Signing ??= new ScannerWebServiceOptions.SigningOptions();
// Each field is only overwritten when the secret provides a non-blank value,
// so partially-populated secrets cannot erase configured PEM material.
if (!string.IsNullOrWhiteSpace(secret.KeyPem))
{
options.Signing.KeyPem = secret.KeyPem;
}
if (!string.IsNullOrWhiteSpace(secret.CertificatePem))
{
options.Signing.CertificatePem = secret.CertificatePem;
}
if (!string.IsNullOrWhiteSpace(secret.CertificateChainPem))
{
options.Signing.CertificateChainPem = secret.CertificateChainPem;
}
}
private void ApplySecret(ScannerWebServiceOptions.ArtifactStoreOptions artifactStore, CasAccessSecret secret)
{
if (!string.IsNullOrWhiteSpace(secret.Driver))

View File

@@ -26,10 +26,15 @@ public sealed class ScannerWebServiceOptions
/// </summary>
public QueueOptions Queue { get; set; } = new();
/// <summary>
/// Object store configuration for SBOM artefacts.
/// </summary>
public ArtifactStoreOptions ArtifactStore { get; set; } = new();
/// <summary>
/// Object store configuration for SBOM artefacts.
/// </summary>
public ArtifactStoreOptions ArtifactStore { get; set; } = new();
/// <summary>
/// Registry credential configuration for report/export operations.
/// </summary>
public RegistryOptions Registry { get; set; } = new();
/// <summary>
/// Feature flags toggling optional behaviours.
@@ -144,11 +149,11 @@ public sealed class ScannerWebServiceOptions
public IDictionary<string, string> Headers { get; set; } = new Dictionary<string, string>(StringComparer.OrdinalIgnoreCase);
}
public sealed class FeatureFlagOptions
{
public bool AllowAnonymousScanSubmission { get; set; }
public bool EnableSignedReports { get; set; } = true;
public sealed class FeatureFlagOptions
{
public bool AllowAnonymousScanSubmission { get; set; }
public bool EnableSignedReports { get; set; } = true;
public bool EnablePolicyPreview { get; set; } = true;
@@ -233,11 +238,11 @@ public sealed class ScannerWebServiceOptions
}
}
public sealed class SigningOptions
{
public bool Enabled { get; set; } = false;
public string KeyId { get; set; } = string.Empty;
public sealed class SigningOptions
{
public bool Enabled { get; set; } = false;
public string KeyId { get; set; } = string.Empty;
public string Algorithm { get; set; } = "ed25519";
@@ -251,12 +256,44 @@ public sealed class ScannerWebServiceOptions
public string? CertificatePemFile { get; set; }
public string? CertificateChainPem { get; set; }
public string? CertificateChainPemFile { get; set; }
public int EnvelopeTtlSeconds { get; set; } = 600;
}
public string? CertificateChainPem { get; set; }
public string? CertificateChainPemFile { get; set; }
public int EnvelopeTtlSeconds { get; set; } = 600;
}
/// <summary>
/// Registry credential configuration. Hydrated from the 'registry' surface
/// secret by the secret configurator, but may also be bound from configuration.
/// </summary>
public sealed class RegistryOptions
{
// Default registry host; presumably the fallback when a credential entry
// does not name one — TODO confirm against consumers.
public string? DefaultRegistry { get; set; }
public IList<RegistryCredentialOptions> Credentials { get; set; } = new List<RegistryCredentialOptions>();
}
/// <summary>
/// Credential material for a single container registry. Null/empty members mean
/// the corresponding value was not supplied; multiple auth mechanisms
/// (username/password, identity token, registry token, refresh token) may be
/// present side by side.
/// </summary>
public sealed class RegistryCredentialOptions
{
// Registry host/name this credential applies to.
public string Registry { get; set; } = string.Empty;
public string? Username { get; set; }
public string? Password { get; set; }
public string? IdentityToken { get; set; }
public string? RegistryToken { get; set; }
public string? RefreshToken { get; set; }
// Expiry of the credential, when the issuing secret provides one.
public DateTimeOffset? ExpiresAt { get; set; }
public IList<string> Scopes { get; set; } = new List<string>();
public bool? AllowInsecureTls { get; set; }
// Extra headers keyed case-insensitively; copied verbatim from the secret entry.
public IDictionary<string, string> Headers { get; set; } = new Dictionary<string, string>(StringComparer.OrdinalIgnoreCase);
public string? Email { get; set; }
}
public sealed class ApiOptions
{

View File

@@ -179,6 +179,10 @@ internal sealed class SurfacePointerService : ISurfacePointerService
ArtifactDocumentFormat.SpdxJson => "spdx-json",
ArtifactDocumentFormat.BomIndex => "bom-index",
ArtifactDocumentFormat.DsseJson => "dsse-json",
ArtifactDocumentFormat.SurfaceManifestJson => "surface.manifest",
ArtifactDocumentFormat.EntryTraceGraphJson => "entrytrace.graph",
ArtifactDocumentFormat.EntryTraceNdjson => "entrytrace.ndjson",
ArtifactDocumentFormat.ComponentFragmentJson => "layer.fragments",
_ => format.ToString().ToLowerInvariant()
};
@@ -199,6 +203,12 @@ internal sealed class SurfacePointerService : ISurfacePointerService
ArtifactDocumentType.Diff => ("diff", null),
ArtifactDocumentType.Attestation => ("attestation", null),
ArtifactDocumentType.Index => ("bom-index", null),
ArtifactDocumentType.SurfaceManifest => ("surface.manifest", null),
ArtifactDocumentType.SurfaceEntryTrace when document.Format == ArtifactDocumentFormat.EntryTraceGraphJson
=> ("entrytrace.graph", null),
ArtifactDocumentType.SurfaceEntryTrace when document.Format == ArtifactDocumentFormat.EntryTraceNdjson
=> ("entrytrace.ndjson", null),
ArtifactDocumentType.SurfaceLayerFragment => ("layer.fragments", "inventory"),
_ => (document.Type.ToString().ToLowerInvariant(), null)
};
}

View File

@@ -6,7 +6,7 @@
| SCANNER-SURFACE-02 | DONE (2025-11-05) | Scanner WebService Guild | SURFACE-FS-02 | Publish Surface.FS pointers (CAS URIs, manifests) via scan/report APIs and update attestation metadata.<br>2025-11-05: Surface pointers projected through scan/report endpoints, orchestrator samples + DSSE fixtures refreshed with manifest block, readiness tests updated to use validator stub. | OpenAPI updated; clients regenerated; integration tests validate pointer presence and tenancy. |
| SCANNER-ENV-02 | TODO (2025-11-06) | Scanner WebService Guild, Ops Guild | SURFACE-ENV-02 | Wire Surface.Env helpers into WebService hosting (cache roots, feature flags) and document configuration.<br>2025-11-02: Cache root resolution switched to helper; feature flag bindings updated; Helm/Compose updates pending review.<br>2025-11-05 14:55Z: Aligning readiness checks, docs, and Helm/Compose templates with Surface.Env outputs and planning test coverage for configuration fallbacks.<br>2025-11-06 17:05Z: Surface.Env documentation/README refreshed; warning catalogue captured for ops handoff.<br>2025-11-06 07:45Z: Helm values (dev/stage/prod/airgap/mirror) and Compose examples updated with `SCANNER_SURFACE_*` defaults plus rollout warning note in `deploy/README.md`.<br>2025-11-06 07:55Z: Paused; follow-up automation captured under `DEVOPS-OPENSSL-11-001/002` and pending Surface.Env readiness tests. | Service uses helper; env table documented; helm/compose templates updated. |
> 2025-11-05 19:18Z: Added configurator to project wiring and unit test ensuring Surface.Env cache root is honoured.
| SCANNER-SECRETS-02 | DOING (2025-11-06) | Scanner WebService Guild, Security Guild | SURFACE-SECRETS-02 | Replace ad-hoc secret wiring with Surface.Secrets for report/export operations (registry and CAS tokens).<br>2025-11-02: Export/report flows now depend on Surface.Secrets stub; integration tests in progress.<br>2025-11-06: Restarting work to eliminate file-based secrets, plumb provider handles through report/export services, and extend failure/rotation tests.<br>2025-11-06 21:40Z: Added configurator + storage post-config to hydrate artifact/CAS credentials from `cas-access` secrets with unit coverage. | Secrets fetched through shared provider; unit/integration tests cover rotation + failure cases. |
| SCANNER-SECRETS-02 | DONE (2025-11-06) | Scanner WebService Guild, Security Guild | SURFACE-SECRETS-02 | Replace ad-hoc secret wiring with Surface.Secrets for report/export operations (registry and CAS tokens).<br>2025-11-02: Export/report flows now depend on Surface.Secrets stub; integration tests in progress.<br>2025-11-06: Restarting work to eliminate file-based secrets, plumb provider handles through report/export services, and extend failure/rotation tests.<br>2025-11-06 21:40Z: Added configurator + storage post-config to hydrate artifact/CAS credentials from `cas-access` secrets with unit coverage.<br>2025-11-06 23:58Z: Registry & attestation secrets now resolved via Surface.Secrets (options + tests updated); dotnet test suites executed with .NET 10 RC2 runtime where available. | Secrets fetched through shared provider; unit/integration tests cover rotation + failure cases. |
| SCANNER-EVENTS-16-301 | BLOCKED (2025-10-26) | Scanner WebService Guild | ORCH-SVC-38-101, NOTIFY-SVC-38-001 | Emit orchestrator-compatible envelopes (`scanner.event.*`) and update integration tests to verify Notifier ingestion (no Redis queue coupling). | Tests assert envelope schema + orchestrator publish; Notifier consumer harness passes; docs updated with new event contract. Blocked by .NET 10 preview OpenAPI/Auth dependency drift preventing `dotnet test` completion. |
| SCANNER-EVENTS-16-302 | DONE (2025-11-06) | Scanner WebService Guild | SCANNER-EVENTS-16-301 | Extend orchestrator event links (report/policy/attestation) once endpoints are finalised across gateway + console.<br>2025-11-06 22:55Z: Dispatcher now honours configurable API/console base segments, JSON samples/docs refreshed, and `ReportEventDispatcherTests` extended. Tests: `StellaOps.Scanner.WebService.Tests` build until pre-existing `SurfaceCacheOptionsConfiguratorTests` ctor signature drift (tracked separately). | Links section covers UI/API targets; downstream consumers validated; docs/samples updated. |

View File

@@ -1,7 +1,8 @@
using System;
using System.Collections.Generic;
using System.Diagnostics.Metrics;
using StellaOps.Scanner.Worker.Processing;
using System;
using System.Collections.Generic;
using System.Diagnostics.Metrics;
using StellaOps.Scanner.Surface.Secrets;
using StellaOps.Scanner.Worker.Processing;
namespace StellaOps.Scanner.Worker.Diagnostics;
@@ -9,11 +10,18 @@ public sealed class ScannerWorkerMetrics
{
private readonly Histogram<double> _queueLatencyMs;
private readonly Histogram<double> _jobDurationMs;
private readonly Histogram<double> _stageDurationMs;
private readonly Histogram<double> _stageDurationMs;
private readonly Counter<long> _jobsCompleted;
private readonly Counter<long> _jobsFailed;
private readonly Counter<long> _languageCacheHits;
private readonly Counter<long> _languageCacheMisses;
private readonly Counter<long> _registrySecretRequests;
private readonly Histogram<double> _registrySecretTtlSeconds;
private readonly Counter<long> _surfaceManifestsPublished;
private readonly Counter<long> _surfaceManifestSkipped;
private readonly Counter<long> _surfaceManifestFailures;
private readonly Counter<long> _surfacePayloadPersisted;
private readonly Histogram<double> _surfaceManifestPublishDurationMs;
public ScannerWorkerMetrics()
{
@@ -41,6 +49,29 @@ public sealed class ScannerWorkerMetrics
_languageCacheMisses = ScannerWorkerInstrumentation.Meter.CreateCounter<long>(
"scanner_worker_language_cache_misses_total",
description: "Number of language analyzer cache misses encountered by the worker.");
_registrySecretRequests = ScannerWorkerInstrumentation.Meter.CreateCounter<long>(
"scanner_worker_registry_secret_requests_total",
description: "Number of registry secret resolution attempts performed by the worker.");
_registrySecretTtlSeconds = ScannerWorkerInstrumentation.Meter.CreateHistogram<double>(
"scanner_worker_registry_secret_ttl_seconds",
unit: "s",
description: "Time-to-live in seconds for resolved registry secrets (earliest expiration).");
_surfaceManifestsPublished = ScannerWorkerInstrumentation.Meter.CreateCounter<long>(
"scanner_worker_surface_manifests_published_total",
description: "Number of surface manifests successfully published by the worker.");
_surfaceManifestSkipped = ScannerWorkerInstrumentation.Meter.CreateCounter<long>(
"scanner_worker_surface_manifests_skipped_total",
description: "Number of surface manifest publish attempts skipped due to missing payloads.");
_surfaceManifestFailures = ScannerWorkerInstrumentation.Meter.CreateCounter<long>(
"scanner_worker_surface_manifests_failed_total",
description: "Number of surface manifest publish attempts that failed.");
_surfacePayloadPersisted = ScannerWorkerInstrumentation.Meter.CreateCounter<long>(
"scanner_worker_surface_payload_persisted_total",
description: "Number of surface payload artefacts persisted to the local cache.");
_surfaceManifestPublishDurationMs = ScannerWorkerInstrumentation.Meter.CreateHistogram<double>(
"scanner_worker_surface_manifest_publish_duration_ms",
unit: "ms",
description: "Duration in milliseconds to persist and publish surface manifests.");
}
public void RecordQueueLatency(ScanJobContext context, TimeSpan latency)
@@ -63,15 +94,15 @@ public sealed class ScannerWorkerMetrics
_jobDurationMs.Record(duration.TotalMilliseconds, CreateTags(context));
}
public void RecordStageDuration(ScanJobContext context, string stage, TimeSpan duration)
{
if (duration <= TimeSpan.Zero)
{
return;
}
_stageDurationMs.Record(duration.TotalMilliseconds, CreateTags(context, stage: stage));
}
public void RecordStageDuration(ScanJobContext context, string stage, TimeSpan duration)
{
if (duration <= TimeSpan.Zero)
{
return;
}
_stageDurationMs.Record(duration.TotalMilliseconds, CreateTags(context, stage: stage));
}
public void IncrementJobCompleted(ScanJobContext context)
{
@@ -93,9 +124,130 @@ public sealed class ScannerWorkerMetrics
_languageCacheMisses.Add(1, CreateTags(context, analyzerId: analyzerId));
}
private static KeyValuePair<string, object?>[] CreateTags(ScanJobContext context, string? stage = null, string? failureReason = null, string? analyzerId = null)
public void RecordRegistrySecretResolved(
ScanJobContext context,
string secretName,
RegistryAccessSecret secret,
TimeProvider timeProvider)
{
var tags = new List<KeyValuePair<string, object?>>(stage is null ? 5 : 6)
var tags = CreateTags(
context,
secretName: secretName,
secretResult: "resolved",
secretEntryCount: secret.Entries.Count);
_registrySecretRequests.Add(1, tags);
if (ComputeTtlSeconds(secret, timeProvider) is double ttlSeconds)
{
_registrySecretTtlSeconds.Record(ttlSeconds, tags);
}
}
/// <summary>
/// Counts a registry secret lookup that found no secret for the given name.
/// </summary>
public void RecordRegistrySecretMissing(ScanJobContext context, string secretName)
{
    _registrySecretRequests.Add(
        1,
        CreateTags(context, secretName: secretName, secretResult: "missing"));
}
/// <summary>
/// Counts a registry secret lookup that failed with an error.
/// </summary>
public void RecordRegistrySecretFailure(ScanJobContext context, string secretName)
{
    _registrySecretRequests.Add(
        1,
        CreateTags(context, secretName: secretName, secretResult: "failure"));
}
/// <summary>
/// Counts a successful surface manifest publish and, for positive durations,
/// records how long the publish took.
/// </summary>
public void RecordSurfaceManifestPublished(ScanJobContext context, int payloadCount, TimeSpan duration)
{
    // Clamp so the payload-count tag never reports a negative value.
    var clampedCount = Math.Max(0, payloadCount);
    var tags = CreateTags(
        context,
        surfaceAction: "manifest",
        surfaceResult: "published",
        surfacePayloadCount: clampedCount);
    _surfaceManifestsPublished.Add(1, tags);

    // Zero/negative durations are not meaningful measurements; skip them.
    if (duration > TimeSpan.Zero)
    {
        _surfaceManifestPublishDurationMs.Record(duration.TotalMilliseconds, tags);
    }
}
/// <summary>
/// Counts a manifest publish that was skipped (no payloads to publish).
/// </summary>
public void RecordSurfaceManifestSkipped(ScanJobContext context)
{
    _surfaceManifestSkipped.Add(
        1,
        CreateTags(context, surfaceAction: "manifest", surfaceResult: "skipped"));
}
/// <summary>
/// Counts a failed manifest publish, tagging the failure reason.
/// </summary>
public void RecordSurfaceManifestFailed(ScanJobContext context, string failureReason)
{
    _surfaceManifestFailures.Add(
        1,
        CreateTags(
            context,
            surfaceAction: "manifest",
            surfaceResult: "failed",
            failureReason: failureReason));
}
/// <summary>
/// Counts a surface payload cached locally; the kind tag is trimmed and
/// lower-cased, falling back to "unknown" for blank input.
/// </summary>
public void RecordSurfacePayloadPersisted(ScanJobContext context, string surfaceKind)
{
    var kind = string.IsNullOrWhiteSpace(surfaceKind)
        ? "unknown"
        : surfaceKind.Trim().ToLowerInvariant();
    _surfacePayloadPersisted.Add(
        1,
        CreateTags(
            context,
            surfaceAction: "payload",
            surfaceKind: kind,
            surfaceResult: "cached"));
}
/// <summary>
/// Returns the remaining lifetime in seconds of the secret entry that expires
/// soonest, clamped at zero, or null when no entry carries an expiry.
/// </summary>
private static double? ComputeTtlSeconds(RegistryAccessSecret secret, TimeProvider timeProvider)
{
    DateTimeOffset? earliest = null;
    foreach (var entry in secret.Entries)
    {
        if (entry.ExpiresAt is { } expires && (earliest is null || expires < earliest))
        {
            earliest = expires;
        }
    }

    if (earliest is not { } soonest)
    {
        return null;
    }

    // Already-expired secrets report 0 rather than a negative TTL.
    var remaining = (soonest - timeProvider.GetUtcNow()).TotalSeconds;
    return Math.Max(0d, remaining);
}
private static KeyValuePair<string, object?>[] CreateTags(
ScanJobContext context,
string? stage = null,
string? failureReason = null,
string? analyzerId = null,
string? secretName = null,
string? secretResult = null,
int? secretEntryCount = null,
string? surfaceAction = null,
string? surfaceKind = null,
string? surfaceResult = null,
int? surfacePayloadCount = null)
{
var tags = new List<KeyValuePair<string, object?>>(8)
{
new("job.id", context.JobId),
new("scan.id", context.ScanId),
@@ -113,10 +265,10 @@ public sealed class ScannerWorkerMetrics
}
if (!string.IsNullOrWhiteSpace(stage))
{
tags.Add(new KeyValuePair<string, object?>("stage", stage));
}
{
tags.Add(new KeyValuePair<string, object?>("stage", stage));
}
if (!string.IsNullOrWhiteSpace(failureReason))
{
tags.Add(new KeyValuePair<string, object?>("reason", failureReason));
@@ -127,6 +279,41 @@ public sealed class ScannerWorkerMetrics
tags.Add(new KeyValuePair<string, object?>("analyzer.id", analyzerId));
}
if (!string.IsNullOrWhiteSpace(secretName))
{
tags.Add(new KeyValuePair<string, object?>("secret.name", secretName));
}
if (!string.IsNullOrWhiteSpace(secretResult))
{
tags.Add(new KeyValuePair<string, object?>("secret.result", secretResult));
}
if (secretEntryCount is not null)
{
tags.Add(new KeyValuePair<string, object?>("secret.entries", secretEntryCount.Value));
}
if (!string.IsNullOrWhiteSpace(surfaceAction))
{
tags.Add(new KeyValuePair<string, object?>("surface.action", surfaceAction));
}
if (!string.IsNullOrWhiteSpace(surfaceKind))
{
tags.Add(new KeyValuePair<string, object?>("surface.kind", surfaceKind));
}
if (!string.IsNullOrWhiteSpace(surfaceResult))
{
tags.Add(new KeyValuePair<string, object?>("surface.result", surfaceResult));
}
if (surfacePayloadCount is not null)
{
tags.Add(new KeyValuePair<string, object?>("surface.payload_count", surfacePayloadCount.Value));
}
return tags.ToArray();
}
}

View File

@@ -0,0 +1,108 @@
using System;
using System.Collections.Generic;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.Extensions.Logging;
using StellaOps.Scanner.Core.Contracts;
using StellaOps.Scanner.Surface.Env;
using StellaOps.Scanner.Surface.Secrets;
using StellaOps.Scanner.Worker.Diagnostics;
namespace StellaOps.Scanner.Worker.Processing;
/// <summary>
/// Scan stage executor that resolves per-job registry credentials via
/// Surface.Secrets during the resolve-image stage. The resolved secret is stored
/// in the job's analysis under <see cref="ScanAnalysisKeys.RegistryCredentials"/>.
/// All resolution failures are non-fatal: the stage logs and continues without
/// credentials rather than failing the scan.
/// </summary>
internal sealed class RegistrySecretStageExecutor : IScanStageExecutor
{
private const string ComponentName = "Scanner.Worker.Registry";
private const string SecretType = "registry";
// Lease-metadata keys probed (in order) for an explicit secret name override.
private static readonly string[] SecretNameMetadataKeys =
{
"surface.registry.secret",
"scanner.registry.secret",
"registry.secret",
};
private readonly ISurfaceSecretProvider _secretProvider;
private readonly ISurfaceEnvironment _surfaceEnvironment;
private readonly ScannerWorkerMetrics _metrics;
private readonly TimeProvider _timeProvider;
private readonly ILogger<RegistrySecretStageExecutor> _logger;
/// <summary>
/// Creates the executor; all dependencies are required and null-checked.
/// </summary>
public RegistrySecretStageExecutor(
ISurfaceSecretProvider secretProvider,
ISurfaceEnvironment surfaceEnvironment,
ScannerWorkerMetrics metrics,
TimeProvider timeProvider,
ILogger<RegistrySecretStageExecutor> logger)
{
_secretProvider = secretProvider ?? throw new ArgumentNullException(nameof(secretProvider));
_surfaceEnvironment = surfaceEnvironment ?? throw new ArgumentNullException(nameof(surfaceEnvironment));
_metrics = metrics ?? throw new ArgumentNullException(nameof(metrics));
_timeProvider = timeProvider ?? throw new ArgumentNullException(nameof(timeProvider));
_logger = logger ?? throw new ArgumentNullException(nameof(logger));
}
// Runs as part of the resolve-image stage of the scan pipeline.
public string StageName => ScanStageNames.ResolveImage;
/// <summary>
/// Resolves the registry secret (named via lease metadata when present) and
/// records metrics for the resolved/missing/failure outcome. Never throws for
/// secret resolution problems; only cancellation propagates.
/// </summary>
public async ValueTask ExecuteAsync(ScanJobContext context, CancellationToken cancellationToken)
{
ArgumentNullException.ThrowIfNull(context);
var secretName = ResolveSecretName(context.Lease.Metadata);
var request = new SurfaceSecretRequest(
Tenant: _surfaceEnvironment.Settings.Secrets.Tenant,
Component: ComponentName,
SecretType: SecretType,
Name: secretName);
try
{
using var handle = await _secretProvider.GetAsync(request, cancellationToken).ConfigureAwait(false);
var secret = SurfaceSecretParser.ParseRegistryAccessSecret(handle);
// Downstream stages read the credentials from the analysis bag.
context.Analysis.Set(ScanAnalysisKeys.RegistryCredentials, secret);
_metrics.RecordRegistrySecretResolved(
context,
secretName ?? "default",
secret,
_timeProvider);
_logger.LogInformation(
"Registry secret '{SecretName}' resolved with {EntryCount} entries for job {JobId}.",
secretName ?? "default",
secret.Entries.Count,
context.JobId);
}
catch (SurfaceSecretNotFoundException)
{
// Missing secret is expected for public images; proceed without credentials.
_metrics.RecordRegistrySecretMissing(context, secretName ?? "default");
_logger.LogDebug(
"Registry secret '{SecretName}' not found for job {JobId}; continuing without registry credentials.",
secretName ?? "default",
context.JobId);
}
catch (Exception ex)
{
// Any other resolution failure is recorded but does not fail the stage.
_metrics.RecordRegistrySecretFailure(context, secretName ?? "default");
_logger.LogWarning(
ex,
"Failed to resolve registry secret '{SecretName}' for job {JobId}; continuing without registry credentials.",
secretName ?? "default",
context.JobId);
}
}
/// <summary>
/// Returns the first non-blank secret name found under the known metadata
/// keys, trimmed, or null when none is present (provider default applies).
/// </summary>
private static string? ResolveSecretName(IReadOnlyDictionary<string, string> metadata)
{
foreach (var key in SecretNameMetadataKeys)
{
if (metadata.TryGetValue(key, out var value) && !string.IsNullOrWhiteSpace(value))
{
return value.Trim();
}
}
return null;
}
}

View File

@@ -4,6 +4,7 @@ using System.IO;
using System.Security.Cryptography;
using System.Text;
using System.Text.Json;
using System.Text.Json.Serialization;
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Options;
using StellaOps.Scanner.Core.Contracts;
@@ -37,7 +38,12 @@ internal sealed record SurfaceManifestRequest(
string? Version,
string? WorkerInstance);
internal sealed class SurfaceManifestPublisher
internal interface ISurfaceManifestPublisher
{
Task<SurfaceManifestPublishResult> PublishAsync(SurfaceManifestRequest request, CancellationToken cancellationToken);
}
internal sealed class SurfaceManifestPublisher : ISurfaceManifestPublisher
{
private static readonly JsonSerializerOptions SerializerOptions = new(JsonSerializerDefaults.Web)
{

View File

@@ -1,14 +1,20 @@
using System.Collections.Generic;
using System.Collections.Immutable;
using System.Diagnostics;
using System.Globalization;
using System.Reflection;
using System.Security.Cryptography;
using System.Text;
using System.Text.Json;
using System.Text.Json.Serialization;
using Microsoft.Extensions.Logging;
using StellaOps.Scanner.Core.Contracts;
using StellaOps.Scanner.EntryTrace;
using StellaOps.Scanner.EntryTrace.Serialization;
using StellaOps.Scanner.Surface.Env;
using StellaOps.Scanner.Surface.FS;
using StellaOps.Scanner.Storage.Catalog;
using StellaOps.Scanner.Worker.Diagnostics;
namespace StellaOps.Scanner.Worker.Processing.Surface;
@@ -20,15 +26,30 @@ internal sealed class SurfaceManifestStageExecutor : IScanStageExecutor
DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingNull
};
private readonly SurfaceManifestPublisher _publisher;
private static readonly JsonSerializerOptions ManifestSerializerOptions = new(JsonSerializerDefaults.Web)
{
WriteIndented = false,
DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingNull
};
private readonly ISurfaceManifestPublisher _publisher;
private readonly ISurfaceCache _surfaceCache;
private readonly ISurfaceEnvironment _surfaceEnvironment;
private readonly ScannerWorkerMetrics _metrics;
private readonly ILogger<SurfaceManifestStageExecutor> _logger;
private readonly string _componentVersion;
public SurfaceManifestStageExecutor(
SurfaceManifestPublisher publisher,
ISurfaceManifestPublisher publisher,
ISurfaceCache surfaceCache,
ISurfaceEnvironment surfaceEnvironment,
ScannerWorkerMetrics metrics,
ILogger<SurfaceManifestStageExecutor> logger)
{
_publisher = publisher ?? throw new ArgumentNullException(nameof(publisher));
_surfaceCache = surfaceCache ?? throw new ArgumentNullException(nameof(surfaceCache));
_surfaceEnvironment = surfaceEnvironment ?? throw new ArgumentNullException(nameof(surfaceEnvironment));
_metrics = metrics ?? throw new ArgumentNullException(nameof(metrics));
_logger = logger ?? throw new ArgumentNullException(nameof(logger));
_componentVersion = Assembly.GetExecutingAssembly().GetName().Version?.ToString() ?? "unknown";
}
@@ -42,23 +63,49 @@ internal sealed class SurfaceManifestStageExecutor : IScanStageExecutor
var payloads = CollectPayloads(context);
if (payloads.Count == 0)
{
_metrics.RecordSurfaceManifestSkipped(context);
_logger.LogDebug("No surface payloads available for job {JobId}; skipping manifest publish.", context.JobId);
return;
}
var request = new SurfaceManifestRequest(
ScanId: context.ScanId,
ImageDigest: ResolveImageDigest(context),
Attempt: context.Lease.Attempt,
Metadata: context.Lease.Metadata,
Payloads: payloads,
Component: "scanner.worker",
Version: _componentVersion,
WorkerInstance: Environment.MachineName);
var tenant = _surfaceEnvironment.Settings?.Tenant ?? string.Empty;
var stopwatch = Stopwatch.StartNew();
var result = await _publisher.PublishAsync(request, cancellationToken).ConfigureAwait(false);
context.Analysis.Set(ScanAnalysisKeys.SurfaceManifest, result);
_logger.LogInformation("Surface manifest stored for job {JobId} with digest {Digest}.", context.JobId, result.ManifestDigest);
try
{
await PersistPayloadsToSurfaceCacheAsync(context, tenant, payloads, cancellationToken).ConfigureAwait(false);
var request = new SurfaceManifestRequest(
ScanId: context.ScanId,
ImageDigest: ResolveImageDigest(context),
Attempt: context.Lease.Attempt,
Metadata: context.Lease.Metadata,
Payloads: payloads,
Component: "scanner.worker",
Version: _componentVersion,
WorkerInstance: Environment.MachineName);
var result = await _publisher.PublishAsync(request, cancellationToken).ConfigureAwait(false);
await PersistManifestToSurfaceCacheAsync(context, tenant, result, cancellationToken).ConfigureAwait(false);
context.Analysis.Set(ScanAnalysisKeys.SurfaceManifest, result);
stopwatch.Stop();
_metrics.RecordSurfaceManifestPublished(context, payloads.Count, stopwatch.Elapsed);
_logger.LogInformation("Surface manifest stored for job {JobId} with digest {Digest}.", context.JobId, result.ManifestDigest);
}
catch (OperationCanceledException)
{
stopwatch.Stop();
throw;
}
catch (Exception ex)
{
stopwatch.Stop();
_metrics.RecordSurfaceManifestFailed(context, ex.GetType().Name);
_logger.LogError(ex, "Failed to persist surface manifest for job {JobId}.", context.JobId);
throw;
}
}
private List<SurfaceManifestPayload> CollectPayloads(ScanJobContext context)
@@ -118,6 +165,56 @@ internal sealed class SurfaceManifestStageExecutor : IScanStageExecutor
return payloads;
}
/// <summary>
/// Writes each surface payload into the local surface cache, keyed by tenant,
/// normalized payload kind, and content digest, recording a metric per payload.
/// Honors cancellation between payloads.
/// </summary>
private async Task PersistPayloadsToSurfaceCacheAsync(
    ScanJobContext context,
    string tenant,
    IReadOnlyList<SurfaceManifestPayload> payloads,
    CancellationToken cancellationToken)
{
    if (payloads.Count == 0)
    {
        return;
    }

    foreach (var payload in payloads)
    {
        cancellationToken.ThrowIfCancellationRequested();

        var contentDigest = ComputeDigest(payload.Content.Span);
        var kind = NormalizeKind(payload.Kind);
        var key = CreateArtifactCacheKey(tenant, kind, contentDigest);
        await _surfaceCache.SetAsync(key, payload.Content, cancellationToken).ConfigureAwait(false);

        _logger.LogDebug(
            "Cached surface payload {Kind} for job {JobId} with digest {Digest}.",
            kind,
            context.JobId,
            contentDigest);
        _metrics.RecordSurfacePayloadPersisted(context, kind);
    }
}
/// <summary>
/// Serializes the published surface manifest document and stores it in the
/// tenant-scoped cache under its manifest digest.
/// </summary>
private async Task PersistManifestToSurfaceCacheAsync(
    ScanJobContext context,
    string tenant,
    SurfaceManifestPublishResult result,
    CancellationToken cancellationToken)
{
    cancellationToken.ThrowIfCancellationRequested();

    var serialized = JsonSerializer.SerializeToUtf8Bytes(result.Document, ManifestSerializerOptions);
    var key = CreateManifestCacheKey(tenant, result.ManifestDigest);
    await _surfaceCache.SetAsync(key, serialized, cancellationToken).ConfigureAwait(false);

    _logger.LogDebug(
        "Cached surface manifest for job {JobId} with digest {Digest}.",
        context.JobId,
        result.ManifestDigest);
}
private static string ResolveImageDigest(ScanJobContext context)
{
static bool TryGet(IReadOnlyDictionary<string, string> metadata, string key, out string value)
@@ -143,5 +240,46 @@ internal sealed class SurfaceManifestStageExecutor : IScanStageExecutor
return context.ScanId;
}
// Builds the cache key for an individual surface artifact; the namespace is
// derived from the artifact kind, the content key from the normalized digest.
private static SurfaceCacheKey CreateArtifactCacheKey(string tenant, string kind, string digest)
    => new SurfaceCacheKey($"surface.artifacts.{kind}", tenant, NormalizeDigestForKey(digest));
// Builds the cache key under which a rendered surface manifest is stored.
private static SurfaceCacheKey CreateManifestCacheKey(string tenant, string digest)
    => new SurfaceCacheKey("surface.manifests", tenant, NormalizeDigestForKey(digest));
// Canonicalizes a payload kind to trimmed lower-case; blank values map to "unknown".
private static string NormalizeKind(string? value)
    => string.IsNullOrWhiteSpace(value) ? "unknown" : value.Trim().ToLowerInvariant();
// Trims a digest for use as a cache content key; null/blank digests become empty.
private static string NormalizeDigestForKey(string? digest)
    => string.IsNullOrWhiteSpace(digest) ? string.Empty : digest.Trim();
// Computes the canonical lower-case "sha256:<hex>" digest of the payload bytes.
private static string ComputeDigest(ReadOnlySpan<byte> content)
{
    var hash = SHA256.HashData(content);
    return $"sha256:{Convert.ToHexString(hash).ToLowerInvariant()}";
}
// NOTE(review): this invariant-culture provider is not referenced anywhere in the
// visible portion of this file — confirm it is used elsewhere, otherwise remove it.
private static readonly IFormatProvider CultureInfoInvariant = System.Globalization.CultureInfo.InvariantCulture;
}

View File

@@ -55,7 +55,7 @@ if (!string.IsNullOrWhiteSpace(connectionString))
{
builder.Services.AddScannerStorage(storageSection);
builder.Services.AddSingleton<IConfigureOptions<ScannerStorageOptions>, ScannerStorageSurfaceSecretConfigurator>();
builder.Services.AddSingleton<SurfaceManifestPublisher>();
builder.Services.AddSingleton<ISurfaceManifestPublisher, SurfaceManifestPublisher>();
builder.Services.AddSingleton<IScanStageExecutor, SurfaceManifestStageExecutor>();
}
@@ -64,6 +64,7 @@ builder.Services.TryAddSingleton<IPluginCatalogGuard, RestartOnlyPluginGuard>();
builder.Services.AddSingleton<IOSAnalyzerPluginCatalog, OsAnalyzerPluginCatalog>();
builder.Services.AddSingleton<ILanguageAnalyzerPluginCatalog, LanguageAnalyzerPluginCatalog>();
builder.Services.AddSingleton<IScanAnalyzerDispatcher, CompositeScanAnalyzerDispatcher>();
builder.Services.AddSingleton<IScanStageExecutor, RegistrySecretStageExecutor>();
builder.Services.AddSingleton<IScanStageExecutor, AnalyzerStageExecutor>();
builder.Services.AddSingleton<ScannerWorkerHostedService>();

View File

@@ -3,7 +3,10 @@
| ID | Status | Owner(s) | Depends on | Description | Exit Criteria |
|----|--------|----------|------------|-------------|---------------|
| SCAN-REPLAY-186-002 | TODO | Scanner Worker Guild | REPLAY-CORE-185-001 | Enforce deterministic analyzer execution when consuming replay input bundles, emit layer Merkle metadata, and author `docs/modules/scanner/deterministic-execution.md` summarising invariants from `docs/replay/DETERMINISTIC_REPLAY.md` Section 4. | Replay mode analyzers pass determinism tests; new doc merged; integration fixtures updated. |
| SCANNER-SURFACE-01 | DOING (2025-11-06) | Scanner Worker Guild | SURFACE-FS-02 | Persist Surface.FS manifests after analyzer stages, including layer CAS metadata and EntryTrace fragments.<br>2025-11-02: Draft Surface.FS manifests emitted for sample scans; telemetry counters under review.<br>2025-11-06: Resuming with manifest writer abstraction, rotation metadata, and telemetry counters for Surface.FS persistence. | Integration tests prove cache entries exist; telemetry counters exported. |
| SCANNER-SURFACE-01 | DONE (2025-11-06) | Scanner Worker Guild | SURFACE-FS-02 | Persist Surface.FS manifests after analyzer stages, including layer CAS metadata and EntryTrace fragments.<br>2025-11-02: Draft Surface.FS manifests emitted for sample scans; telemetry counters under review.<br>2025-11-06: Resuming with manifest writer abstraction, rotation metadata, and telemetry counters for Surface.FS persistence.<br>2025-11-06 21:05Z: Stage now persists manifest/payload caches, exports metrics to Prometheus/Grafana, and WebService pointer tests validate consumption. | Integration tests prove cache entries exist; telemetry counters exported. |
> 2025-11-05 19:18Z: Bound root directory to resolved Surface.Env settings and added unit coverage around the configurator.
> 2025-11-06 18:45Z: Resuming manifest persistence—planning publisher abstraction refactor, CAS storage wiring, and telemetry/test coverage.
> 2025-11-06 20:20Z: Hooked Surface metrics into Grafana (new dashboard JSON) and verified WebService consumption via end-to-end pointer test seeding manifest + payload entries.
> 2025-11-06 21:05Z: Completed Surface manifest cache + metrics work; tests/docs updated and task ready to close.
| SCANNER-ENV-01 | TODO (2025-11-06) | Scanner Worker Guild | SURFACE-ENV-02 | Replace ad-hoc environment reads with `StellaOps.Scanner.Surface.Env` helpers for cache roots and CAS endpoints.<br>2025-11-02: Worker bootstrap now resolves cache roots via helper; warning path documented; smoke tests running.<br>2025-11-05 14:55Z: Extending helper usage into cache/secrets configuration, updating worker validator wiring, and drafting docs/tests for new Surface.Env outputs.<br>2025-11-06 17:05Z: README/design docs updated with warning catalogue; startup logging guidance captured for ops runbooks.<br>2025-11-06 07:45Z: Helm/Compose env profiles (dev/stage/prod/airgap/mirror) now seed `SCANNER_SURFACE_*` defaults to keep worker cache roots aligned with Surface.Env helpers.<br>2025-11-06 07:55Z: Paused; pending automation tracked via `DEVOPS-OPENSSL-11-001/002` and Surface.Env test fixtures. | Worker boots with helper; misconfiguration warnings documented; smoke tests updated. |
> 2025-11-05 19:18Z: Bound `SurfaceCacheOptions` root directory to resolved Surface.Env settings and added unit coverage around the configurator.
| SCANNER-SECRETS-01 | DOING (2025-11-06) | Scanner Worker Guild, Security Guild | SURFACE-SECRETS-02 | Adopt `StellaOps.Scanner.Surface.Secrets` for registry/CAS credentials during scan execution.<br>2025-11-02: Surface.Secrets provider wired for CAS token retrieval; integration tests added.<br>2025-11-06: Continuing to replace legacy registry credential plumbing and extend rotation metrics/fixtures.<br>2025-11-06 21:35Z: Introduced `ScannerStorageSurfaceSecretConfigurator` mapping `cas-access` secrets into storage options plus unit coverage. | Secrets fetched via shared provider; legacy secret code removed; integration tests cover rotation. |
| SCANNER-SECRETS-01 | DONE (2025-11-06) | Scanner Worker Guild, Security Guild | SURFACE-SECRETS-02 | Adopt `StellaOps.Scanner.Surface.Secrets` for registry/CAS credentials during scan execution.<br>2025-11-02: Surface.Secrets provider wired for CAS token retrieval; integration tests added.<br>2025-11-06: Replaced registry credential plumbing with shared provider, added registry secret stage + metrics, and installed .NET 10 RC2 to validate parser/stage suites via targeted `dotnet test`. | Secrets fetched via shared provider; legacy secret code removed; integration tests cover rotation. |

View File

@@ -17,4 +17,6 @@ public static class ScanAnalysisKeys
    // Analysis-state key for the EntryTrace NDJSON stream.
    public const string EntryTraceNdjson = "analysis.entrytrace.ndjson";
    // Analysis-state key for the published surface manifest result.
    public const string SurfaceManifest = "analysis.surface.manifest";
    // Analysis-state key for registry credential material.
    public const string RegistryCredentials = "analysis.registry.credentials";
}

View File

@@ -0,0 +1,53 @@
using System.Text.Json;
namespace StellaOps.Scanner.Surface.Secrets;
/// <summary>
/// Parsed attestation signing material: a mandatory private key PEM plus optional
/// certificate, certificate chain, and Rekor API token.
/// </summary>
public sealed record AttestationSecret(
    string KeyPem,
    string? CertificatePem,
    string? CertificateChainPem,
    string? RekorApiToken);
/// <summary>
/// Parsing helpers for attestation secrets stored in Surface secret handles.
/// </summary>
public static partial class SurfaceSecretParser
{
    /// <summary>
    /// Parses an attestation secret from the handle's JSON payload, falling back to
    /// handle metadata for each field. Throws when the payload is empty or when no
    /// key PEM can be resolved.
    /// </summary>
    public static AttestationSecret ParseAttestationSecret(SurfaceSecretHandle handle)
    {
        ArgumentNullException.ThrowIfNull(handle);

        var payload = handle.AsBytes();
        if (payload.IsEmpty)
        {
            throw new InvalidOperationException("Surface secret payload is empty.");
        }

        using var document = JsonDocument.Parse(DecodeUtf8(payload));
        var root = document.RootElement;
        var metadata = handle.Metadata;

        // Key material: accept either "keyPem" or legacy "pem", JSON first, metadata second.
        var keyPem = GetString(root, "keyPem")
            ?? GetString(root, "pem")
            ?? GetMetadataValue(metadata, "keyPem")
            ?? GetMetadataValue(metadata, "pem");
        if (string.IsNullOrWhiteSpace(keyPem))
        {
            throw new InvalidOperationException("Attestation secret must include a 'keyPem' value.");
        }

        var certificate = GetString(root, "certificatePem")
            ?? GetMetadataValue(metadata, "certificatePem");
        var chain = GetString(root, "certificateChainPem")
            ?? GetMetadataValue(metadata, "certificateChainPem");
        var rekor = GetString(root, "rekorToken")
            ?? GetString(root, "rekorApiToken")
            ?? GetMetadataValue(metadata, "rekorToken")
            ?? GetMetadataValue(metadata, "rekorApiToken");

        return new AttestationSecret(keyPem.Trim(), certificate?.Trim(), chain?.Trim(), rekor?.Trim());
    }
}

View File

@@ -19,7 +19,7 @@ public sealed record CasAccessSecret(
string? SessionToken,
bool? AllowInsecureTls);
public static class SurfaceSecretParser
public static partial class SurfaceSecretParser
{
public static CasAccessSecret ParseCasAccessSecret(SurfaceSecretHandle handle)
{

View File

@@ -0,0 +1,347 @@
using System;
using System.Collections.Generic;
using System.Collections.ObjectModel;
using System.Globalization;
using System.Text;
using System.Text.Json;
namespace StellaOps.Scanner.Surface.Secrets;
/// <summary>
/// Parsed registry credential set: one or more per-registry entries plus the
/// registry to use when the caller does not specify one.
/// </summary>
public sealed record RegistryAccessSecret(
    IReadOnlyList<RegistryCredential> Entries,
    string? DefaultRegistry);
/// <summary>
/// Credentials for a single registry. Any of the authentication fields may be
/// null; headers and scopes are empty collections when not supplied.
/// </summary>
public sealed record RegistryCredential(
    string Registry,
    string? Username,
    string? Password,
    string? IdentityToken,
    string? RegistryToken,
    string? RefreshToken,
    DateTimeOffset? ExpiresAt,
    IReadOnlyCollection<string> Scopes,
    bool? AllowInsecureTls,
    IReadOnlyDictionary<string, string> Headers,
    string? Email);
/// <summary>
/// Parsing helpers for registry access secrets stored in Surface secret handles.
/// Three payload shapes are supported: a structured "entries" array, a
/// Docker-config-style "auths" object, and a single flat credential object.
/// Handle metadata acts as a final fallback for individual fields.
/// </summary>
public static partial class SurfaceSecretParser
{
    /// <summary>
    /// Parses registry credentials from the handle's JSON payload and/or metadata.
    /// Throws <see cref="InvalidOperationException"/> when no credentials are found.
    /// </summary>
    public static RegistryAccessSecret ParseRegistryAccessSecret(SurfaceSecretHandle handle)
    {
        ArgumentNullException.ThrowIfNull(handle);
        var entries = new List<RegistryCredential>();
        string? defaultRegistry = null;
        var payload = handle.AsBytes();
        if (!payload.IsEmpty)
        {
            var jsonText = DecodeUtf8(payload);
            using var document = JsonDocument.Parse(jsonText);
            var root = document.RootElement;
            defaultRegistry = GetString(root, "defaultRegistry") ?? GetMetadataValue(handle.Metadata, "defaultRegistry");
            if (TryParseRegistryEntries(root, handle.Metadata, entries) ||
                TryParseAuthsObject(root, handle.Metadata, entries))
            {
                // entries already populated by one of the structured forms
            }
            else if (root.ValueKind == JsonValueKind.Object && root.GetRawText().Length > 2) // not an empty "{}" object
            {
                // Treat the root object itself as a single flat credential entry.
                entries.Add(ParseRegistryEntry(root, handle.Metadata, fallbackRegistry: null));
            }
        }
        if (entries.Count == 0 && TryCreateRegistryEntryFromMetadata(handle.Metadata, out var metadataEntry))
        {
            entries.Add(metadataEntry);
        }
        if (entries.Count == 0)
        {
            throw new InvalidOperationException("Registry secret payload does not contain credentials.");
        }
        // Default registry: JSON value, then metadata, then the first entry's registry.
        defaultRegistry ??= GetMetadataValue(handle.Metadata, "defaultRegistry")
            ?? entries[0].Registry;
        return new RegistryAccessSecret(
            new ReadOnlyCollection<RegistryCredential>(entries),
            string.IsNullOrWhiteSpace(defaultRegistry) ? entries[0].Registry : defaultRegistry.Trim());
    }
    /// <summary>
    /// Parses the structured "entries" array form; non-object array items are skipped.
    /// Returns true when at least one credential was added.
    /// </summary>
    private static bool TryParseRegistryEntries(
        JsonElement root,
        IReadOnlyDictionary<string, string> metadata,
        ICollection<RegistryCredential> entries)
    {
        if (!TryGetPropertyIgnoreCase(root, "entries", out var entriesElement) ||
            entriesElement.ValueKind != JsonValueKind.Array)
        {
            return false;
        }
        foreach (var entryElement in entriesElement.EnumerateArray())
        {
            if (entryElement.ValueKind != JsonValueKind.Object)
            {
                continue;
            }
            entries.Add(ParseRegistryEntry(entryElement, metadata, fallbackRegistry: null));
        }
        return entries.Count > 0;
    }
    /// <summary>
    /// Parses the Docker-config-style "auths" object, where each property name is
    /// the registry host. Returns true when at least one credential was added.
    /// </summary>
    private static bool TryParseAuthsObject(
        JsonElement root,
        IReadOnlyDictionary<string, string> metadata,
        ICollection<RegistryCredential> entries)
    {
        if (!TryGetPropertyIgnoreCase(root, "auths", out var authsElement) ||
            authsElement.ValueKind != JsonValueKind.Object)
        {
            return false;
        }
        foreach (var property in authsElement.EnumerateObject())
        {
            if (property.Value.ValueKind != JsonValueKind.Object)
            {
                continue;
            }
            // The property name (registry host) is used as the fallback registry.
            entries.Add(ParseRegistryEntry(property.Value, metadata, property.Name));
        }
        return entries.Count > 0;
    }
    /// <summary>
    /// Parses a single credential object. Resolution order per field: JSON property,
    /// then the Docker "auth" basic-auth blob (username/password only), then metadata.
    /// Throws when no registry identifier can be resolved.
    /// </summary>
    private static RegistryCredential ParseRegistryEntry(
        JsonElement element,
        IReadOnlyDictionary<string, string> metadata,
        string? fallbackRegistry)
    {
        var registry = GetString(element, "registry")
            ?? GetString(element, "server")
            ?? fallbackRegistry
            ?? GetMetadataValue(metadata, "registry")
            ?? throw new InvalidOperationException("Registry credential is missing a registry identifier.");
        registry = registry.Trim();
        var username = GetString(element, "username") ?? GetString(element, "user");
        var password = GetString(element, "password") ?? GetString(element, "pass");
        var token = GetString(element, "token") ?? GetString(element, "registryToken");
        var identityToken = GetString(element, "identityToken") ?? GetString(element, "identitytoken");
        var refreshToken = GetString(element, "refreshToken");
        var email = GetString(element, "email");
        var allowInsecure = GetBoolean(element, "allowInsecureTls");
        var headers = new Dictionary<string, string>(StringComparer.OrdinalIgnoreCase);
        PopulateHeaders(element, headers);
        PopulateMetadataHeaders(metadata, headers);
        var scopes = new List<string>();
        PopulateScopes(element, scopes);
        PopulateMetadataScopes(metadata, scopes);
        var expiresAt = ParseDateTime(element, "expiresAt");
        var auth = GetString(element, "auth");
        if (!string.IsNullOrWhiteSpace(auth))
        {
            // Docker "auth" is base64("user:pass"); it only fills username/password
            // when they were not set explicitly above.
            TryApplyBasicAuth(auth, ref username, ref password);
        }
        username ??= GetMetadataValue(metadata, "username");
        password ??= GetMetadataValue(metadata, "password");
        token ??= GetMetadataValue(metadata, "token") ?? GetMetadataValue(metadata, "registryToken");
        identityToken ??= GetMetadataValue(metadata, "identityToken");
        refreshToken ??= GetMetadataValue(metadata, "refreshToken");
        email ??= GetMetadataValue(metadata, "email");
        return new RegistryCredential(
            registry,
            username?.Trim(),
            password,
            identityToken,
            token,
            refreshToken,
            expiresAt,
            scopes.Count == 0 ? Array.Empty<string>() : new ReadOnlyCollection<string>(scopes),
            allowInsecure,
            new ReadOnlyDictionary<string, string>(headers),
            email);
    }
    /// <summary>
    /// Builds a credential purely from handle metadata. Returns false when none of
    /// the identifying/authentication keys are present; the registry defaults to
    /// "registry.local" when metadata supplies credentials but no registry name.
    /// </summary>
    private static bool TryCreateRegistryEntryFromMetadata(
        IReadOnlyDictionary<string, string> metadata,
        out RegistryCredential entry)
    {
        var registry = GetMetadataValue(metadata, "registry");
        var username = GetMetadataValue(metadata, "username");
        var password = GetMetadataValue(metadata, "password");
        var identityToken = GetMetadataValue(metadata, "identityToken");
        var token = GetMetadataValue(metadata, "token") ?? GetMetadataValue(metadata, "registryToken");
        if (string.IsNullOrWhiteSpace(registry) &&
            string.IsNullOrWhiteSpace(username) &&
            string.IsNullOrWhiteSpace(password) &&
            string.IsNullOrWhiteSpace(identityToken) &&
            string.IsNullOrWhiteSpace(token))
        {
            entry = null!;
            return false;
        }
        var headers = new Dictionary<string, string>(StringComparer.OrdinalIgnoreCase);
        PopulateMetadataHeaders(metadata, headers);
        var scopes = new List<string>();
        PopulateMetadataScopes(metadata, scopes);
        entry = new RegistryCredential(
            registry?.Trim() ?? "registry.local",
            username?.Trim(),
            password,
            identityToken,
            token,
            GetMetadataValue(metadata, "refreshToken"),
            ParseDateTime(metadata, "expiresAt"),
            scopes.Count == 0 ? Array.Empty<string>() : new ReadOnlyCollection<string>(scopes),
            ParseBoolean(metadata, "allowInsecureTls"),
            new ReadOnlyDictionary<string, string>(headers),
            GetMetadataValue(metadata, "email"));
        return true;
    }
    /// <summary>
    /// Collects scopes from a "scopes" JSON property, accepting either an array of
    /// strings or a single comma/space-delimited string.
    /// </summary>
    private static void PopulateScopes(JsonElement element, ICollection<string> scopes)
    {
        if (!TryGetPropertyIgnoreCase(element, "scopes", out var scopesElement))
        {
            return;
        }
        switch (scopesElement.ValueKind)
        {
            case JsonValueKind.Array:
                foreach (var scope in scopesElement.EnumerateArray())
                {
                    if (scope.ValueKind == JsonValueKind.String)
                    {
                        var value = scope.GetString();
                        if (!string.IsNullOrWhiteSpace(value))
                        {
                            scopes.Add(value.Trim());
                        }
                    }
                }
                break;
            case JsonValueKind.String:
                var text = scopesElement.GetString();
                if (!string.IsNullOrWhiteSpace(text))
                {
                    foreach (var part in text.Split(new[] { ',', ' ' }, StringSplitOptions.RemoveEmptyEntries))
                    {
                        scopes.Add(part.Trim());
                    }
                }
                break;
        }
    }
    /// <summary>
    /// Collects scopes from metadata keys whose name starts with "scope" (e.g. "scope:0").
    /// </summary>
    private static void PopulateMetadataScopes(IReadOnlyDictionary<string, string> metadata, ICollection<string> scopes)
    {
        foreach (var (key, value) in metadata)
        {
            if (!key.StartsWith("scope", StringComparison.OrdinalIgnoreCase))
            {
                continue;
            }
            if (string.IsNullOrWhiteSpace(value))
            {
                continue;
            }
            scopes.Add(value.Trim());
        }
    }
    /// <summary>
    /// Decodes a base64 "user:pass" blob into username/password, filling only the
    /// values that are still null. Malformed base64 is ignored.
    /// </summary>
    private static void TryApplyBasicAuth(string auth, ref string? username, ref string? password)
    {
        try
        {
            var decoded = Encoding.UTF8.GetString(Convert.FromBase64String(auth));
            var separator = decoded.IndexOf(':');
            if (separator >= 0)
            {
                username ??= decoded[..separator];
                password ??= decoded[(separator + 1)..];
            }
        }
        catch (FormatException)
        {
            // ignore malformed auth; caller may still have explicit username/password fields
        }
    }
    /// <summary>
    /// Reads an ISO-8601 timestamp from a JSON string property (assumed UTC when
    /// no offset is given); returns null when absent or unparsable.
    /// </summary>
    private static DateTimeOffset? ParseDateTime(JsonElement element, string propertyName)
    {
        if (!TryGetPropertyIgnoreCase(element, propertyName, out var value) ||
            value.ValueKind != JsonValueKind.String)
        {
            return null;
        }
        var text = value.GetString();
        if (string.IsNullOrWhiteSpace(text))
        {
            return null;
        }
        if (DateTimeOffset.TryParse(text, CultureInfo.InvariantCulture, DateTimeStyles.AssumeUniversal, out var parsed))
        {
            return parsed;
        }
        return null;
    }
    /// <summary>
    /// Reads an ISO-8601 timestamp from a metadata value; returns null when absent
    /// or unparsable.
    /// </summary>
    private static DateTimeOffset? ParseDateTime(IReadOnlyDictionary<string, string> metadata, string key)
    {
        var value = GetMetadataValue(metadata, key);
        if (string.IsNullOrWhiteSpace(value))
        {
            return null;
        }
        if (DateTimeOffset.TryParse(value, CultureInfo.InvariantCulture, DateTimeStyles.AssumeUniversal, out var parsed))
        {
            return parsed;
        }
        return null;
    }
    /// <summary>
    /// Reads a boolean metadata value; returns null when absent or unparsable.
    /// </summary>
    private static bool? ParseBoolean(IReadOnlyDictionary<string, string> metadata, string key)
    {
        var value = GetMetadataValue(metadata, key);
        if (string.IsNullOrWhiteSpace(value))
        {
            return null;
        }
        if (bool.TryParse(value, out var parsed))
        {
            return parsed;
        }
        return null;
    }
}

View File

@@ -0,0 +1,122 @@
using System;
using System.Diagnostics;
using System.IO;
using System.Linq;
using System.Text.Json;
using System.Threading.Tasks;
using StellaOps.Scanner.Sbomer.BuildXPlugin.Descriptor;
using StellaOps.Scanner.Sbomer.BuildXPlugin.Manifest;
using StellaOps.Scanner.Sbomer.BuildXPlugin.Tests.TestUtilities;
using StellaOps.Scanner.Surface.FS;
using Xunit;
namespace StellaOps.Scanner.Sbomer.BuildXPlugin.Tests.Descriptor;
public sealed class DescriptorCommandSurfaceTests
{
    /// <summary>
    /// End-to-end check: runs the BuildX plug-in "descriptor" command as a child
    /// process and verifies it emits a descriptor document on stdout, persists a
    /// surface manifest, and stores every referenced artifact under the CAS root.
    /// </summary>
    [Fact]
    public async Task DescriptorCommand_PublishesSurfaceArtifacts()
    {
        await using var temp = new TempDirectory();
        var casRoot = Path.Combine(temp.Path, "cas");
        Directory.CreateDirectory(casRoot);
        var sbomPath = Path.Combine(temp.Path, "sample.cdx.json");
        await File.WriteAllTextAsync(sbomPath, "{\"bomFormat\":\"CycloneDX\",\"specVersion\":\"1.6\"}");
        var layerFragmentsPath = Path.Combine(temp.Path, "layer-fragments.json");
        await File.WriteAllTextAsync(layerFragmentsPath, "[]");
        var entryTraceGraphPath = Path.Combine(temp.Path, "entrytrace-graph.json");
        await File.WriteAllTextAsync(entryTraceGraphPath, "{\"nodes\":[],\"edges\":[]}");
        var entryTraceNdjsonPath = Path.Combine(temp.Path, "entrytrace.ndjson");
        await File.WriteAllTextAsync(entryTraceNdjsonPath, "{}\n{}");
        var manifestOutputPath = Path.Combine(temp.Path, "out", "surface-manifest.json");
        var repoRoot = TestPathHelper.FindRepositoryRoot();
        var manifestDirectory = Path.Combine(repoRoot, "src", "Scanner", "StellaOps.Scanner.Sbomer.BuildXPlugin");
        var pluginAssembly = typeof(BuildxPluginManifest).Assembly.Location;
        var psi = new ProcessStartInfo("dotnet")
        {
            RedirectStandardOutput = true,
            RedirectStandardError = true,
            UseShellExecute = false,
            WorkingDirectory = repoRoot
        };
        psi.ArgumentList.Add(pluginAssembly);
        psi.ArgumentList.Add("descriptor");
        psi.ArgumentList.Add("--manifest");
        psi.ArgumentList.Add(manifestDirectory);
        psi.ArgumentList.Add("--cas");
        psi.ArgumentList.Add(casRoot);
        psi.ArgumentList.Add("--image");
        psi.ArgumentList.Add("sha256:feedfacefeedfacefeedfacefeedfacefeedfacefeedfacefeedfacefeedface");
        psi.ArgumentList.Add("--sbom");
        psi.ArgumentList.Add(sbomPath);
        psi.ArgumentList.Add("--sbom-name");
        psi.ArgumentList.Add("sample.cdx.json");
        psi.ArgumentList.Add("--surface-layer-fragments");
        psi.ArgumentList.Add(layerFragmentsPath);
        psi.ArgumentList.Add("--surface-entrytrace-graph");
        psi.ArgumentList.Add(entryTraceGraphPath);
        psi.ArgumentList.Add("--surface-entrytrace-ndjson");
        psi.ArgumentList.Add(entryTraceNdjsonPath);
        psi.ArgumentList.Add("--surface-cache-root");
        psi.ArgumentList.Add(casRoot);
        psi.ArgumentList.Add("--surface-tenant");
        psi.ArgumentList.Add("test-tenant");
        psi.ArgumentList.Add("--surface-manifest-output");
        psi.ArgumentList.Add(manifestOutputPath);
        var process = Process.Start(psi) ?? throw new InvalidOperationException("Failed to start BuildX plug-in process.");
        var stdout = await process.StandardOutput.ReadToEndAsync();
        var stderr = await process.StandardError.ReadToEndAsync();
        await process.WaitForExitAsync();
        Assert.True(process.ExitCode == 0, $"Descriptor command failed.\nSTDOUT: {stdout}\nSTDERR: {stderr}");
        var descriptor = JsonSerializer.Deserialize<DescriptorDocument>(stdout, new JsonSerializerOptions(JsonSerializerDefaults.Web));
        Assert.NotNull(descriptor);
        Assert.Equal("stellaops.buildx.descriptor.v1", descriptor!.Schema);
        Assert.Equal("sha256:d07d06ae82e1789a5b505731f3ec3add106e23a55395213c9a881c7e816c695c", descriptor.Artifact.Digest);
        Assert.Contains("surface manifest stored", stderr, StringComparison.OrdinalIgnoreCase);
        Assert.True(File.Exists(manifestOutputPath));
        var surfaceManifestPath = Directory.GetFiles(Path.Combine(casRoot, "scanner", "surface", "manifests"), "*.json", SearchOption.AllDirectories).Single();
        var manifestDocument = JsonSerializer.Deserialize<SurfaceManifestDocument>(await File.ReadAllTextAsync(surfaceManifestPath), new JsonSerializerOptions(JsonSerializerDefaults.Web));
        Assert.NotNull(manifestDocument);
        Assert.Equal("test-tenant", manifestDocument!.Tenant);
        Assert.Equal(3, manifestDocument.Artifacts.Count);
        foreach (var artifact in manifestDocument.Artifacts)
        {
            Assert.StartsWith("cas://", artifact.Uri, StringComparison.Ordinal);
            var localPath = ResolveLocalPath(artifact.Uri, casRoot);
            Assert.True(File.Exists(localPath), $"Missing CAS object for {artifact.Uri}");
        }
    }
    /// <summary>
    /// Maps a "cas://bucket/relative" URI onto a path under the local CAS root.
    /// Throws for URIs without the cas:// scheme or without a bucket segment.
    /// </summary>
    private static string ResolveLocalPath(string casUri, string casRoot)
    {
        const string prefix = "cas://";
        if (!casUri.StartsWith(prefix, StringComparison.Ordinal))
        {
            throw new InvalidOperationException($"Unsupported CAS URI {casUri}.");
        }
        // BUGFIX: the slash must be a '/' char literal; the bare token did not compile.
        var slashIndex = casUri.IndexOf('/', prefix.Length);
        if (slashIndex < 0)
        {
            throw new InvalidOperationException($"CAS URI {casUri} does not contain a bucket path.");
        }
        // Strip "bucket/" and convert the remaining segments to the local separator.
        var relative = casUri[(slashIndex + 1)..];
        var localPath = Path.Combine(casRoot, relative.Replace('/', Path.DirectorySeparatorChar));
        return localPath;
    }
}

View File

@@ -1,45 +1,45 @@
{
"schema": "stellaops.buildx.descriptor.v1",
"generatedAt": "2025-10-18T12:00:00\u002B00:00",
"generator": {
"name": "StellaOps.Scanner.Sbomer.BuildXPlugin",
"version": "1.2.3"
},
"subject": {
"mediaType": "application/vnd.oci.image.manifest.v1\u002Bjson",
"digest": "sha256:0123456789abcdef"
},
"artifact": {
"mediaType": "application/vnd.cyclonedx\u002Bjson",
"digest": "sha256:d07d06ae82e1789a5b505731f3ec3add106e23a55395213c9a881c7e816c695c",
"size": 45,
"annotations": {
"org.opencontainers.artifact.type": "application/vnd.stellaops.sbom.layer\u002Bjson",
"org.stellaops.scanner.version": "1.2.3",
"org.stellaops.sbom.kind": "inventory",
"org.stellaops.sbom.format": "cyclonedx-json",
"org.stellaops.provenance.status": "pending",
"org.stellaops.provenance.dsse.sha256": "sha256:1b364a6b888d580feb8565f7b6195b24535ca8201b4bcac58da063b32c47220d",
"org.stellaops.provenance.nonce": "a608acf859cd58a8389816b8d9eb2a07",
"org.stellaops.license.id": "lic-123",
"org.opencontainers.image.title": "sample.cdx.json",
"org.stellaops.repository": "git.stella-ops.org/stellaops"
}
},
"provenance": {
"status": "pending",
"expectedDsseSha256": "sha256:1b364a6b888d580feb8565f7b6195b24535ca8201b4bcac58da063b32c47220d",
"nonce": "a608acf859cd58a8389816b8d9eb2a07",
"attestorUri": "https://attestor.local/api/v1/provenance",
"predicateType": "https://slsa.dev/provenance/v1"
},
"metadata": {
"sbomDigest": "sha256:d07d06ae82e1789a5b505731f3ec3add106e23a55395213c9a881c7e816c695c",
"sbomPath": "sample.cdx.json",
"sbomMediaType": "application/vnd.cyclonedx\u002Bjson",
"subjectMediaType": "application/vnd.oci.image.manifest.v1\u002Bjson",
"repository": "git.stella-ops.org/stellaops",
"buildRef": "refs/heads/main",
"attestorUri": "https://attestor.local/api/v1/provenance"
}
{
"schema": "stellaops.buildx.descriptor.v1",
"generatedAt": "2025-10-18T12:00:00\u002B00:00",
"generator": {
"name": "StellaOps.Scanner.Sbomer.BuildXPlugin",
"version": "1.2.3"
},
"subject": {
"mediaType": "application/vnd.oci.image.manifest.v1\u002Bjson",
"digest": "sha256:0123456789abcdef"
},
"artifact": {
"mediaType": "application/vnd.cyclonedx\u002Bjson",
"digest": "sha256:d07d06ae82e1789a5b505731f3ec3add106e23a55395213c9a881c7e816c695c",
"size": 45,
"annotations": {
"org.opencontainers.artifact.type": "application/vnd.stellaops.sbom.layer\u002Bjson",
"org.stellaops.scanner.version": "1.2.3",
"org.stellaops.sbom.kind": "inventory",
"org.stellaops.sbom.format": "cyclonedx-json",
"org.stellaops.provenance.status": "pending",
"org.stellaops.provenance.dsse.sha256": "sha256:35ab4784f3bad40bb0063b522939ac729cf43d2012059947c0e56475d682c05e",
"org.stellaops.provenance.nonce": "5e13230e3dcbc8be996d8132d92e8826",
"org.stellaops.license.id": "lic-123",
"org.opencontainers.image.title": "sample.cdx.json",
"org.stellaops.repository": "git.stella-ops.org/stellaops"
}
},
"provenance": {
"status": "pending",
"expectedDsseSha256": "sha256:35ab4784f3bad40bb0063b522939ac729cf43d2012059947c0e56475d682c05e",
"nonce": "5e13230e3dcbc8be996d8132d92e8826",
"attestorUri": "https://attestor.local/api/v1/provenance",
"predicateType": "https://slsa.dev/provenance/v1"
},
"metadata": {
"sbomDigest": "sha256:d07d06ae82e1789a5b505731f3ec3add106e23a55395213c9a881c7e816c695c",
"sbomPath": "sample.cdx.json",
"sbomMediaType": "application/vnd.cyclonedx\u002Bjson",
"subjectMediaType": "application/vnd.oci.image.manifest.v1\u002Bjson",
"repository": "git.stella-ops.org/stellaops",
"buildRef": "refs/heads/main",
"attestorUri": "https://attestor.local/api/v1/provenance"
}
}

View File

@@ -0,0 +1,95 @@
using System;
using System.IO;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using StellaOps.Scanner.Sbomer.BuildXPlugin.Surface;
using StellaOps.Scanner.Sbomer.BuildXPlugin.Tests.TestUtilities;
using Xunit;
namespace StellaOps.Scanner.Sbomer.BuildXPlugin.Tests.Surface;
public sealed class SurfaceManifestWriterTests
{
    // Happy path: with all three artifact inputs present, the writer persists the
    // artifacts and the manifest, mirrors the manifest to the requested output path,
    // and reports cas:// URIs rooted in the configured cache bucket.
    [Fact]
    public async Task WriteAsync_PersistsArtifactsAndManifest()
    {
        await using var temp = new TempDirectory();
        var fragmentsPath = Path.Combine(temp.Path, "layer-fragments.json");
        await File.WriteAllTextAsync(fragmentsPath, "[]");
        var graphPath = Path.Combine(temp.Path, "entrytrace-graph.json");
        await File.WriteAllTextAsync(graphPath, "{\"nodes\":[],\"edges\":[]}");
        var ndjsonPath = Path.Combine(temp.Path, "entrytrace.ndjson");
        await File.WriteAllTextAsync(ndjsonPath, "{}\n{}");
        var manifestOutputPath = Path.Combine(temp.Path, "out", "surface-manifest.json");
        var options = new SurfaceOptions(
            CacheRoot: temp.Path,
            CacheBucket: "scanner-artifacts",
            RootPrefix: "scanner",
            Tenant: "tenant-a",
            Component: "scanner.buildx",
            ComponentVersion: "1.2.3",
            WorkerInstance: "builder-01",
            Attempt: 2,
            ImageDigest: "sha256:feedface",
            ScanId: "scan-123",
            LayerFragmentsPath: fragmentsPath,
            EntryTraceGraphPath: graphPath,
            EntryTraceNdjsonPath: ndjsonPath,
            ManifestOutputPath: manifestOutputPath);
        var writer = new SurfaceManifestWriter(TimeProvider.System);

        var result = await writer.WriteAsync(options, CancellationToken.None);

        Assert.NotNull(result);
        Assert.NotNull(result!.Document.Source);
        Assert.Equal("tenant-a", result.Document.Tenant);
        Assert.Equal("scanner.buildx", result.Document.Source!.Component);
        Assert.Equal("1.2.3", result.Document.Source.Version);
        Assert.Equal(3, result.Document.Artifacts.Count);
        var kinds = result.Document.Artifacts.Select(a => a.Kind).ToHashSet();
        Assert.Contains("entrytrace.graph", kinds);
        Assert.Contains("entrytrace.ndjson", kinds);
        Assert.Contains("layer.fragments", kinds);
        Assert.True(File.Exists(result.ManifestPath));
        Assert.True(File.Exists(manifestOutputPath));
        foreach (var artifact in result.Artifacts)
        {
            Assert.True(File.Exists(artifact.FilePath));
            Assert.False(string.IsNullOrWhiteSpace(artifact.ManifestArtifact.Uri));
            Assert.StartsWith("cas://scanner-artifacts/", artifact.ManifestArtifact.Uri, StringComparison.Ordinal);
        }
    }

    // When no artifact input paths are supplied, the writer produces nothing
    // and returns null rather than an empty manifest.
    [Fact]
    public async Task WriteAsync_NoArtifacts_ReturnsNull()
    {
        await using var temp = new TempDirectory();
        var options = new SurfaceOptions(
            CacheRoot: temp.Path,
            CacheBucket: "scanner-artifacts",
            RootPrefix: "scanner",
            Tenant: "tenant-a",
            Component: "scanner.buildx",
            ComponentVersion: "1.0",
            WorkerInstance: "builder-01",
            Attempt: 1,
            ImageDigest: "sha256:deadbeef",
            ScanId: "scan-1",
            LayerFragmentsPath: null,
            EntryTraceGraphPath: null,
            EntryTraceNdjsonPath: null,
            ManifestOutputPath: null);
        var writer = new SurfaceManifestWriter(TimeProvider.System);

        var result = await writer.WriteAsync(options, CancellationToken.None);

        Assert.Null(result);
    }
}

View File

@@ -0,0 +1,23 @@
using System;
using System.IO;
namespace StellaOps.Scanner.Sbomer.BuildXPlugin.Tests.TestUtilities;
/// <summary>
/// Locates the repository root by walking up from the test base directory until a
/// <c>global.json</c> marker file is found, checking at most 15 directory levels.
/// </summary>
internal static class TestPathHelper
{
    public static string FindRepositoryRoot()
    {
        var candidate = AppContext.BaseDirectory;
        var remaining = 15;
        while (remaining > 0 && !string.IsNullOrWhiteSpace(candidate))
        {
            if (File.Exists(Path.Combine(candidate, "global.json")))
            {
                return candidate;
            }

            candidate = Directory.GetParent(candidate)?.FullName;
            remaining--;
        }

        throw new InvalidOperationException("Unable to locate repository root (global.json not found).");
    }
}

View File

@@ -0,0 +1,112 @@
using System;
using System.Collections.Generic;
using System.Text;
using StellaOps.Scanner.Surface.Secrets;
using Xunit;
namespace StellaOps.Scanner.Surface.Secrets.Tests;
public sealed class RegistryAccessSecretParserTests
{
    // Rich JSON "entries" array form: every field of a credential entry is parsed,
    // including expiry, scopes, custom headers, and e-mail.
    [Fact]
    public void ParseRegistrySecret_WithEntriesArray_ReturnsCredential()
    {
        const string json = """
        {
          "defaultRegistry": "registry.example.com",
          "entries": [
            {
              "registry": "registry.example.com",
              "username": "demo",
              "password": "s3cret",
              "token": "token-123",
              "identityToken": "identity-token",
              "refreshToken": "refresh-token",
              "expiresAt": "2025-12-01T10:00:00Z",
              "allowInsecureTls": false,
              "scopes": ["repo:sample:pull"],
              "headers": {
                "X-Test": "value"
              },
              "email": "demo@example.com"
            }
          ]
        }
        """;
        using var handle = SurfaceSecretHandle.FromBytes(Encoding.UTF8.GetBytes(json));
        var secret = SurfaceSecretParser.ParseRegistryAccessSecret(handle);
        Assert.Equal("registry.example.com", secret.DefaultRegistry);
        var entry = Assert.Single(secret.Entries);
        Assert.Equal("registry.example.com", entry.Registry);
        Assert.Equal("demo", entry.Username);
        Assert.Equal("s3cret", entry.Password);
        Assert.Equal("token-123", entry.RegistryToken);
        Assert.Equal("identity-token", entry.IdentityToken);
        Assert.Equal("refresh-token", entry.RefreshToken);
        Assert.Equal("demo@example.com", entry.Email);
        Assert.Equal(new DateTimeOffset(2025, 12, 1, 10, 0, 0, TimeSpan.Zero), entry.ExpiresAt);
        Assert.Equal(false, entry.AllowInsecureTls);
        Assert.Contains("repo:sample:pull", entry.Scopes);
        Assert.Equal("value", entry.Headers["X-Test"]);
    }
[Fact]
public void ParseRegistrySecret_WithDockerAuthsObject_DecodesBasicAuth()
{
const string json = """
{
"auths": {
"ghcr.io": {
"auth": "ZGVtbzpwYXNz",
"identitytoken": "id-token"
}
}
}
""";
var metadata = new Dictionary<string, string>(StringComparer.OrdinalIgnoreCase)
{
["token"] = "metadata-token"
};
using var handle = SurfaceSecretHandle.FromBytes(Encoding.UTF8.GetBytes(json), metadata);
var secret = SurfaceSecretParser.ParseRegistryAccessSecret(handle);
var entry = Assert.Single(secret.Entries);
Assert.Equal("ghcr.io", entry.Registry);
Assert.Equal("demo", entry.Username);
Assert.Equal("pass", entry.Password);
Assert.Equal("metadata-token", entry.RegistryToken);
Assert.Equal("id-token", entry.IdentityToken);
}
[Fact]
public void ParseRegistrySecret_MetadataFallback_ReturnsCredential()
{
var metadata = new Dictionary<string, string>(StringComparer.OrdinalIgnoreCase)
{
["registry"] = "registry.internal",
["username"] = "meta-user",
["password"] = "meta-pass",
["scope:0"] = "repo:internal:pull",
["header:X-From"] = "metadata",
["defaultRegistry"] = "registry.internal",
["expiresAt"] = "2025-11-10T00:00:00Z",
["allowInsecureTls"] = "true"
};
using var handle = SurfaceSecretHandle.FromBytes(ReadOnlySpan<byte>.Empty, metadata);
var secret = SurfaceSecretParser.ParseRegistryAccessSecret(handle);
var entry = Assert.Single(secret.Entries);
Assert.Equal("registry.internal", entry.Registry);
Assert.Equal("meta-user", entry.Username);
Assert.Equal("meta-pass", entry.Password);
Assert.Contains("repo:internal:pull", entry.Scopes);
Assert.Equal("metadata", entry.Headers["X-From"]);
Assert.True(entry.AllowInsecureTls);
Assert.Equal("registry.internal", secret.DefaultRegistry);
}
}

View File

@@ -30,7 +30,10 @@ public sealed class ScannerSurfaceSecretConfiguratorTests
""";
using var handle = SurfaceSecretHandle.FromBytes(Encoding.UTF8.GetBytes(json));
var secretProvider = new StubSecretProvider(handle);
var secretProvider = new StubSecretProvider(new Dictionary<string, SurfaceSecretHandle>(StringComparer.OrdinalIgnoreCase)
{
["cas-access"] = handle
});
var environment = new StubSurfaceEnvironment();
var options = new ScannerWebServiceOptions();
@@ -82,17 +85,101 @@ public sealed class ScannerSurfaceSecretConfiguratorTests
Assert.Equal("X-Sync", storageOptions.ObjectStore.RustFs.ApiKeyHeader);
}
[Fact]
public void Configure_AppliesAttestationSecretToSigning()
{
const string json = """
{
"keyPem": "-----BEGIN KEY-----\nYWJj\n-----END KEY-----",
"certificatePem": "CERT-PEM",
"certificateChainPem": "CHAIN-PEM"
}
""";
using var handle = SurfaceSecretHandle.FromBytes(Encoding.UTF8.GetBytes(json));
var secretProvider = new StubSecretProvider(new Dictionary<string, SurfaceSecretHandle>(StringComparer.OrdinalIgnoreCase)
{
["attestation"] = handle
});
var environment = new StubSurfaceEnvironment();
var options = new ScannerWebServiceOptions();
var configurator = new ScannerSurfaceSecretConfigurator(
secretProvider,
environment,
NullLogger<ScannerSurfaceSecretConfigurator>.Instance);
configurator.Configure(options);
Assert.Equal("-----BEGIN KEY-----\nYWJj\n-----END KEY-----", options.Signing.KeyPem);
Assert.Equal("CERT-PEM", options.Signing.CertificatePem);
Assert.Equal("CHAIN-PEM", options.Signing.CertificateChainPem);
}
[Fact]
public void Configure_AppliesRegistrySecretToOptions()
{
const string json = """
{
"defaultRegistry": "registry.example.com",
"entries": [
{
"registry": "registry.example.com",
"username": "demo",
"password": "secret",
"scopes": ["repo:sample:pull"],
"headers": { "X-Test": "value" },
"allowInsecureTls": true,
"email": "demo@example.com"
}
]
}
""";
using var handle = SurfaceSecretHandle.FromBytes(Encoding.UTF8.GetBytes(json));
var secretProvider = new StubSecretProvider(new Dictionary<string, SurfaceSecretHandle>(StringComparer.OrdinalIgnoreCase)
{
["registry"] = handle
});
var environment = new StubSurfaceEnvironment();
var options = new ScannerWebServiceOptions();
var configurator = new ScannerSurfaceSecretConfigurator(
secretProvider,
environment,
NullLogger<ScannerSurfaceSecretConfigurator>.Instance);
configurator.Configure(options);
Assert.Equal("registry.example.com", options.Registry.DefaultRegistry);
var credential = Assert.Single(options.Registry.Credentials);
Assert.Equal("registry.example.com", credential.Registry);
Assert.Equal("demo", credential.Username);
Assert.Equal("secret", credential.Password);
Assert.True(credential.AllowInsecureTls);
Assert.Contains("repo:sample:pull", credential.Scopes);
Assert.Equal("value", credential.Headers["X-Test"]);
Assert.Equal("demo@example.com", credential.Email);
}
private sealed class StubSecretProvider : ISurfaceSecretProvider
{
private readonly SurfaceSecretHandle _handle;
private readonly IDictionary<string, SurfaceSecretHandle> _handles;
public StubSecretProvider(SurfaceSecretHandle handle)
public StubSecretProvider(IDictionary<string, SurfaceSecretHandle> handles)
{
_handle = handle;
_handles = handles ?? throw new ArgumentNullException(nameof(handles));
}
public ValueTask<SurfaceSecretHandle> GetAsync(SurfaceSecretRequest request, CancellationToken cancellationToken = default)
=> ValueTask.FromResult(_handle);
{
if (_handles.TryGetValue(request.SecretType, out var handle))
{
return ValueTask.FromResult(handle);
}
throw new SurfaceSecretNotFoundException(request);
}
}
private sealed class StubSurfaceEnvironment : ISurfaceEnvironment

View File

@@ -9,7 +9,7 @@ using System.Text.Json;
using System.Text.Json.Serialization;
using System.Threading.Tasks;
using System.Threading;
using Microsoft.AspNetCore.Http;
using Microsoft.AspNetCore.Http;
using Microsoft.AspNetCore.Mvc.Testing;
using Microsoft.AspNetCore.TestHost;
using Microsoft.Extensions.DependencyInjection;
@@ -17,6 +17,7 @@ using StellaOps.Scanner.EntryTrace;
using StellaOps.Scanner.EntryTrace.Serialization;
using StellaOps.Scanner.Storage.Catalog;
using StellaOps.Scanner.Storage.Repositories;
using StellaOps.Scanner.Storage.ObjectStore;
using StellaOps.Scanner.WebService.Contracts;
using StellaOps.Scanner.WebService.Domain;
using StellaOps.Scanner.WebService.Services;
@@ -92,39 +93,88 @@ public sealed class ScansEndpointsTests
using var factory = new ScannerApplicationFactory();
const string manifestDigest = "sha256:b2efc2d1f8b042b7f168bcb7d4e2f8e91d36b8306bd855382c5f847efc2c1111";
const string graphDigest = "sha256:9a0d4f8c7b6a5e4d3c2b1a0f9e8d7c6b5a4f3e2d1c0b9a8f7e6d5c4b3a291819";
const string ndjsonDigest = "sha256:3f2e1d0c9b8a7f6e5d4c3b2a1908f7e6d5c4b3a29181726354433221100ffeec";
const string fragmentsDigest = "sha256:aa55aa55aa55aa55aa55aa55aa55aa55aa55aa55aa55aa55aa55aa55aa55aa55";
using (var scope = factory.Services.CreateScope())
{
var artifactRepository = scope.ServiceProvider.GetRequiredService<ArtifactRepository>();
var linkRepository = scope.ServiceProvider.GetRequiredService<LinkRepository>();
var artifactId = CatalogIdFactory.CreateArtifactId(ArtifactDocumentType.ImageBom, digest);
var now = DateTime.UtcNow;
var artifact = new ArtifactDocument
async Task InsertAsync(
ArtifactDocumentType type,
ArtifactDocumentFormat format,
string artifactDigest,
string mediaType,
string ttlClass)
{
Id = artifactId,
Type = ArtifactDocumentType.ImageBom,
Format = ArtifactDocumentFormat.CycloneDxJson,
MediaType = "application/vnd.cyclonedx+json; version=1.6; view=inventory",
BytesSha256 = digest,
SizeBytes = 2048,
Immutable = true,
RefCount = 1,
TtlClass = "default",
CreatedAtUtc = DateTime.UtcNow,
UpdatedAtUtc = DateTime.UtcNow
};
var artifactId = CatalogIdFactory.CreateArtifactId(type, artifactDigest);
var document = new ArtifactDocument
{
Id = artifactId,
Type = type,
Format = format,
MediaType = mediaType,
BytesSha256 = artifactDigest,
SizeBytes = 2048,
Immutable = true,
RefCount = 1,
TtlClass = ttlClass,
CreatedAtUtc = now,
UpdatedAtUtc = now
};
await artifactRepository.UpsertAsync(artifact, CancellationToken.None).ConfigureAwait(false);
await artifactRepository.UpsertAsync(document, CancellationToken.None).ConfigureAwait(false);
var link = new LinkDocument
{
Id = CatalogIdFactory.CreateLinkId(LinkSourceType.Image, digest, artifactId),
FromType = LinkSourceType.Image,
FromDigest = digest,
ArtifactId = artifactId,
CreatedAtUtc = DateTime.UtcNow
};
var link = new LinkDocument
{
Id = CatalogIdFactory.CreateLinkId(LinkSourceType.Image, digest, artifactId),
FromType = LinkSourceType.Image,
FromDigest = digest,
ArtifactId = artifactId,
CreatedAtUtc = now
};
await linkRepository.UpsertAsync(link, CancellationToken.None).ConfigureAwait(false);
await linkRepository.UpsertAsync(link, CancellationToken.None).ConfigureAwait(false);
}
await InsertAsync(
ArtifactDocumentType.ImageBom,
ArtifactDocumentFormat.CycloneDxJson,
digest,
"application/vnd.cyclonedx+json; version=1.6; view=inventory",
"default").ConfigureAwait(false);
await InsertAsync(
ArtifactDocumentType.SurfaceManifest,
ArtifactDocumentFormat.SurfaceManifestJson,
manifestDigest,
"application/vnd.stellaops.surface.manifest+json",
"surface.manifest").ConfigureAwait(false);
await InsertAsync(
ArtifactDocumentType.SurfaceEntryTrace,
ArtifactDocumentFormat.EntryTraceGraphJson,
graphDigest,
"application/json",
"surface.payload").ConfigureAwait(false);
await InsertAsync(
ArtifactDocumentType.SurfaceEntryTrace,
ArtifactDocumentFormat.EntryTraceNdjson,
ndjsonDigest,
"application/x-ndjson",
"surface.payload").ConfigureAwait(false);
await InsertAsync(
ArtifactDocumentType.SurfaceLayerFragment,
ArtifactDocumentFormat.ComponentFragmentJson,
fragmentsDigest,
"application/json",
"surface.payload").ConfigureAwait(false);
}
using var client = factory.CreateClient();
@@ -160,15 +210,46 @@ public sealed class ScansEndpointsTests
Assert.Equal(digest, manifest.ImageDigest);
Assert.Equal(surface.Tenant, manifest.Tenant);
Assert.NotEqual(default, manifest.GeneratedAt);
var manifestArtifact = Assert.Single(manifest.Artifacts);
Assert.Equal("sbom-inventory", manifestArtifact.Kind);
Assert.Equal("cdx-json", manifestArtifact.Format);
Assert.Equal(digest, manifestArtifact.Digest);
Assert.Equal("application/vnd.cyclonedx+json; version=1.6; view=inventory", manifestArtifact.MediaType);
Assert.Equal("inventory", manifestArtifact.View);
var artifactsByKind = manifest.Artifacts.ToDictionary(a => a.Kind, StringComparer.Ordinal);
Assert.Equal(5, artifactsByKind.Count);
var expectedUri = $"cas://scanner-artifacts/scanner/images/{digestValue}/sbom.cdx.json";
Assert.Equal(expectedUri, manifestArtifact.Uri);
static string BuildUri(ArtifactDocumentType type, ArtifactDocumentFormat format, string digestValue)
=> $"cas://scanner-artifacts/{ArtifactObjectKeyBuilder.Build(type, format, digestValue, \"scanner\")}";
var inventory = artifactsByKind["sbom-inventory"];
Assert.Equal(digest, inventory.Digest);
Assert.Equal("cdx-json", inventory.Format);
Assert.Equal("application/vnd.cyclonedx+json; version=1.6; view=inventory", inventory.MediaType);
Assert.Equal("inventory", inventory.View);
Assert.Equal(BuildUri(ArtifactDocumentType.ImageBom, ArtifactDocumentFormat.CycloneDxJson, digest), inventory.Uri);
var manifestArtifact = artifactsByKind["surface.manifest"];
Assert.Equal(manifestDigest, manifestArtifact.Digest);
Assert.Equal("surface.manifest", manifestArtifact.Format);
Assert.Equal("application/vnd.stellaops.surface.manifest+json", manifestArtifact.MediaType);
Assert.Null(manifestArtifact.View);
Assert.Equal(BuildUri(ArtifactDocumentType.SurfaceManifest, ArtifactDocumentFormat.SurfaceManifestJson, manifestDigest), manifestArtifact.Uri);
var graphArtifact = artifactsByKind["entrytrace.graph"];
Assert.Equal(graphDigest, graphArtifact.Digest);
Assert.Equal("entrytrace.graph", graphArtifact.Format);
Assert.Equal("application/json", graphArtifact.MediaType);
Assert.Null(graphArtifact.View);
Assert.Equal(BuildUri(ArtifactDocumentType.SurfaceEntryTrace, ArtifactDocumentFormat.EntryTraceGraphJson, graphDigest), graphArtifact.Uri);
var ndjsonArtifact = artifactsByKind["entrytrace.ndjson"];
Assert.Equal(ndjsonDigest, ndjsonArtifact.Digest);
Assert.Equal("entrytrace.ndjson", ndjsonArtifact.Format);
Assert.Equal("application/x-ndjson", ndjsonArtifact.MediaType);
Assert.Null(ndjsonArtifact.View);
Assert.Equal(BuildUri(ArtifactDocumentType.SurfaceEntryTrace, ArtifactDocumentFormat.EntryTraceNdjson, ndjsonDigest), ndjsonArtifact.Uri);
var fragmentsArtifact = artifactsByKind["layer.fragments"];
Assert.Equal(fragmentsDigest, fragmentsArtifact.Digest);
Assert.Equal("layer.fragments", fragmentsArtifact.Format);
Assert.Equal("application/json", fragmentsArtifact.MediaType);
Assert.Equal("inventory", fragmentsArtifact.View);
Assert.Equal(BuildUri(ArtifactDocumentType.SurfaceLayerFragment, ArtifactDocumentFormat.ComponentFragmentJson, fragmentsDigest), fragmentsArtifact.Uri);
}
[Fact]

View File

@@ -0,0 +1,221 @@
using System;
using System.Collections.Generic;
using System.Diagnostics.Metrics;
using System.IO;
using System.Linq;
using System.Text;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.Extensions.Logging.Abstractions;
using StellaOps.Scanner.Core.Contracts;
using StellaOps.Scanner.Surface.Env;
using StellaOps.Scanner.Surface.Secrets;
using StellaOps.Scanner.Worker.Diagnostics;
using StellaOps.Scanner.Worker.Processing;
using Xunit;
namespace StellaOps.Scanner.Worker.Tests;
/// <summary>
/// Tests for the registry-secret worker stage: a resolvable secret is parsed and stored on the
/// scan analysis and a "resolved" request counter is emitted; a missing secret leaves the
/// analysis untouched and emits a "missing" counter with the "default" secret name.
/// </summary>
public sealed class RegistrySecretStageExecutorTests
{
    [Fact]
    public async Task ExecuteAsync_WithSecret_StoresCredentialsAndEmitsMetrics()
    {
        // Secret payload with a single registry entry; expiry far in the future so it is not stale.
        const string secretJson = """
        {
        "defaultRegistry": "registry.example.com",
        "entries": [
        {
        "registry": "registry.example.com",
        "username": "demo",
        "password": "s3cret",
        "expiresAt": "2099-01-01T00:00:00Z"
        }
        ]
        }
        """;
        var provider = new StubSecretProvider(secretJson);
        var environment = new StubSurfaceEnvironment("tenant-eu");
        var metrics = new ScannerWorkerMetrics();
        var timeProvider = TimeProvider.System;
        var executor = new RegistrySecretStageExecutor(
            provider,
            environment,
            metrics,
            timeProvider,
            NullLogger<RegistrySecretStageExecutor>.Instance);
        // Lease metadata selects the named secret "primary" via the surface.registry.secret key.
        var metadata = new Dictionary<string, string>(StringComparer.OrdinalIgnoreCase)
        {
            ["surface.registry.secret"] = "primary"
        };
        var lease = new StubLease("job-1", "scan-1", metadata);
        using var contextCancellation = CancellationTokenSource.CreateLinkedTokenSource(CancellationToken.None);
        var context = new ScanJobContext(lease, timeProvider, timeProvider.GetUtcNow(), contextCancellation.Token);
        // Listener must be created before execution so the counter measurement is observed.
        var measurements = new List<(long Value, KeyValuePair<string, object?>[] Tags)>();
        using var listener = CreateCounterListener("scanner_worker_registry_secret_requests_total", measurements);
        await executor.ExecuteAsync(context, CancellationToken.None);
        listener.RecordObservableInstruments();
        // The parsed credentials are stored under the registry-credentials analysis key.
        Assert.True(context.Analysis.TryGet<RegistryAccessSecret>(ScanAnalysisKeys.RegistryCredentials, out var secret));
        Assert.NotNull(secret);
        Assert.Single(secret!.Entries);
        // Exactly one increment tagged resolved/primary is expected.
        Assert.Contains(
            measurements,
            measurement => measurement.Value == 1 &&
                HasTagValue(measurement.Tags, "secret.result", "resolved") &&
                HasTagValue(measurement.Tags, "secret.name", "primary"));
    }

    [Fact]
    public async Task ExecuteAsync_SecretMissing_RecordsMissingMetric()
    {
        // Provider that always throws SurfaceSecretNotFoundException.
        var provider = new MissingSecretProvider();
        var environment = new StubSurfaceEnvironment("tenant-eu");
        var metrics = new ScannerWorkerMetrics();
        var executor = new RegistrySecretStageExecutor(
            provider,
            environment,
            metrics,
            TimeProvider.System,
            NullLogger<RegistrySecretStageExecutor>.Instance);
        // No secret name in metadata, so the stage falls back to the "default" secret name.
        var lease = new StubLease("job-2", "scan-2", new Dictionary<string, string>());
        var context = new ScanJobContext(lease, TimeProvider.System, TimeProvider.System.GetUtcNow(), CancellationToken.None);
        var measurements = new List<(long Value, KeyValuePair<string, object?>[] Tags)>();
        using var listener = CreateCounterListener("scanner_worker_registry_secret_requests_total", measurements);
        await executor.ExecuteAsync(context, CancellationToken.None);
        listener.RecordObservableInstruments();
        // A missing secret must not populate the analysis, and must record a "missing" increment.
        Assert.False(context.Analysis.TryGet<RegistryAccessSecret>(ScanAnalysisKeys.RegistryCredentials, out _));
        Assert.Contains(
            measurements,
            measurement => measurement.Value == 1 &&
                HasTagValue(measurement.Tags, "secret.result", "missing") &&
                HasTagValue(measurement.Tags, "secret.name", "default"));
    }

    /// <summary>
    /// Creates a started <see cref="MeterListener"/> that records long measurements for the
    /// named instrument on the worker meter into <paramref name="measurements"/>.
    /// </summary>
    private static MeterListener CreateCounterListener(
        string instrumentName,
        ICollection<(long Value, KeyValuePair<string, object?>[] Tags)> measurements)
    {
        var listener = new MeterListener
        {
            InstrumentPublished = (instrument, meterListener) =>
            {
                // Only subscribe to the specific instrument under test on the worker's meter.
                if (instrument.Meter.Name == ScannerWorkerInstrumentation.MeterName &&
                    instrument.Name == instrumentName)
                {
                    meterListener.EnableMeasurementEvents(instrument);
                }
            }
        };
        listener.SetMeasurementEventCallback<long>((instrument, measurement, tags, state) =>
        {
            // Copy the tag span: it is only valid for the duration of the callback.
            var copy = tags.ToArray();
            measurements.Add((measurement, copy));
        });
        listener.Start();
        return listener;
    }

    // Case-insensitive lookup of a tag key/value pair within a measurement's tag set.
    private static bool HasTagValue(IEnumerable<KeyValuePair<string, object?>> tags, string key, string expected)
        => tags.Any(tag => string.Equals(tag.Key, key, StringComparison.OrdinalIgnoreCase) &&
            string.Equals(tag.Value?.ToString(), expected, StringComparison.OrdinalIgnoreCase));

    /// <summary>Secret provider that always returns the supplied JSON payload.</summary>
    private sealed class StubSecretProvider : ISurfaceSecretProvider
    {
        private readonly string _json;
        public StubSecretProvider(string json)
        {
            _json = json;
        }
        public ValueTask<SurfaceSecretHandle> GetAsync(SurfaceSecretRequest request, CancellationToken cancellationToken = default)
        {
            var bytes = Encoding.UTF8.GetBytes(_json);
            return ValueTask.FromResult(SurfaceSecretHandle.FromBytes(bytes));
        }
    }

    /// <summary>Secret provider that reports every request as not found.</summary>
    private sealed class MissingSecretProvider : ISurfaceSecretProvider
    {
        public ValueTask<SurfaceSecretHandle> GetAsync(SurfaceSecretRequest request, CancellationToken cancellationToken = default)
            => throw new SurfaceSecretNotFoundException(request);
    }

    /// <summary>Minimal surface environment exposing inline secrets for the given tenant.</summary>
    private sealed class StubSurfaceEnvironment : ISurfaceEnvironment
    {
        public StubSurfaceEnvironment(string tenant)
        {
            Settings = new SurfaceEnvironmentSettings(
                new Uri("https://surface.example"),
                "bucket",
                "region",
                new DirectoryInfo(Path.GetTempPath()),
                1024,
                false,
                Array.Empty<string>(),
                new SurfaceSecretsConfiguration("inline", tenant, null, null, null, AllowInline: true),
                tenant,
                new SurfaceTlsConfiguration(null, null, null))
            {
                CreatedAtUtc = DateTimeOffset.UtcNow
            };
            RawVariables = new Dictionary<string, string>(StringComparer.OrdinalIgnoreCase);
        }
        public SurfaceEnvironmentSettings Settings { get; }
        public IReadOnlyDictionary<string, string> RawVariables { get; }
    }

    /// <summary>No-op scan-job lease carrying the supplied ids and metadata.</summary>
    private sealed class StubLease : IScanJobLease
    {
        private readonly IReadOnlyDictionary<string, string> _metadata;
        public StubLease(string jobId, string scanId, IReadOnlyDictionary<string, string> metadata)
        {
            JobId = jobId;
            ScanId = scanId;
            _metadata = metadata;
            EnqueuedAtUtc = DateTimeOffset.UtcNow.AddMinutes(-1);
            LeasedAtUtc = DateTimeOffset.UtcNow;
        }
        public string JobId { get; }
        public string ScanId { get; }
        public int Attempt { get; } = 1;
        public DateTimeOffset EnqueuedAtUtc { get; }
        public DateTimeOffset LeasedAtUtc { get; }
        public TimeSpan LeaseDuration { get; } = TimeSpan.FromMinutes(5);
        public IReadOnlyDictionary<string, string> Metadata => _metadata;
        public ValueTask RenewAsync(CancellationToken cancellationToken) => ValueTask.CompletedTask;
        public ValueTask CompleteAsync(CancellationToken cancellationToken) => ValueTask.CompletedTask;
        public ValueTask AbandonAsync(string reason, CancellationToken cancellationToken) => ValueTask.CompletedTask;
        public ValueTask PoisonAsync(string reason, CancellationToken cancellationToken) => ValueTask.CompletedTask;
        public ValueTask DisposeAsync() => ValueTask.CompletedTask;
    }
}

View File

@@ -0,0 +1,349 @@
using System;
using System.Collections.Generic;
using System.Collections.Immutable;
using System.IO;
using System.Linq;
using System.Text.Json;
using System.Text.Json.Serialization;
using System.Threading;
using System.Threading.Tasks;
using System.Security.Cryptography;
using Microsoft.Extensions.Logging.Abstractions;
using StellaOps.Scanner.Core.Contracts;
using StellaOps.Scanner.EntryTrace;
using StellaOps.Scanner.Surface.Env;
using StellaOps.Scanner.Surface.FS;
using StellaOps.Scanner.Worker.Diagnostics;
using StellaOps.Scanner.Worker.Processing;
using StellaOps.Scanner.Worker.Processing.Surface;
using StellaOps.Scanner.Worker.Tests.TestInfrastructure;
using Xunit;
namespace StellaOps.Scanner.Worker.Tests;
/// <summary>
/// Tests for the surface-manifest worker stage: with no payloads in the analysis the stage skips
/// publishing and records a skip metric; with entry-trace and layer-fragment payloads present it
/// publishes a manifest, caches each artifact plus the manifest, and records publish metrics.
/// </summary>
public sealed class SurfaceManifestStageExecutorTests
{
    [Fact]
    public async Task ExecuteAsync_WhenNoPayloads_SkipsPublishAndRecordsSkipMetric()
    {
        var metrics = new ScannerWorkerMetrics();
        var publisher = new TestSurfaceManifestPublisher();
        var cache = new RecordingSurfaceCache();
        var environment = new TestSurfaceEnvironment("tenant-a");
        using var listener = new WorkerMeterListener();
        listener.Start();
        var executor = new SurfaceManifestStageExecutor(
            publisher,
            cache,
            environment,
            metrics,
            NullLogger<SurfaceManifestStageExecutor>.Instance);
        // Context with an empty analysis: no entry-trace graph, ndjson, or fragments.
        var context = CreateContext();
        await executor.ExecuteAsync(context, CancellationToken.None);
        // Nothing published and nothing cached.
        Assert.Equal(0, publisher.PublishCalls);
        Assert.Empty(cache.Entries);
        // Exactly one skip counter increment tagged surface.result=skipped.
        var skipMetrics = listener.Measurements
            .Where(m => m.InstrumentName == "scanner_worker_surface_manifests_skipped_total")
            .ToArray();
        Assert.Single(skipMetrics);
        Assert.Equal(1, skipMetrics[0].Value);
        Assert.Equal("skipped", skipMetrics[0]["surface.result"]);
    }

    [Fact]
    public async Task ExecuteAsync_PublishesPayloads_CachesArtifacts_AndRecordsMetrics()
    {
        var metrics = new ScannerWorkerMetrics();
        var publisher = new TestSurfaceManifestPublisher("tenant-a");
        var cache = new RecordingSurfaceCache();
        var environment = new TestSurfaceEnvironment("tenant-a");
        using var listener = new WorkerMeterListener();
        listener.Start();
        var executor = new SurfaceManifestStageExecutor(
            publisher,
            cache,
            environment,
            metrics,
            NullLogger<SurfaceManifestStageExecutor>.Instance);
        var context = CreateContext();
        // Seed the analysis with an entry-trace graph, ndjson lines, and one layer fragment.
        PopulateAnalysis(context);
        await executor.ExecuteAsync(context, CancellationToken.None);
        Assert.Equal(1, publisher.PublishCalls);
        // The publish result (with the publisher's manifest digest) is stored on the analysis.
        Assert.True(context.Analysis.TryGet<SurfaceManifestPublishResult>(ScanAnalysisKeys.SurfaceManifest, out var result));
        Assert.NotNull(result);
        Assert.Equal(publisher.LastManifestDigest, result!.ManifestDigest);
        // Three artifact payloads plus the manifest itself are written to the tenant cache.
        Assert.Equal(4, cache.Entries.Count);
        Assert.Contains(cache.Entries.Keys, key => key.Namespace == "surface.artifacts.entrytrace.graph" && key.Tenant == "tenant-a");
        Assert.Contains(cache.Entries.Keys, key => key.Namespace == "surface.artifacts.entrytrace.ndjson" && key.Tenant == "tenant-a");
        Assert.Contains(cache.Entries.Keys, key => key.Namespace == "surface.artifacts.layer.fragments" && key.Tenant == "tenant-a");
        Assert.Contains(cache.Entries.Keys, key => key.Namespace == "surface.manifests" && key.Tenant == "tenant-a");
        // One "published" counter increment carrying the payload count (3).
        var publishedMetrics = listener.Measurements
            .Where(m => m.InstrumentName == "scanner_worker_surface_manifests_published_total")
            .ToArray();
        Assert.Single(publishedMetrics);
        Assert.Equal(1, publishedMetrics[0].Value);
        Assert.Equal("published", publishedMetrics[0]["surface.result"]);
        Assert.Equal(3, Convert.ToInt32(publishedMetrics[0]["surface.payload_count"]));
        // One persisted-payload increment per payload kind.
        var payloadMetrics = listener.Measurements
            .Where(m => m.InstrumentName == "scanner_worker_surface_payload_persisted_total")
            .ToArray();
        Assert.Equal(3, payloadMetrics.Length);
        Assert.Contains(payloadMetrics, m => Equals("entrytrace.graph", m["surface.kind"]));
        Assert.Contains(payloadMetrics, m => Equals("entrytrace.ndjson", m["surface.kind"]));
        Assert.Contains(payloadMetrics, m => Equals("layer.fragments", m["surface.kind"]));
    }

    // Builds a scan-job context around a fake lease with system time and no cancellation.
    private static ScanJobContext CreateContext()
    {
        var lease = new FakeJobLease();
        return new ScanJobContext(lease, TimeProvider.System, DateTimeOffset.UtcNow, CancellationToken.None);
    }

    // Seeds the context analysis with the three payload kinds the executor publishes:
    // an entry-trace graph, ndjson lines, and a single layer component fragment.
    private static void PopulateAnalysis(ScanJobContext context)
    {
        var node = new EntryTraceNode(
            Id: 1,
            Kind: EntryTraceNodeKind.Command,
            DisplayName: "/bin/entry",
            Arguments: ImmutableArray<string>.Empty,
            InterpreterKind: EntryTraceInterpreterKind.None,
            Evidence: null,
            Span: null,
            Metadata: null);
        var graph = new EntryTraceGraph(
            Outcome: EntryTraceOutcome.Resolved,
            Nodes: ImmutableArray.Create(node),
            Edges: ImmutableArray<EntryTraceEdge>.Empty,
            Diagnostics: ImmutableArray<EntryTraceDiagnostic>.Empty,
            Plans: ImmutableArray<EntryTracePlan>.Empty,
            Terminals: ImmutableArray<EntryTraceTerminal>.Empty);
        context.Analysis.Set(ScanAnalysisKeys.EntryTraceGraph, graph);
        var ndjson = ImmutableArray.Create("{\"entry\":\"/bin/entry\"}\n");
        context.Analysis.Set(ScanAnalysisKeys.EntryTraceNdjson, ndjson);
        var component = new ComponentRecord
        {
            Identity = ComponentIdentity.Create("pkg:test", "test", "1.0.0"),
            LayerDigest = "sha256:layer-1",
            Evidence = ImmutableArray<ComponentEvidence>.Empty,
            Usage = ComponentUsage.Create(true, new[] { "/bin/entry" })
        };
        var fragment = LayerComponentFragment.Create("sha256:layer-1", new[] { component });
        context.Analysis.Set(ScanAnalysisKeys.LayerComponentFragments, ImmutableArray.Create(fragment));
    }

    /// <summary>In-memory surface cache that records every key/payload it stores.</summary>
    private sealed class RecordingSurfaceCache : ISurfaceCache
    {
        private readonly Dictionary<SurfaceCacheKey, byte[]> _entries = new();
        public IReadOnlyDictionary<SurfaceCacheKey, byte[]> Entries => _entries;
        public Task<T> GetOrCreateAsync<T>(
            SurfaceCacheKey key,
            Func<CancellationToken, Task<T>> factory,
            Func<T, ReadOnlyMemory<byte>> serializer,
            Func<ReadOnlyMemory<byte>, T> deserializer,
            CancellationToken cancellationToken = default)
        {
            // Cache hit: deserialize the stored payload; miss: run the factory and store.
            if (_entries.TryGetValue(key, out var payload))
            {
                return Task.FromResult(deserializer(payload));
            }
            return CreateAsync(key, factory, serializer, cancellationToken);
        }
        public Task<T?> TryGetAsync<T>(
            SurfaceCacheKey key,
            Func<ReadOnlyMemory<byte>, T> deserializer,
            CancellationToken cancellationToken = default)
        {
            if (_entries.TryGetValue(key, out var payload))
            {
                return Task.FromResult<T?>(deserializer(payload));
            }
            return Task.FromResult<T?>(default);
        }
        public Task SetAsync(
            SurfaceCacheKey key,
            ReadOnlyMemory<byte> payload,
            CancellationToken cancellationToken = default)
        {
            _entries[key] = payload.ToArray();
            return Task.CompletedTask;
        }
        private async Task<T> CreateAsync<T>(
            SurfaceCacheKey key,
            Func<CancellationToken, Task<T>> factory,
            Func<T, ReadOnlyMemory<byte>> serializer,
            CancellationToken cancellationToken)
        {
            var value = await factory(cancellationToken).ConfigureAwait(false);
            _entries[key] = serializer(value).ToArray();
            return value;
        }
    }

    /// <summary>
    /// Fake publisher that builds a deterministic manifest document from the request payloads,
    /// hashing each payload (and the serialized manifest) with SHA-256 to derive digests.
    /// </summary>
    private sealed class TestSurfaceManifestPublisher : ISurfaceManifestPublisher
    {
        private readonly string _tenant;
        private readonly JsonSerializerOptions _options = new(JsonSerializerDefaults.Web)
        {
            WriteIndented = false,
            DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingNull
        };
        public TestSurfaceManifestPublisher(string tenant = "tenant-a")
        {
            _tenant = tenant;
        }
        // Number of PublishAsync invocations observed.
        public int PublishCalls { get; private set; }
        // Last request passed to PublishAsync.
        public SurfaceManifestRequest? LastRequest { get; private set; }
        // Digest of the last serialized manifest document.
        public string? LastManifestDigest { get; private set; }
        public Task<SurfaceManifestPublishResult> PublishAsync(SurfaceManifestRequest request, CancellationToken cancellationToken)
        {
            PublishCalls++;
            LastRequest = request;
            // One manifest artifact per payload, keyed by the payload's content digest.
            var artifacts = request.Payloads.Select(payload =>
            {
                var digest = ComputeDigest(payload.Content.Span);
                return new SurfaceManifestArtifact
                {
                    Kind = payload.Kind,
                    Uri = $"cas://test/{payload.Kind}/{digest}",
                    Digest = digest,
                    MediaType = payload.MediaType,
                    Format = payload.ArtifactFormat.ToString().ToLowerInvariant(),
                    SizeBytes = payload.Content.Length,
                    View = payload.View,
                    Metadata = payload.Metadata,
                    Storage = new SurfaceManifestStorage
                    {
                        Bucket = "test-bucket",
                        ObjectKey = $"objects/{digest}",
                        SizeBytes = payload.Content.Length,
                        ContentType = payload.MediaType
                    }
                };
            }).ToImmutableArray();
            var document = new SurfaceManifestDocument
            {
                Tenant = _tenant,
                ImageDigest = request.ImageDigest,
                ScanId = request.ScanId,
                GeneratedAt = DateTimeOffset.UtcNow,
                Source = new SurfaceManifestSource
                {
                    Component = request.Component,
                    Version = request.Version,
                    WorkerInstance = request.WorkerInstance,
                    Attempt = request.Attempt
                },
                Artifacts = artifacts
            };
            var manifestBytes = JsonSerializer.SerializeToUtf8Bytes(document, _options);
            var manifestDigest = ComputeDigest(manifestBytes);
            LastManifestDigest = manifestDigest;
            var result = new SurfaceManifestPublishResult(
                ManifestDigest: manifestDigest,
                ManifestUri: $"cas://test/manifests/{manifestDigest}",
                ArtifactId: $"surface-manifest::{manifestDigest}",
                Document: document);
            return Task.FromResult(result);
        }
        // Lower-case hex "sha256:..." digest of the given bytes.
        private static string ComputeDigest(ReadOnlySpan<byte> content)
        {
            Span<byte> hash = stackalloc byte[32];
            SHA256.HashData(content, hash);
            return $"sha256:{Convert.ToHexString(hash).ToLowerInvariant()}";
        }
    }

    /// <summary>Minimal surface environment with a temp cache root and no secrets/TLS.</summary>
    private sealed class TestSurfaceEnvironment : ISurfaceEnvironment
    {
        public TestSurfaceEnvironment(string tenant)
        {
            var cacheRoot = new DirectoryInfo(Path.Combine(Path.GetTempPath(), "surface-cache-test"));
            Settings = new SurfaceEnvironmentSettings(
                SurfaceFsEndpoint: new Uri("https://surface.local"),
                SurfaceFsBucket: "test-bucket",
                SurfaceFsRegion: null,
                CacheRoot: cacheRoot,
                CacheQuotaMegabytes: 512,
                PrefetchEnabled: false,
                FeatureFlags: Array.Empty<string>(),
                Secrets: new SurfaceSecretsConfiguration("none", tenant, null, null, null, false),
                Tenant: tenant,
                Tls: new SurfaceTlsConfiguration(null, null, null));
        }
        public SurfaceEnvironmentSettings Settings { get; }
        public IReadOnlyDictionary<string, string> RawVariables { get; } = new Dictionary<string, string>();
    }

    /// <summary>No-op lease with random job/scan ids and static queue metadata.</summary>
    private sealed class FakeJobLease : IScanJobLease
    {
        private readonly Dictionary<string, string> _metadata = new()
        {
            ["queue"] = "tests",
            ["job.kind"] = "unit"
        };
        public string JobId { get; } = Guid.NewGuid().ToString("n");
        public string ScanId { get; } = $"scan-{Guid.NewGuid():n}";
        public int Attempt { get; } = 1;
        public DateTimeOffset EnqueuedAtUtc { get; } = DateTimeOffset.UtcNow.AddMinutes(-1);
        public DateTimeOffset LeasedAtUtc { get; } = DateTimeOffset.UtcNow;
        public TimeSpan LeaseDuration { get; } = TimeSpan.FromMinutes(5);
        public IReadOnlyDictionary<string, string> Metadata => _metadata;
        public ValueTask RenewAsync(CancellationToken cancellationToken) => ValueTask.CompletedTask;
        public ValueTask CompleteAsync(CancellationToken cancellationToken) => ValueTask.CompletedTask;
        public ValueTask AbandonAsync(string reason, CancellationToken cancellationToken) => ValueTask.CompletedTask;
        public ValueTask PoisonAsync(string reason, CancellationToken cancellationToken) => ValueTask.CompletedTask;
        public ValueTask DisposeAsync() => ValueTask.CompletedTask;
    }
}

View File

@@ -0,0 +1,61 @@
using System;
using System.Collections.Concurrent;
using System.Collections.Generic;
using System.Diagnostics.Metrics;
using System.Globalization;
using StellaOps.Scanner.Worker.Diagnostics;
namespace StellaOps.Scanner.Worker.Tests.TestInfrastructure;
/// <summary>
/// Test helper that subscribes to the scanner-worker meter and records every long/double
/// measurement (with its tags) so tests can assert on emitted metrics.
/// </summary>
public sealed class WorkerMeterListener : IDisposable
{
    private readonly MeterListener _listener;

    /// <summary>All measurements observed since <see cref="Start"/>; unordered.</summary>
    public ConcurrentBag<Measurement> Measurements { get; } = new();

    public WorkerMeterListener()
    {
        _listener = new MeterListener
        {
            // Subscribe only to instruments published on the worker's own meter.
            InstrumentPublished = (instrument, listener) =>
            {
                if (instrument.Meter.Name == ScannerWorkerInstrumentation.MeterName)
                {
                    listener.EnableMeasurementEvents(instrument);
                }
            }
        };
        _listener.SetMeasurementEventCallback<double>(
            (instrument, measurement, tags, state) => AddMeasurement(instrument, measurement, tags));
        _listener.SetMeasurementEventCallback<long>(
            (instrument, measurement, tags, state) => AddMeasurement(instrument, measurement, tags));
    }

    /// <summary>Begins observing measurements; call before exercising the code under test.</summary>
    public void Start() => _listener.Start();

    public void Dispose() => _listener.Dispose();

    /// <summary>
    /// A single recorded measurement; the indexer returns the tag value for
    /// <paramref name="name"/>, or <c>null</c> when the tag is absent.
    /// </summary>
    public sealed record Measurement(string InstrumentName, double Value, IReadOnlyDictionary<string, object?> Tags)
    {
        public object? this[string name] => Tags.TryGetValue(name, out var value) ? value : null;
    }

    private void AddMeasurement<T>(Instrument instrument, T measurement, ReadOnlySpan<KeyValuePair<string, object?>> tags)
        where T : struct, IConvertible
    {
        // Copy tags out of the span: it is only valid during the callback.
        var tagDictionary = new Dictionary<string, object?>(tags.Length, StringComparer.Ordinal);
        foreach (var tag in tags)
        {
            tagDictionary[tag.Key] = tag.Value;
        }

        // Normalise long and double measurements to double; invariant culture keeps the
        // conversion locale-independent. (Uses the file's existing System.Globalization using
        // instead of a fully qualified name.)
        var value = Convert.ToDouble(measurement, CultureInfo.InvariantCulture);
        Measurements.Add(new Measurement(instrument.Name, value, tagDictionary));
    }
}

View File

@@ -1,19 +1,19 @@
using System;
using System.Collections.Concurrent;
using System.Collections.Generic;
using System.Diagnostics.Metrics;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.Extensions.DependencyInjection;
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Options;
using Microsoft.Extensions.Time.Testing;
using StellaOps.Scanner.Worker.Diagnostics;
using StellaOps.Scanner.Worker.Hosting;
using StellaOps.Scanner.Worker.Options;
using StellaOps.Scanner.Worker.Processing;
using Xunit;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.Extensions.DependencyInjection;
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Options;
using Microsoft.Extensions.Time.Testing;
using StellaOps.Scanner.Worker.Diagnostics;
using StellaOps.Scanner.Worker.Hosting;
using StellaOps.Scanner.Worker.Options;
using StellaOps.Scanner.Worker.Processing;
using StellaOps.Scanner.Worker.Tests.TestInfrastructure;
using Xunit;
namespace StellaOps.Scanner.Worker.Tests;
@@ -48,8 +48,8 @@ public sealed class WorkerBasicScanScenarioTests
var scheduler = new ControlledDelayScheduler();
var analyzer = new TestAnalyzerDispatcher(scheduler);
using var listener = new WorkerMetricsListener();
listener.Start();
using var listener = new WorkerMeterListener();
listener.Start();
using var services = new ServiceCollection()
.AddLogging(builder =>
@@ -341,46 +341,6 @@ public sealed class WorkerBasicScanScenarioTests
}
}
    // Legacy inline metrics listener (this hunk removes it in favour of the shared
    // WorkerMeterListener test helper). Captures measurements from the worker meter.
    private sealed class WorkerMetricsListener : IDisposable
    {
        private readonly MeterListener _listener;

        public ConcurrentBag<Measurement> Measurements { get; } = new();

        public WorkerMetricsListener()
        {
            _listener = new MeterListener
            {
                InstrumentPublished = (instrument, listener) =>
                {
                    // Subscribe only to instruments from the worker's meter.
                    if (instrument.Meter.Name == ScannerWorkerInstrumentation.MeterName)
                    {
                        listener.EnableMeasurementEvents(instrument);
                    }
                }
            };
            // NOTE(review): only double measurements are captured here; long-based
            // counters are missed — one reason this class was replaced.
            _listener.SetMeasurementEventCallback<double>((instrument, measurement, tags, state) =>
            {
                var tagDictionary = new Dictionary<string, object?>(tags.Length, StringComparer.Ordinal);
                foreach (var tag in tags)
                {
                    tagDictionary[tag.Key] = tag.Value;
                }
                Measurements.Add(new Measurement(instrument.Name, measurement, tagDictionary));
            });
        }

        public void Start() => _listener.Start();

        public void Dispose() => _listener.Dispose();
    }
    // Snapshot of one recorded metric value; the indexer looks up a tag by name,
    // returning null when the tag was not attached to the measurement.
    public sealed record Measurement(string InstrumentName, double Value, IReadOnlyDictionary<string, object?> Tags)
    {
        public object? this[string name] => Tags.TryGetValue(name, out var value) ? value : null;
    }
private sealed class TestLoggerProvider : ILoggerProvider
{
private readonly ConcurrentQueue<TestLogEntry> _entries = new();

View File

@@ -0,0 +1,31 @@
using System.Text.Json.Serialization;
namespace StellaOps.TaskRunner.Core.Configuration;
/// <summary>
/// Well-known values for <see cref="TaskRunnerStorageOptions.Mode"/>.
/// </summary>
public static class TaskRunnerStorageModes
{
    public const string Filesystem = "filesystem";
    public const string Mongo = "mongo";
}
/// <summary>
/// Selects the persistence backend for task-runner state.
/// </summary>
public sealed class TaskRunnerStorageOptions
{
    /// <summary>Storage backend; one of <see cref="TaskRunnerStorageModes"/>. Defaults to filesystem.</summary>
    public string Mode { get; set; } = TaskRunnerStorageModes.Filesystem;

    /// <summary>MongoDB settings, consulted when <see cref="Mode"/> is "mongo".</summary>
    public TaskRunnerMongoOptions Mongo { get; set; } = new();
}
/// <summary>
/// Connection and collection-name settings for the MongoDB task-runner stores.
/// </summary>
public sealed class TaskRunnerMongoOptions
{
    public string ConnectionString { get; set; } = "mongodb://127.0.0.1:27017/stellaops-taskrunner";

    // Presumably overrides the database embedded in the connection string when set —
    // TODO confirm against the factory that builds IMongoDatabase.
    public string? Database { get; set; }

    public string RunsCollection { get; set; } = "pack_runs";
    public string LogsCollection { get; set; } = "pack_run_logs";
    public string ArtifactsCollection { get; set; } = "pack_artifacts";
    public string ApprovalsCollection { get; set; } = "pack_run_approvals";
}

View File

@@ -0,0 +1,33 @@
namespace StellaOps.TaskRunner.Core.Execution;
/// <summary>
/// Persists pack run log entries in a deterministic append-only fashion.
/// </summary>
public interface IPackRunLogStore
{
    /// <summary>
    /// Appends a single log entry to the run log.
    /// </summary>
    /// <param name="runId">Identifier of the pack run the entry belongs to.</param>
    /// <param name="entry">The structured entry to persist.</param>
    Task AppendAsync(string runId, PackRunLogEntry entry, CancellationToken cancellationToken);

    /// <summary>
    /// Returns the log entries for the specified run in chronological order.
    /// </summary>
    IAsyncEnumerable<PackRunLogEntry> ReadAsync(string runId, CancellationToken cancellationToken);

    /// <summary>
    /// Determines whether any log entries exist for the specified run.
    /// </summary>
    Task<bool> ExistsAsync(string runId, CancellationToken cancellationToken);
}
/// <summary>
/// Represents a single structured log entry emitted during a pack run.
/// </summary>
/// <param name="Timestamp">Moment the event occurred.</param>
/// <param name="Level">Severity label (e.g. "info").</param>
/// <param name="EventType">Machine-readable event name (e.g. "run.created", "step.started").</param>
/// <param name="Message">Human-readable description of the event.</param>
/// <param name="StepId">Step the entry relates to, or null for run-level events.</param>
/// <param name="Metadata">Optional additional key/value context.</param>
public sealed record PackRunLogEntry(
    DateTimeOffset Timestamp,
    string Level,
    string EventType,
    string Message,
    string? StepId,
    IReadOnlyDictionary<string, string>? Metadata);

View File

@@ -0,0 +1,116 @@
using StellaOps.TaskRunner.Core.Execution.Simulation;
namespace StellaOps.TaskRunner.Core.Execution;
/// <summary>
/// Builds deterministic <see cref="PackRunState"/> snapshots for freshly scheduled runs.
/// </summary>
public static class PackRunStateFactory
{
    public static PackRunState CreateInitialState(
        PackRunExecutionContext context,
        PackRunExecutionGraph graph,
        PackRunSimulationEngine simulationEngine,
        DateTimeOffset timestamp)
    {
        ArgumentNullException.ThrowIfNull(context);
        ArgumentNullException.ThrowIfNull(graph);
        ArgumentNullException.ThrowIfNull(simulationEngine);

        // Simulate the plan once up front and index the result by step id so each
        // execution step can be matched to its simulated outcome.
        var simulationNodes = IndexSimulation(simulationEngine.Simulate(context.Plan).Steps);
        var stepRecords = new Dictionary<string, PackRunStepStateRecord>(StringComparer.Ordinal);

        foreach (var step in EnumerateSteps(graph.Steps))
        {
            var simulationStatus = simulationNodes.TryGetValue(step.Id, out var node)
                ? node.Status
                : PackRunSimulationStatus.Pending;

            var (status, statusReason) = DetermineInitialStatus(step, simulationStatus);

            stepRecords[step.Id] = new PackRunStepStateRecord(
                step.Id,
                step.Kind,
                step.Enabled,
                step.ContinueOnError,
                step.MaxParallel,
                step.ApprovalId,
                step.GateMessage,
                status,
                Attempts: 0,
                LastTransitionAt: null,
                NextAttemptAt: null,
                StatusReason: statusReason);
        }

        return PackRunState.Create(
            context.RunId,
            context.Plan.Hash,
            context.Plan,
            graph.FailurePolicy ?? PackRunExecutionGraph.DefaultFailurePolicy,
            context.RequestedAt,
            stepRecords,
            timestamp);
    }

    // Maps a step plus its simulated outcome to the initial execution status and
    // the machine-readable gate reason persisted alongside it.
    private static (PackRunStepExecutionStatus Status, string? Reason) DetermineInitialStatus(
        PackRunExecutionStep step,
        PackRunSimulationStatus simulationStatus)
    {
        if (!step.Enabled)
        {
            return (PackRunStepExecutionStatus.Skipped, "disabled");
        }

        return simulationStatus switch
        {
            PackRunSimulationStatus.RequiresApproval => (PackRunStepExecutionStatus.Pending, "requires-approval"),
            PackRunSimulationStatus.RequiresPolicy => (PackRunStepExecutionStatus.Pending, "requires-policy"),
            PackRunSimulationStatus.Skipped => (PackRunStepExecutionStatus.Skipped, "condition-false"),
            _ => (PackRunStepExecutionStatus.Pending, null)
        };
    }

    // Flattens the simulation tree into an id-keyed lookup using an explicit stack
    // (pre-order, so later duplicates win exactly as in a recursive walk).
    private static Dictionary<string, PackRunSimulationNode> IndexSimulation(IReadOnlyList<PackRunSimulationNode> nodes)
    {
        var index = new Dictionary<string, PackRunSimulationNode>(StringComparer.Ordinal);
        var pending = new Stack<PackRunSimulationNode>();
        for (var i = nodes.Count - 1; i >= 0; i--)
        {
            pending.Push(nodes[i]);
        }

        while (pending.Count > 0)
        {
            var node = pending.Pop();
            index[node.Id] = node;
            for (var i = node.Children.Count - 1; i >= 0; i--)
            {
                pending.Push(node.Children[i]);
            }
        }

        return index;
    }

    // Yields every execution step in pre-order (parent before children) without recursion.
    private static IEnumerable<PackRunExecutionStep> EnumerateSteps(IReadOnlyList<PackRunExecutionStep> steps)
    {
        var pending = new Stack<PackRunExecutionStep>();
        for (var i = steps.Count - 1; i >= 0; i--)
        {
            pending.Push(steps[i]);
        }

        while (pending.Count > 0)
        {
            var step = pending.Pop();
            yield return step;
            for (var i = step.Children.Count - 1; i >= 0; i--)
            {
                pending.Push(step.Children[i]);
            }
        }
    }
}

View File

@@ -0,0 +1,162 @@
using System.Collections.Concurrent;
using System.Runtime.CompilerServices;
using System.Text;
using System.Text.Json;
using StellaOps.TaskRunner.Core.Execution;
namespace StellaOps.TaskRunner.Infrastructure.Execution;
/// <summary>
/// Persists pack run logs as newline-delimited JSON for deterministic replay and offline
/// mirroring. One <c>.ndjson</c> file per run; appends are serialized by a per-file semaphore
/// so concurrent writers never interleave partial lines.
/// </summary>
public sealed class FilePackRunLogStore : IPackRunLogStore
{
    private static readonly JsonSerializerOptions SerializerOptions = new(JsonSerializerDefaults.Web)
    {
        WriteIndented = false
    };

    private readonly string rootPath;

    // Per-path append gates. Entries are never evicted; acceptable for the bounded
    // number of runs a single store instance observes.
    private readonly ConcurrentDictionary<string, SemaphoreSlim> fileLocks = new(StringComparer.Ordinal);

    public FilePackRunLogStore(string rootPath)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(rootPath);
        this.rootPath = Path.GetFullPath(rootPath);
        Directory.CreateDirectory(this.rootPath);
    }

    /// <summary>Appends <paramref name="entry"/> as one JSON line to the run's log file.</summary>
    public async Task AppendAsync(string runId, PackRunLogEntry entry, CancellationToken cancellationToken)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(runId);
        ArgumentNullException.ThrowIfNull(entry);

        var path = GetPath(runId);
        Directory.CreateDirectory(Path.GetDirectoryName(path)!);

        var gate = fileLocks.GetOrAdd(path, _ => new SemaphoreSlim(1, 1));
        await gate.WaitAsync(cancellationToken).ConfigureAwait(false);
        try
        {
            var document = PackRunLogEntryDocument.FromDomain(entry);
            var json = JsonSerializer.Serialize(document, SerializerOptions);
            await File.AppendAllTextAsync(path, json + Environment.NewLine, cancellationToken).ConfigureAwait(false);
        }
        finally
        {
            gate.Release();
        }
    }

    /// <summary>Streams the run's entries in file order; malformed lines are skipped.</summary>
    public async IAsyncEnumerable<PackRunLogEntry> ReadAsync(
        string runId,
        [EnumeratorCancellation] CancellationToken cancellationToken)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(runId);
        var path = GetPath(runId);
        if (!File.Exists(path))
        {
            yield break;
        }

        // FileShare.ReadWrite lets readers run concurrently with an active appender;
        // FileOptions.Asynchronous makes the awaited reads genuinely asynchronous.
        await using var stream = new FileStream(
            path,
            FileMode.Open,
            FileAccess.Read,
            FileShare.ReadWrite,
            bufferSize: 4096,
            FileOptions.Asynchronous | FileOptions.SequentialScan);
        using var reader = new StreamReader(stream, Encoding.UTF8);

        while (true)
        {
            // Pass the token so a pending read is cancelled promptly instead of only
            // between lines (previously the token was checked manually per iteration).
            var line = await reader.ReadLineAsync(cancellationToken).ConfigureAwait(false);
            if (line is null)
            {
                yield break;
            }

            if (string.IsNullOrWhiteSpace(line))
            {
                continue;
            }

            PackRunLogEntryDocument? document = null;
            try
            {
                document = JsonSerializer.Deserialize<PackRunLogEntryDocument>(line, SerializerOptions);
            }
            catch
            {
                // Skip malformed entries to avoid stopping the stream; diagnostics are captured via worker logs.
            }

            if (document is null)
            {
                continue;
            }

            yield return document.ToDomain();
        }
    }

    /// <summary>Returns true when a log file exists for the run (even if empty).</summary>
    public Task<bool> ExistsAsync(string runId, CancellationToken cancellationToken)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(runId);
        var path = GetPath(runId);
        return Task.FromResult(File.Exists(path));
    }

    private string GetPath(string runId)
    {
        var safe = Sanitize(runId);
        return Path.Combine(rootPath, $"{safe}.ndjson");
    }

    // Replaces characters invalid in file names so arbitrary run ids map to safe paths.
    private static string Sanitize(string value)
    {
        var result = value.Trim();
        foreach (var invalid in Path.GetInvalidFileNameChars())
        {
            result = result.Replace(invalid, '_');
        }

        return string.IsNullOrWhiteSpace(result) ? "run" : result;
    }

    // On-disk serialization shape, kept separate from the domain record so the file
    // contract can evolve independently.
    private sealed record PackRunLogEntryDocument(
        DateTimeOffset Timestamp,
        string Level,
        string EventType,
        string Message,
        string? StepId,
        Dictionary<string, string>? Metadata)
    {
        public static PackRunLogEntryDocument FromDomain(PackRunLogEntry entry)
        {
            var metadata = entry.Metadata is null
                ? null
                : new Dictionary<string, string>(entry.Metadata, StringComparer.Ordinal);
            return new PackRunLogEntryDocument(
                entry.Timestamp,
                entry.Level,
                entry.EventType,
                entry.Message,
                entry.StepId,
                metadata);
        }

        public PackRunLogEntry ToDomain()
        {
            IReadOnlyDictionary<string, string>? metadata = Metadata is null
                ? null
                : new Dictionary<string, string>(Metadata, StringComparer.Ordinal);
            return new PackRunLogEntry(
                Timestamp,
                Level,
                EventType,
                Message,
                StepId,
                metadata);
        }
    }
}

View File

@@ -0,0 +1,164 @@
using MongoDB.Bson;
using MongoDB.Bson.Serialization.Attributes;
using MongoDB.Driver;
using StellaOps.TaskRunner.Core.Configuration;
using StellaOps.TaskRunner.Core.Execution;
namespace StellaOps.TaskRunner.Infrastructure.Execution;
/// <summary>
/// MongoDB-backed store for pack run approval state. Documents are keyed by
/// (RunId, ApprovalId) via a unique index; SaveAsync replaces the full set for a run.
/// </summary>
public sealed class MongoPackRunApprovalStore : IPackRunApprovalStore
{
    private readonly IMongoCollection<PackRunApprovalDocument> collection;

    public MongoPackRunApprovalStore(IMongoDatabase database, TaskRunnerMongoOptions options)
    {
        ArgumentNullException.ThrowIfNull(database);
        ArgumentNullException.ThrowIfNull(options);
        collection = database.GetCollection<PackRunApprovalDocument>(options.ApprovalsCollection);
        // Index creation is idempotent, so doing it on construction keeps setup self-contained.
        EnsureIndexes(collection);
    }

    /// <summary>Replaces all stored approvals for the run with the supplied set.</summary>
    public async Task SaveAsync(string runId, IReadOnlyList<PackRunApprovalState> approvals, CancellationToken cancellationToken)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(runId);
        ArgumentNullException.ThrowIfNull(approvals);
        var filter = Builders<PackRunApprovalDocument>.Filter.Eq(document => document.RunId, runId);
        // NOTE(review): delete-then-insert is not transactional; a crash between the two
        // operations leaves the run with no approvals — confirm callers tolerate/retry this.
        await collection.DeleteManyAsync(filter, cancellationToken).ConfigureAwait(false);
        if (approvals.Count == 0)
        {
            return;
        }

        var documents = approvals
            .Select(approval => PackRunApprovalDocument.FromDomain(runId, approval))
            .ToList();
        await collection.InsertManyAsync(documents, cancellationToken: cancellationToken).ConfigureAwait(false);
    }

    /// <summary>Returns the run's approvals sorted by ApprovalId for deterministic output.</summary>
    public async Task<IReadOnlyList<PackRunApprovalState>> GetAsync(string runId, CancellationToken cancellationToken)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(runId);
        var filter = Builders<PackRunApprovalDocument>.Filter.Eq(document => document.RunId, runId);
        var documents = await collection
            .Find(filter)
            .SortBy(document => document.ApprovalId)
            .ToListAsync(cancellationToken)
            .ConfigureAwait(false);
        return documents
            .Select(document => document.ToDomain())
            .ToList();
    }

    /// <summary>
    /// Replaces a single stored approval, preserving its original document id.
    /// Throws <see cref="InvalidOperationException"/> when the approval was never stored.
    /// </summary>
    public async Task UpdateAsync(string runId, PackRunApprovalState approval, CancellationToken cancellationToken)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(runId);
        ArgumentNullException.ThrowIfNull(approval);
        var filter = Builders<PackRunApprovalDocument>.Filter.And(
            Builders<PackRunApprovalDocument>.Filter.Eq(document => document.RunId, runId),
            Builders<PackRunApprovalDocument>.Filter.Eq(document => document.ApprovalId, approval.ApprovalId));
        var existingDocument = await collection
            .Find(filter)
            .FirstOrDefaultAsync(cancellationToken)
            .ConfigureAwait(false);
        if (existingDocument is null)
        {
            throw new InvalidOperationException($"Approval '{approval.ApprovalId}' not found for run '{runId}'.");
        }

        // Reuse the existing _id so the replace targets the same document.
        var document = PackRunApprovalDocument.FromDomain(runId, approval, existingDocument.Id);
        await collection
            .ReplaceOneAsync(filter, document, cancellationToken: cancellationToken)
            .ConfigureAwait(false);
    }

    // Unique (RunId, ApprovalId) enforces one document per approval; the
    // (RunId, Status) index supports status-filtered queries.
    private static void EnsureIndexes(IMongoCollection<PackRunApprovalDocument> target)
    {
        var models = new[]
        {
            new CreateIndexModel<PackRunApprovalDocument>(
                Builders<PackRunApprovalDocument>.IndexKeys
                    .Ascending(document => document.RunId)
                    .Ascending(document => document.ApprovalId),
                new CreateIndexOptions { Unique = true }),
            new CreateIndexModel<PackRunApprovalDocument>(
                Builders<PackRunApprovalDocument>.IndexKeys
                    .Ascending(document => document.RunId)
                    .Ascending(document => document.Status))
        };
        target.Indexes.CreateMany(models);
    }

    // Persistence shape. Timestamps are stored as UTC DateTime and restored with a
    // zero offset; the status enum is stored as its string name.
    private sealed class PackRunApprovalDocument
    {
        [BsonId]
        public ObjectId Id { get; init; }
        public string RunId { get; init; } = default!;
        public string ApprovalId { get; init; } = default!;
        public IReadOnlyList<string> RequiredGrants { get; init; } = Array.Empty<string>();
        public IReadOnlyList<string> StepIds { get; init; } = Array.Empty<string>();
        public IReadOnlyList<string> Messages { get; init; } = Array.Empty<string>();
        public string? ReasonTemplate { get; init; }
        public DateTime RequestedAt { get; init; }
        public string Status { get; init; } = default!;
        public string? ActorId { get; init; }
        public DateTime? CompletedAt { get; init; }
        public string? Summary { get; init; }

        public static PackRunApprovalDocument FromDomain(string runId, PackRunApprovalState approval, ObjectId? id = null)
            => new()
            {
                Id = id ?? ObjectId.GenerateNewId(),
                RunId = runId,
                ApprovalId = approval.ApprovalId,
                RequiredGrants = approval.RequiredGrants ?? Array.Empty<string>(),
                StepIds = approval.StepIds ?? Array.Empty<string>(),
                Messages = approval.Messages ?? Array.Empty<string>(),
                ReasonTemplate = approval.ReasonTemplate,
                RequestedAt = approval.RequestedAt.UtcDateTime,
                Status = approval.Status.ToString(),
                ActorId = approval.ActorId,
                CompletedAt = approval.CompletedAt?.UtcDateTime,
                Summary = approval.Summary
            };

        public PackRunApprovalState ToDomain()
        {
            var status = Enum.Parse<PackRunApprovalStatus>(Status, ignoreCase: true);
            return new PackRunApprovalState(
                ApprovalId,
                RequiredGrants?.ToList() ?? new List<string>(),
                StepIds?.ToList() ?? new List<string>(),
                Messages?.ToList() ?? new List<string>(),
                ReasonTemplate,
                new DateTimeOffset(RequestedAt, TimeSpan.Zero),
                status,
                ActorId,
                CompletedAt is null ? null : new DateTimeOffset(CompletedAt.Value, TimeSpan.Zero),
                Summary);
        }
    }
}

View File

@@ -0,0 +1,193 @@
using System.Text.Json;
using System.Text.Json.Nodes;
using Microsoft.Extensions.Logging;
using MongoDB.Bson;
using MongoDB.Bson.Serialization.Attributes;
using MongoDB.Driver;
using StellaOps.TaskRunner.Core.Configuration;
using StellaOps.TaskRunner.Core.Execution;
using StellaOps.TaskRunner.Core.Planning;
namespace StellaOps.TaskRunner.Infrastructure.Execution;
/// <summary>
/// Records pack run plan outputs as artifact documents in MongoDB. File outputs are
/// referenced by path (the file content itself is not copied); expression outputs are
/// materialized as embedded BSON.
/// </summary>
public sealed class MongoPackRunArtifactUploader : IPackRunArtifactUploader
{
    private static readonly JsonSerializerOptions SerializerOptions = new(JsonSerializerDefaults.Web);

    private readonly IMongoCollection<PackRunArtifactDocument> collection;
    private readonly TimeProvider timeProvider;
    private readonly ILogger<MongoPackRunArtifactUploader> logger;

    public MongoPackRunArtifactUploader(
        IMongoDatabase database,
        TaskRunnerMongoOptions options,
        TimeProvider? timeProvider,
        ILogger<MongoPackRunArtifactUploader> logger)
    {
        ArgumentNullException.ThrowIfNull(database);
        ArgumentNullException.ThrowIfNull(options);
        collection = database.GetCollection<PackRunArtifactDocument>(options.ArtifactsCollection);
        this.timeProvider = timeProvider ?? TimeProvider.System;
        this.logger = logger ?? throw new ArgumentNullException(nameof(logger));
        EnsureIndexes(collection);
    }

    /// <summary>
    /// Replaces all artifact documents for the run with one document per plan output,
    /// all stamped with the same capture timestamp for determinism.
    /// </summary>
    public async Task UploadAsync(
        PackRunExecutionContext context,
        PackRunState state,
        IReadOnlyList<TaskPackPlanOutput> outputs,
        CancellationToken cancellationToken)
    {
        ArgumentNullException.ThrowIfNull(context);
        ArgumentNullException.ThrowIfNull(state);
        ArgumentNullException.ThrowIfNull(outputs);
        var filter = Builders<PackRunArtifactDocument>.Filter.Eq(document => document.RunId, context.RunId);
        await collection.DeleteManyAsync(filter, cancellationToken).ConfigureAwait(false);
        if (outputs.Count == 0)
        {
            return;
        }

        var timestamp = timeProvider.GetUtcNow();
        var documents = new List<PackRunArtifactDocument>(outputs.Count);
        foreach (var output in outputs)
        {
            cancellationToken.ThrowIfCancellationRequested();
            documents.Add(ProcessOutput(context, output, timestamp));
        }
        await collection.InsertManyAsync(documents, cancellationToken: cancellationToken).ConfigureAwait(false);
    }

    // Classifies one output: file outputs become "referenced"/"missing"/"unresolved";
    // resolved expressions are embedded; anything else stays "skipped".
    private PackRunArtifactDocument ProcessOutput(
        PackRunExecutionContext context,
        TaskPackPlanOutput output,
        DateTimeOffset capturedAt)
    {
        var sourcePath = ResolveString(output.Path);
        var expressionNode = ResolveExpression(output.Expression);
        string status = "skipped";
        string? notes = null;
        string? storedPath = null;
        if (IsFileOutput(output))
        {
            if (string.IsNullOrWhiteSpace(sourcePath))
            {
                status = "unresolved";
                notes = "Output path requires runtime value.";
            }
            else if (!File.Exists(sourcePath))
            {
                status = "missing";
                notes = $"Source file '{sourcePath}' not found.";
                logger.LogWarning(
                    "Pack run {RunId} output {Output} referenced missing file {Path}.",
                    context.RunId,
                    output.Name,
                    sourcePath);
            }
            else
            {
                // The file is referenced in place; its bytes are not copied into Mongo.
                status = "referenced";
                storedPath = sourcePath;
            }
        }

        BsonDocument? expressionDocument = null;
        if (expressionNode is not null)
        {
            var json = expressionNode.ToJsonString(SerializerOptions);
            expressionDocument = BsonDocument.Parse(json);
            // NOTE(review): a present expression upgrades any non-"referenced" status
            // (including "missing"/"unresolved") to "materialized" — confirm intended.
            status = status is "referenced" ? status : "materialized";
        }

        return new PackRunArtifactDocument
        {
            Id = ObjectId.GenerateNewId(),
            RunId = context.RunId,
            Name = output.Name,
            Type = output.Type,
            SourcePath = sourcePath,
            StoredPath = storedPath,
            Status = status,
            Notes = notes,
            CapturedAt = capturedAt.UtcDateTime,
            Expression = expressionDocument
        };
    }

    private static bool IsFileOutput(TaskPackPlanOutput output)
        => string.Equals(output.Type, "file", StringComparison.OrdinalIgnoreCase);

    // Returns the parameter's string value, or null when it is absent, unresolved at
    // plan time, or not a JSON string.
    private static string? ResolveString(TaskPackPlanParameterValue? parameter)
    {
        if (parameter is null || parameter.RequiresRuntimeValue || parameter.Value is null)
        {
            return null;
        }

        if (parameter.Value is JsonValue jsonValue && jsonValue.TryGetValue<string>(out var value))
        {
            return value;
        }

        return null;
    }

    // Returns the raw expression node unless it still requires a runtime value.
    private static JsonNode? ResolveExpression(TaskPackPlanParameterValue? parameter)
    {
        if (parameter is null || parameter.RequiresRuntimeValue)
        {
            return null;
        }

        return parameter.Value;
    }

    // Unique (RunId, Name) guarantees one artifact per output name per run.
    private static void EnsureIndexes(IMongoCollection<PackRunArtifactDocument> target)
    {
        var models = new[]
        {
            new CreateIndexModel<PackRunArtifactDocument>(
                Builders<PackRunArtifactDocument>.IndexKeys
                    .Ascending(document => document.RunId)
                    .Ascending(document => document.Name),
                new CreateIndexOptions { Unique = true }),
            new CreateIndexModel<PackRunArtifactDocument>(
                Builders<PackRunArtifactDocument>.IndexKeys
                    .Ascending(document => document.RunId)
                    .Ascending(document => document.Status))
        };
        target.Indexes.CreateMany(models);
    }

    // Persistence shape; public so callers/tests can query artifact documents directly.
    public sealed class PackRunArtifactDocument
    {
        [BsonId]
        public ObjectId Id { get; init; }
        public string RunId { get; init; } = default!;
        public string Name { get; init; } = default!;
        public string Type { get; init; } = default!;
        public string? SourcePath { get; init; }
        public string? StoredPath { get; init; }
        public string Status { get; init; } = default!;
        public string? Notes { get; init; }
        public DateTime CapturedAt { get; init; }
        public BsonDocument? Expression { get; init; }
    }
}

View File

@@ -0,0 +1,162 @@
using MongoDB.Bson;
using MongoDB.Bson.Serialization.Attributes;
using MongoDB.Driver;
using StellaOps.TaskRunner.Core.Configuration;
using StellaOps.TaskRunner.Core.Execution;
namespace StellaOps.TaskRunner.Infrastructure.Execution;
/// <summary>
/// MongoDB-backed pack run log store. Each entry carries a per-run monotonically
/// increasing sequence number, enforced by a unique (RunId, Sequence) index, so reads
/// are deterministically ordered.
/// </summary>
public sealed class MongoPackRunLogStore : IPackRunLogStore
{
    private readonly IMongoCollection<PackRunLogDocument> collection;

    public MongoPackRunLogStore(IMongoDatabase database, TaskRunnerMongoOptions options)
    {
        ArgumentNullException.ThrowIfNull(database);
        ArgumentNullException.ThrowIfNull(options);
        collection = database.GetCollection<PackRunLogDocument>(options.LogsCollection);
        EnsureIndexes(collection);
    }

    /// <summary>
    /// Appends one entry using optimistic sequence allocation: read the highest stored
    /// sequence, insert at sequence+1, and retry when a concurrent writer wins the slot
    /// (detected via the unique-index duplicate-key error).
    /// </summary>
    public async Task AppendAsync(string runId, PackRunLogEntry entry, CancellationToken cancellationToken)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(runId);
        ArgumentNullException.ThrowIfNull(entry);
        var filter = Builders<PackRunLogDocument>.Filter.Eq(document => document.RunId, runId);
        for (var attempt = 0; attempt < 5; attempt++)
        {
            cancellationToken.ThrowIfCancellationRequested();
            var last = await collection
                .Find(filter)
                .SortByDescending(document => document.Sequence)
                .FirstOrDefaultAsync(cancellationToken)
                .ConfigureAwait(false);
            var nextSequence = last is null ? 1 : last.Sequence + 1;
            var document = PackRunLogDocument.FromDomain(runId, nextSequence, entry);
            try
            {
                await collection.InsertOneAsync(document, cancellationToken: cancellationToken).ConfigureAwait(false);
                return;
            }
            catch (MongoWriteException ex) when (ex.WriteError?.Category == ServerErrorCategory.DuplicateKey)
            {
                // Another writer claimed this sequence; back off briefly and retry.
                await Task.Delay(TimeSpan.FromMilliseconds(10), cancellationToken).ConfigureAwait(false);
            }
        }

        // Give up after five collisions rather than spinning under sustained contention.
        throw new InvalidOperationException($"Failed to append log entry for run '{runId}' after multiple attempts.");
    }

    /// <summary>Streams the run's entries ordered by sequence number.</summary>
    public async IAsyncEnumerable<PackRunLogEntry> ReadAsync(
        string runId,
        [System.Runtime.CompilerServices.EnumeratorCancellation] CancellationToken cancellationToken)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(runId);
        var filter = Builders<PackRunLogDocument>.Filter.Eq(document => document.RunId, runId);
        using var cursor = await collection
            .Find(filter)
            .SortBy(document => document.Sequence)
            .ToCursorAsync(cancellationToken)
            .ConfigureAwait(false);
        while (await cursor.MoveNextAsync(cancellationToken).ConfigureAwait(false))
        {
            foreach (var document in cursor.Current)
            {
                yield return document.ToDomain();
            }
        }
    }

    /// <summary>True when at least one entry exists for the run.</summary>
    public async Task<bool> ExistsAsync(string runId, CancellationToken cancellationToken)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(runId);
        var filter = Builders<PackRunLogDocument>.Filter.Eq(document => document.RunId, runId);
        return await collection
            .Find(filter)
            .Limit(1)
            .AnyAsync(cancellationToken)
            .ConfigureAwait(false);
    }

    // The unique (RunId, Sequence) index backs the optimistic append above;
    // (RunId, Timestamp) supports time-ranged queries.
    private static void EnsureIndexes(IMongoCollection<PackRunLogDocument> target)
    {
        var models = new[]
        {
            new CreateIndexModel<PackRunLogDocument>(
                Builders<PackRunLogDocument>.IndexKeys
                    .Ascending(document => document.RunId)
                    .Ascending(document => document.Sequence),
                new CreateIndexOptions { Unique = true }),
            new CreateIndexModel<PackRunLogDocument>(
                Builders<PackRunLogDocument>.IndexKeys
                    .Ascending(document => document.RunId)
                    .Ascending(document => document.Timestamp))
        };
        target.Indexes.CreateMany(models);
    }

    // Persistence shape; timestamps are stored as UTC DateTime and restored with a zero offset.
    public sealed class PackRunLogDocument
    {
        [BsonId]
        public ObjectId Id { get; init; }
        public string RunId { get; init; } = default!;
        public long Sequence { get; init; }
        public DateTime Timestamp { get; init; }
        public string Level { get; init; } = default!;
        public string EventType { get; init; } = default!;
        public string Message { get; init; } = default!;
        public string? StepId { get; init; }
        public Dictionary<string, string>? Metadata { get; init; }

        public static PackRunLogDocument FromDomain(string runId, long sequence, PackRunLogEntry entry)
            => new()
            {
                Id = ObjectId.GenerateNewId(),
                RunId = runId,
                Sequence = sequence,
                Timestamp = entry.Timestamp.UtcDateTime,
                Level = entry.Level,
                EventType = entry.EventType,
                Message = entry.Message,
                StepId = entry.StepId,
                Metadata = entry.Metadata is null
                    ? null
                    : new Dictionary<string, string>(entry.Metadata, StringComparer.Ordinal)
            };

        public PackRunLogEntry ToDomain()
        {
            IReadOnlyDictionary<string, string>? metadata = Metadata is null
                ? null
                : new Dictionary<string, string>(Metadata, StringComparer.Ordinal);
            return new PackRunLogEntry(
                new DateTimeOffset(Timestamp, TimeSpan.Zero),
                Level,
                EventType,
                Message,
                StepId,
                metadata);
        }
    }
}

View File

@@ -0,0 +1,209 @@
using System.Collections.ObjectModel;
using System.Text.Json;
using MongoDB.Bson;
using MongoDB.Bson.Serialization.Attributes;
using MongoDB.Driver;
using StellaOps.TaskRunner.Core.Configuration;
using StellaOps.TaskRunner.Core.Execution;
using StellaOps.TaskRunner.Core.Planning;
namespace StellaOps.TaskRunner.Infrastructure.Execution;
/// <summary>
/// MongoDB-backed store for <see cref="PackRunState"/> snapshots, keyed by run id.
/// The plan and failure policy are persisted as embedded BSON produced by
/// round-tripping through System.Text.Json.
/// </summary>
public sealed class MongoPackRunStateStore : IPackRunStateStore
{
    private static readonly JsonSerializerOptions SerializerOptions = new(JsonSerializerDefaults.Web);

    private readonly IMongoCollection<PackRunStateDocument> collection;

    public MongoPackRunStateStore(IMongoDatabase database, TaskRunnerMongoOptions options)
    {
        ArgumentNullException.ThrowIfNull(database);
        ArgumentNullException.ThrowIfNull(options);
        collection = database.GetCollection<PackRunStateDocument>(options.RunsCollection);
        EnsureIndexes(collection);
    }

    /// <summary>Returns the stored state for the run, or null when it does not exist.</summary>
    public async Task<PackRunState?> GetAsync(string runId, CancellationToken cancellationToken)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(runId);
        var filter = Builders<PackRunStateDocument>.Filter.Eq(document => document.RunId, runId);
        var document = await collection
            .Find(filter)
            .FirstOrDefaultAsync(cancellationToken)
            .ConfigureAwait(false);
        return document?.ToDomain();
    }

    /// <summary>Upserts the run state, replacing any previous snapshot for the same run id.</summary>
    public async Task SaveAsync(PackRunState state, CancellationToken cancellationToken)
    {
        ArgumentNullException.ThrowIfNull(state);
        var document = PackRunStateDocument.FromDomain(state);
        var filter = Builders<PackRunStateDocument>.Filter.Eq(existing => existing.RunId, state.RunId);
        await collection
            .ReplaceOneAsync(filter, document, new ReplaceOptions { IsUpsert = true }, cancellationToken)
            .ConfigureAwait(false);
    }

    /// <summary>Lists all stored run states, most recently updated first.</summary>
    public async Task<IReadOnlyList<PackRunState>> ListAsync(CancellationToken cancellationToken)
    {
        var documents = await collection
            .Find(FilterDefinition<PackRunStateDocument>.Empty)
            .SortByDescending(document => document.UpdatedAt)
            .ToListAsync(cancellationToken)
            .ConfigureAwait(false);
        return documents
            .Select(document => document.ToDomain())
            .ToList();
    }

    // UpdatedAt descending backs ListAsync; PlanHash supports lookups by plan content.
    private static void EnsureIndexes(IMongoCollection<PackRunStateDocument> target)
    {
        var models = new[]
        {
            new CreateIndexModel<PackRunStateDocument>(
                Builders<PackRunStateDocument>.IndexKeys.Descending(document => document.UpdatedAt)),
            new CreateIndexModel<PackRunStateDocument>(
                Builders<PackRunStateDocument>.IndexKeys.Ascending(document => document.PlanHash))
        };
        target.Indexes.CreateMany(models);
    }

    // Persistence shape; RunId doubles as the document _id.
    private sealed class PackRunStateDocument
    {
        [BsonId]
        public string RunId { get; init; } = default!;
        public string PlanHash { get; init; } = default!;
        public BsonDocument Plan { get; init; } = default!;
        public BsonDocument FailurePolicy { get; init; } = default!;
        public DateTime RequestedAt { get; init; }
        public DateTime CreatedAt { get; init; }
        public DateTime UpdatedAt { get; init; }
        public List<PackRunStepDocument> Steps { get; init; } = new();

        public static PackRunStateDocument FromDomain(PackRunState state)
        {
            // Serialize plan/policy with System.Text.Json, then parse into BSON so they
            // are stored as queryable documents rather than opaque strings.
            var planDocument = BsonDocument.Parse(JsonSerializer.Serialize(state.Plan, SerializerOptions));
            var failurePolicyDocument = BsonDocument.Parse(JsonSerializer.Serialize(state.FailurePolicy, SerializerOptions));
            // Steps are ordered by id so the stored document is deterministic.
            var steps = state.Steps.Values
                .OrderBy(step => step.StepId, StringComparer.Ordinal)
                .Select(PackRunStepDocument.FromDomain)
                .ToList();
            return new PackRunStateDocument
            {
                RunId = state.RunId,
                PlanHash = state.PlanHash,
                Plan = planDocument,
                FailurePolicy = failurePolicyDocument,
                RequestedAt = state.RequestedAt.UtcDateTime,
                CreatedAt = state.CreatedAt.UtcDateTime,
                UpdatedAt = state.UpdatedAt.UtcDateTime,
                Steps = steps
            };
        }

        public PackRunState ToDomain()
        {
            // NOTE(review): BsonDocument.ToJson() emits MongoDB Extended JSON; confirm
            // System.Text.Json round-trips every plan value stored this way (numeric
            // wrappers like NumberLong would not deserialize cleanly).
            var planJson = Plan.ToJson();
            var plan = JsonSerializer.Deserialize<TaskPackPlan>(planJson, SerializerOptions)
                ?? throw new InvalidOperationException("Failed to deserialize stored TaskPackPlan.");
            var failurePolicyJson = FailurePolicy.ToJson();
            var failurePolicy = JsonSerializer.Deserialize<TaskPackPlanFailurePolicy>(failurePolicyJson, SerializerOptions)
                ?? throw new InvalidOperationException("Failed to deserialize stored TaskPackPlanFailurePolicy.");
            var stepRecords = Steps
                .Select(step => step.ToDomain())
                .ToDictionary(record => record.StepId, record => record, StringComparer.Ordinal);
            return new PackRunState(
                RunId,
                PlanHash,
                plan,
                failurePolicy,
                new DateTimeOffset(RequestedAt, TimeSpan.Zero),
                new DateTimeOffset(CreatedAt, TimeSpan.Zero),
                new DateTimeOffset(UpdatedAt, TimeSpan.Zero),
                new ReadOnlyDictionary<string, PackRunStepStateRecord>(stepRecords));
        }
    }

    // Per-step persistence shape; enums are stored as string names and re-parsed
    // case-insensitively on load.
    private sealed class PackRunStepDocument
    {
        public string StepId { get; init; } = default!;
        public string Kind { get; init; } = default!;
        public bool Enabled { get; init; }
        public bool ContinueOnError { get; init; }
        public int? MaxParallel { get; init; }
        public string? ApprovalId { get; init; }
        public string? GateMessage { get; init; }
        public string Status { get; init; } = default!;
        public int Attempts { get; init; }
        public DateTime? LastTransitionAt { get; init; }
        public DateTime? NextAttemptAt { get; init; }
        public string? StatusReason { get; init; }

        public static PackRunStepDocument FromDomain(PackRunStepStateRecord record)
            => new()
            {
                StepId = record.StepId,
                Kind = record.Kind.ToString(),
                Enabled = record.Enabled,
                ContinueOnError = record.ContinueOnError,
                MaxParallel = record.MaxParallel,
                ApprovalId = record.ApprovalId,
                GateMessage = record.GateMessage,
                Status = record.Status.ToString(),
                Attempts = record.Attempts,
                LastTransitionAt = record.LastTransitionAt?.UtcDateTime,
                NextAttemptAt = record.NextAttemptAt?.UtcDateTime,
                StatusReason = record.StatusReason
            };

        public PackRunStepStateRecord ToDomain()
        {
            var kind = Enum.Parse<PackRunStepKind>(Kind, ignoreCase: true);
            var status = Enum.Parse<PackRunStepExecutionStatus>(Status, ignoreCase: true);
            return new PackRunStepStateRecord(
                StepId,
                kind,
                Enabled,
                ContinueOnError,
                MaxParallel,
                ApprovalId,
                GateMessage,
                status,
                Attempts,
                LastTransitionAt is null ? null : new DateTimeOffset(LastTransitionAt.Value, TimeSpan.Zero),
                NextAttemptAt is null ? null : new DateTimeOffset(NextAttemptAt.Value, TimeSpan.Zero),
                StatusReason);
        }
    }
}

View File

@@ -3,6 +3,7 @@
<ItemGroup>
<PackageReference Include="Microsoft.Extensions.Http" Version="10.0.0-rc.2.25502.107" />
<PackageReference Include="Microsoft.Extensions.Logging.Abstractions" Version="10.0.0-rc.2.25502.107" />
<PackageReference Include="MongoDB.Driver" Version="3.5.0" />
<ProjectReference Include="..\StellaOps.TaskRunner.Core\StellaOps.TaskRunner.Core.csproj" />
</ItemGroup>

View File

@@ -0,0 +1,88 @@
using StellaOps.TaskRunner.Core.Execution;
using StellaOps.TaskRunner.Infrastructure.Execution;
namespace StellaOps.TaskRunner.Tests;
/// <summary>
/// Exercises <c>FilePackRunLogStore</c> round-trip ordering and existence checks
/// against an isolated temporary directory.
/// </summary>
public sealed class FilePackRunLogStoreTests : IDisposable
{
    private readonly string rootPath;

    public FilePackRunLogStoreTests()
    {
        // Fresh directory per instance keeps parallel test runs isolated.
        rootPath = Path.Combine(Path.GetTempPath(), "StellaOps_TaskRunnerTests", Guid.NewGuid().ToString("n"));
    }

    [Fact]
    public async Task AppendAndReadAsync_RoundTripsEntriesInOrder()
    {
        var store = new FilePackRunLogStore(rootPath);
        const string runId = "run-append-test";

        var createdEntry = new PackRunLogEntry(
            DateTimeOffset.UtcNow,
            "info",
            "run.created",
            "Run created.",
            StepId: null,
            Metadata: new Dictionary<string, string>(StringComparer.Ordinal)
            {
                ["planHash"] = "hash-1"
            });
        var startedEntry = new PackRunLogEntry(
            DateTimeOffset.UtcNow.AddSeconds(1),
            "info",
            "step.started",
            "Step started.",
            StepId: "build",
            Metadata: null);

        await store.AppendAsync(runId, createdEntry, CancellationToken.None);
        await store.AppendAsync(runId, startedEntry, CancellationToken.None);

        var replayed = new List<PackRunLogEntry>();
        await foreach (var entry in store.ReadAsync(runId, CancellationToken.None))
        {
            replayed.Add(entry);
        }

        // Entries must come back in append order with metadata intact.
        Assert.Collection(
            replayed,
            entry =>
            {
                Assert.Equal("run.created", entry.EventType);
                Assert.NotNull(entry.Metadata);
                Assert.Equal("hash-1", entry.Metadata!["planHash"]);
            },
            entry =>
            {
                Assert.Equal("step.started", entry.EventType);
                Assert.Equal("build", entry.StepId);
            });
    }

    [Fact]
    public async Task ExistsAsync_ReturnsFalseWhenNoLogPresent()
    {
        var store = new FilePackRunLogStore(rootPath);

        Assert.False(await store.ExistsAsync("missing-run", CancellationToken.None));
    }

    public void Dispose()
    {
        try
        {
            if (Directory.Exists(rootPath))
            {
                Directory.Delete(rootPath, recursive: true);
            }
        }
        catch
        {
            // Ignore cleanup failures to keep tests deterministic.
        }
    }
}

View File

@@ -0,0 +1,196 @@
using System.Text.Json.Nodes;
using Microsoft.Extensions.Logging.Abstractions;
using MongoDB.Driver;
using StellaOps.TaskRunner.Core.Execution;
using StellaOps.TaskRunner.Core.Execution.Simulation;
using StellaOps.TaskRunner.Core.Planning;
using StellaOps.TaskRunner.Core.TaskPacks;
using StellaOps.TaskRunner.Infrastructure.Execution;
using Xunit;
using Xunit.Sdk;
namespace StellaOps.TaskRunner.Tests;
// Integration tests for the Mongo-backed pack-run stores. Each test acquires its
// own database via MongoTaskRunnerTestContext (external URI or embedded Mongo2Go)
// and is skipped when no MongoDB is reachable.
public sealed class MongoPackRunStoresTests
{
// Saves an initial run state and verifies the reloaded copy matches on id,
// plan hash, and step count.
[Fact]
public async Task StateStore_RoundTrips_State()
{
using var context = MongoTaskRunnerTestContext.Create();
var mongoOptions = context.CreateMongoOptions();
var stateStore = new MongoPackRunStateStore(context.Database, mongoOptions);
var plan = CreatePlan();
var executionContext = new PackRunExecutionContext("mongo-run-state", plan, DateTimeOffset.UtcNow);
var graph = new PackRunExecutionGraphBuilder().Build(plan);
var simulationEngine = new PackRunSimulationEngine();
var state = PackRunStateFactory.CreateInitialState(executionContext, graph, simulationEngine, DateTimeOffset.UtcNow);
await stateStore.SaveAsync(state, CancellationToken.None);
var reloaded = await stateStore.GetAsync(state.RunId, CancellationToken.None);
Assert.NotNull(reloaded);
Assert.Equal(state.RunId, reloaded!.RunId);
Assert.Equal(state.PlanHash, reloaded.PlanHash);
Assert.Equal(state.Steps.Count, reloaded.Steps.Count);
}
// Appends two entries and verifies ReadAsync preserves append order and that
// ExistsAsync reports the run as present.
[Fact]
public async Task LogStore_Appends_And_Reads_In_Order()
{
using var context = MongoTaskRunnerTestContext.Create();
var mongoOptions = context.CreateMongoOptions();
var logStore = new MongoPackRunLogStore(context.Database, mongoOptions);
var runId = "mongo-log";
await logStore.AppendAsync(runId, new PackRunLogEntry(DateTimeOffset.UtcNow, "info", "run.created", "created", null, null), CancellationToken.None);
await logStore.AppendAsync(runId, new PackRunLogEntry(DateTimeOffset.UtcNow.AddSeconds(1), "warn", "step.retry", "retry", "step-a", new Dictionary<string, string> { ["attempt"] = "2" }), CancellationToken.None);
var entries = new List<PackRunLogEntry>();
await foreach (var entry in logStore.ReadAsync(runId, CancellationToken.None))
{
entries.Add(entry);
}
Assert.Equal(2, entries.Count);
Assert.Equal("run.created", entries[0].EventType);
Assert.Equal("step.retry", entries[1].EventType);
Assert.Equal("step-a", entries[1].StepId);
Assert.True(await logStore.ExistsAsync(runId, CancellationToken.None));
}
// Persists a pending approval, approves it via UpdateAsync, and verifies the
// status transition and actor are stored.
[Fact]
public async Task ApprovalStore_RoundTrips_And_Updates()
{
using var context = MongoTaskRunnerTestContext.Create();
var mongoOptions = context.CreateMongoOptions();
var approvalStore = new MongoPackRunApprovalStore(context.Database, mongoOptions);
var runId = "mongo-approvals";
var approval = new PackRunApprovalState(
"security-review",
new[] { "packs.approve" },
new[] { "step-plan" },
Array.Empty<string>(),
reasonTemplate: "Security approval required.",
DateTimeOffset.UtcNow,
PackRunApprovalStatus.Pending);
await approvalStore.SaveAsync(runId, new[] { approval }, CancellationToken.None);
var approvals = await approvalStore.GetAsync(runId, CancellationToken.None);
Assert.Single(approvals);
var updated = approval.Approve("approver", DateTimeOffset.UtcNow, "Approved");
await approvalStore.UpdateAsync(runId, updated, CancellationToken.None);
approvals = await approvalStore.GetAsync(runId, CancellationToken.None);
Assert.Single(approvals);
Assert.Equal(PackRunApprovalStatus.Approved, approvals[0].Status);
Assert.Equal("approver", approvals[0].ActorId);
}
// Uploads plan outputs backed by a real temp file and checks the stored
// artifact document's name, type, source path, and status.
[Fact]
public async Task ArtifactUploader_Persists_Metadata()
{
using var context = MongoTaskRunnerTestContext.Create();
var mongoOptions = context.CreateMongoOptions();
var database = context.Database;
var artifactUploader = new MongoPackRunArtifactUploader(
database,
mongoOptions,
TimeProvider.System,
NullLogger<MongoPackRunArtifactUploader>.Instance);
var plan = CreatePlanWithOutputs(out var outputFile);
try
{
var executionContext = new PackRunExecutionContext("mongo-artifacts", plan, DateTimeOffset.UtcNow);
var graph = new PackRunExecutionGraphBuilder().Build(plan);
var simulationEngine = new PackRunSimulationEngine();
var state = PackRunStateFactory.CreateInitialState(executionContext, graph, simulationEngine, DateTimeOffset.UtcNow);
await artifactUploader.UploadAsync(executionContext, state, plan.Outputs, CancellationToken.None);
var documents = await database
.GetCollection<MongoPackRunArtifactUploader.PackRunArtifactDocument>(mongoOptions.ArtifactsCollection)
.Find(Builders<MongoPackRunArtifactUploader.PackRunArtifactDocument>.Filter.Empty)
.ToListAsync(TestContext.Current.CancellationToken);
var bundleDocument = Assert.Single(documents, d => string.Equals(d.Name, "bundlePath", StringComparison.Ordinal));
Assert.Equal("file", bundleDocument.Type);
Assert.Equal(outputFile, bundleDocument.SourcePath);
Assert.Equal("referenced", bundleDocument.Status);
}
finally
{
// Always remove the temp output file, even if the test fails.
if (File.Exists(outputFile))
{
File.Delete(outputFile);
}
}
}
// Builds a plan from the shared sample manifest; skips the test when planning
// fails (the throw after Assert.Skip is unreachable and only satisfies flow analysis).
private static TaskPackPlan CreatePlan()
{
var manifest = TestManifests.Load(TestManifests.Sample);
var planner = new TaskPackPlanner();
var result = planner.Plan(manifest);
if (!result.Success || result.Plan is null)
{
Assert.Skip("Failed to build task pack plan for Mongo tests.");
throw new InvalidOperationException();
}
return result.Plan;
}
// Builds an output-bearing plan and rewrites its "bundlePath" output to point at
// a freshly created temp file; the caller is responsible for deleting outputFile.
private static TaskPackPlan CreatePlanWithOutputs(out string outputFile)
{
var manifest = TestManifests.Load(TestManifests.Output);
var planner = new TaskPackPlanner();
var result = planner.Plan(manifest);
if (!result.Success || result.Plan is null)
{
Assert.Skip("Failed to build output plan for Mongo tests.");
throw new InvalidOperationException();
}
// Materialize a fake output file referenced by the plan.
outputFile = Path.Combine(Path.GetTempPath(), $"taskrunner-output-{Guid.NewGuid():N}.txt");
File.WriteAllText(outputFile, "fixture");
// Update the plan output path parameter to point at the file we just created.
var originalPlan = result.Plan;
var resolvedFile = outputFile;
var outputs = originalPlan.Outputs
.Select(output =>
{
if (!string.Equals(output.Name, "bundlePath", StringComparison.Ordinal))
{
return output;
}
// Escape backslashes so Windows paths survive the JSON string literal.
var node = JsonNode.Parse($"\"{resolvedFile.Replace("\\", "\\\\")}\"");
var parameter = new TaskPackPlanParameterValue(node, null, null, false);
return output with { Path = parameter };
})
.ToArray();
return new TaskPackPlan(
originalPlan.Metadata,
originalPlan.Inputs,
originalPlan.Steps,
originalPlan.Hash,
originalPlan.Approvals,
originalPlan.Secrets,
outputs,
originalPlan.FailurePolicy);
}
}

View File

@@ -0,0 +1,89 @@
using Mongo2Go;
using MongoDB.Driver;
using StellaOps.TaskRunner.Core.Configuration;
using StellaOps.Testing;
using Xunit;
namespace StellaOps.TaskRunner.Tests;
/// <summary>
/// Test harness that provides a MongoDB database for TaskRunner store tests.
/// Prefers an external server named by STELLAOPS_TEST_MONGO_URI and falls back
/// to an embedded Mongo2Go instance; the calling test is skipped when neither
/// is usable.
/// </summary>
internal sealed class MongoTaskRunnerTestContext : IAsyncDisposable, IDisposable
{
    private readonly MongoDbRunner? runner;
    private readonly string databaseName;
    private readonly IMongoClient client;
    private readonly string connectionString;

    private MongoTaskRunnerTestContext(
        IMongoClient client,
        IMongoDatabase database,
        MongoDbRunner? runner,
        string databaseName,
        string connectionString)
    {
        this.client = client;
        Database = database;
        this.runner = runner;
        this.databaseName = databaseName;
        this.connectionString = connectionString;
    }

    public IMongoDatabase Database { get; }

    public static MongoTaskRunnerTestContext Create()
    {
        OpenSslLegacyShim.EnsureOpenSsl11();
        var uri = Environment.GetEnvironmentVariable("STELLAOPS_TEST_MONGO_URI");
        if (!string.IsNullOrWhiteSpace(uri))
        {
            try
            {
                var url = MongoUrl.Create(uri);
                var client = new MongoClient(url);
                // Use a throwaway database name unless the URI pins one explicitly.
                var databaseName = string.IsNullOrWhiteSpace(url.DatabaseName)
                    ? $"taskrunner-tests-{Guid.NewGuid():N}"
                    : url.DatabaseName;
                var database = client.GetDatabase(databaseName);
                return new MongoTaskRunnerTestContext(client, database, runner: null, databaseName, uri);
            }
            catch (Exception ex)
            {
                Assert.Skip($"Failed to connect to MongoDB using STELLAOPS_TEST_MONGO_URI: {ex.Message}");
                throw new InvalidOperationException(); // Unreachable
            }
        }

        try
        {
            var runner = MongoDbRunner.Start(singleNodeReplSet: false);
            var client = new MongoClient(runner.ConnectionString);
            var databaseName = $"taskrunner-tests-{Guid.NewGuid():N}";
            var database = client.GetDatabase(databaseName);
            return new MongoTaskRunnerTestContext(client, database, runner, databaseName, runner.ConnectionString);
        }
        catch (Exception ex)
        {
            Assert.Skip($"Unable to start embedded MongoDB (Mongo2Go): {ex.Message}");
            throw new InvalidOperationException(); // Unreachable
        }
    }

    public async ValueTask DisposeAsync()
    {
        try
        {
            await client.DropDatabaseAsync(databaseName);
        }
        catch
        {
            // Best-effort cleanup: the server may already be unreachable.
        }
        finally
        {
            // Fix: always dispose the embedded runner even when the drop throws,
            // otherwise the spawned mongod process leaks across test runs.
            runner?.Dispose();
        }
    }

    public void Dispose()
    {
        try
        {
            client.DropDatabase(databaseName);
        }
        catch
        {
            // Best-effort cleanup: the server may already be unreachable.
        }
        finally
        {
            runner?.Dispose();
        }
    }

    /// <summary>Builds store options pointing at this context's database.</summary>
    public TaskRunnerMongoOptions CreateMongoOptions()
        => new()
        {
            ConnectionString = connectionString,
            Database = databaseName
        };
}

View File

@@ -0,0 +1,40 @@
using StellaOps.TaskRunner.Core.Execution;
using StellaOps.TaskRunner.Core.Execution.Simulation;
using StellaOps.TaskRunner.Core.Planning;
using StellaOps.TaskRunner.Core.TaskPacks;
namespace StellaOps.TaskRunner.Tests;
/// <summary>
/// Verifies that <c>PackRunStateFactory.CreateInitialState</c> seeds gate
/// reasons on approval-gated steps while leaving plain steps reason-free.
/// </summary>
public sealed class PackRunStateFactoryTests
{
    [Fact]
    public void CreateInitialState_AssignsGateReasons()
    {
        // Plan the shared sample manifest fixture.
        var manifest = TestManifests.Load(TestManifests.Sample);
        var planResult = new TaskPackPlanner().Plan(manifest);
        Assert.True(planResult.Success);
        Assert.NotNull(planResult.Plan);
        var plan = planResult.Plan!;

        var context = new PackRunExecutionContext("run-state-factory", plan, DateTimeOffset.UtcNow);
        var graph = new PackRunExecutionGraphBuilder().Build(plan);
        var timestamp = DateTimeOffset.UtcNow;
        var state = PackRunStateFactory.CreateInitialState(context, graph, new PackRunSimulationEngine(), timestamp);

        Assert.Equal("run-state-factory", state.RunId);
        Assert.Equal(plan.Hash, state.PlanHash);

        // The gated step starts Pending with an explanatory reason.
        Assert.True(state.Steps.TryGetValue("approval", out var approvalStep));
        Assert.Equal(PackRunStepExecutionStatus.Pending, approvalStep.Status);
        Assert.Equal("requires-approval", approvalStep.StatusReason);

        // A regular step is also Pending but carries no reason.
        Assert.True(state.Steps.TryGetValue("plan-step", out var planStep));
        Assert.Equal(PackRunStepExecutionStatus.Pending, planStep.Status);
        Assert.Null(planStep.StatusReason);
    }
}

View File

@@ -1,136 +1,46 @@
<?xml version="1.0" ?>
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<OutputType>Exe</OutputType>
<IsPackable>false</IsPackable>
<TargetFramework>net10.0</TargetFramework>
<ImplicitUsings>enable</ImplicitUsings>
<Nullable>enable</Nullable>
<UseConcelierTestInfra>false</UseConcelierTestInfra>
<LangVersion>preview</LangVersion>
<TreatWarningsAsErrors>true</TreatWarningsAsErrors>
</PropertyGroup>
<ItemGroup>
<PackageReference Include="Microsoft.NET.Test.Sdk" Version="17.14.1"/>
<PackageReference Include="xunit.v3" Version="3.0.0"/>
<PackageReference Include="xunit.runner.visualstudio" Version="3.1.3"/>
</ItemGroup>
<ItemGroup>
<Content Include="xunit.runner.json" CopyToOutputDirectory="PreserveNewest"/>
</ItemGroup>
<ItemGroup>
<Using Include="Xunit"/>
</ItemGroup>
<ItemGroup>
<ProjectReference Include="..\StellaOps.TaskRunner.Core\StellaOps.TaskRunner.Core.csproj"/>
<ProjectReference Include="..\StellaOps.TaskRunner.Infrastructure\StellaOps.TaskRunner.Infrastructure.csproj"/>
<ProjectReference Include="..\..\..\AirGap\StellaOps.AirGap.Policy\StellaOps.AirGap.Policy\StellaOps.AirGap.Policy.csproj"/>
</ItemGroup>
</Project>
<?xml version="1.0"?>
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<TargetFramework>net10.0</TargetFramework>
<ImplicitUsings>enable</ImplicitUsings>
<Nullable>enable</Nullable>
<UseConcelierTestInfra>false</UseConcelierTestInfra>
<LangVersion>preview</LangVersion>
<TreatWarningsAsErrors>true</TreatWarningsAsErrors>
<IsPackable>false</IsPackable>
<OutputType>Exe</OutputType>
</PropertyGroup>
<ItemGroup>
<PackageReference Include="Microsoft.NET.Test.Sdk" Version="17.14.1" />
<PackageReference Include="Mongo2Go" Version="4.1.0" />
<PackageReference Include="xunit.v3" Version="3.0.0" />
<PackageReference Include="xunit.runner.visualstudio" Version="3.1.3" />
</ItemGroup>
<ItemGroup>
<ProjectReference Include="..\StellaOps.TaskRunner.Core\StellaOps.TaskRunner.Core.csproj" />
<ProjectReference Include="..\StellaOps.TaskRunner.Infrastructure\StellaOps.TaskRunner.Infrastructure.csproj" />
<ProjectReference Include="..\..\..\AirGap\StellaOps.AirGap.Policy\StellaOps.AirGap.Policy\StellaOps.AirGap.Policy.csproj" />
</ItemGroup>
<ItemGroup>
<Content Include="xunit.runner.json" CopyToOutputDirectory="PreserveNewest" />
</ItemGroup>
<ItemGroup>
<None Include="..\..\..\..\tests\native/openssl-1.1/linux-x64/*"
Link="native/linux-x64/%(Filename)%(Extension)"
CopyToOutputDirectory="PreserveNewest" />
</ItemGroup>
<ItemGroup>
<Compile Include="..\..\..\..\tests\shared\OpenSslLegacyShim.cs">
<Link>Shared\OpenSslLegacyShim.cs</Link>
</Compile>
</ItemGroup>
<ItemGroup>
<Using Include="Xunit" />
</ItemGroup>
</Project>

View File

@@ -1,5 +1,11 @@
using System.Text.Json.Nodes;
using Microsoft.AspNetCore.Mvc;
using System.Globalization;
using System.IO;
using System.Linq;
using System.Text;
using System.Text.Json;
using System.Text.Json.Nodes;
using Microsoft.AspNetCore.Http;
using Microsoft.AspNetCore.Mvc;
using Microsoft.Extensions.Options;
using StellaOps.TaskRunner.Core.Execution;
using StellaOps.TaskRunner.Core.Execution.Simulation;
@@ -11,20 +17,52 @@ using StellaOps.TaskRunner.WebService;
var builder = WebApplication.CreateBuilder(args);
builder.Services.Configure<TaskRunnerServiceOptions>(builder.Configuration.GetSection("TaskRunner"));
builder.Services.AddSingleton<TaskPackManifestLoader>();
builder.Services.AddSingleton<TaskPackManifestLoader>();
builder.Services.AddSingleton<TaskPackPlanner>();
builder.Services.AddSingleton<PackRunSimulationEngine>();
builder.Services.AddSingleton<PackRunExecutionGraphBuilder>();
builder.Services.AddSingleton<IPackRunApprovalStore>(sp =>
var storageOptions = builder.Configuration.GetSection("TaskRunner:Storage").Get<TaskRunnerStorageOptions>() ?? new TaskRunnerStorageOptions();
builder.Services.AddSingleton(storageOptions);
if (string.Equals(storageOptions.Mode, TaskRunnerStorageModes.Mongo, StringComparison.OrdinalIgnoreCase))
{
var options = sp.GetRequiredService<IOptions<TaskRunnerServiceOptions>>().Value;
return new FilePackRunApprovalStore(options.ApprovalStorePath);
});
builder.Services.AddSingleton<IPackRunStateStore>(sp =>
builder.Services.AddSingleton(storageOptions.Mongo);
builder.Services.AddSingleton<IMongoClient>(_ => new MongoClient(storageOptions.Mongo.ConnectionString));
builder.Services.AddSingleton<IMongoDatabase>(sp =>
{
var mongoOptions = storageOptions.Mongo;
var client = sp.GetRequiredService<IMongoClient>();
var mongoUrl = MongoUrl.Create(mongoOptions.ConnectionString);
var databaseName = !string.IsNullOrWhiteSpace(mongoOptions.Database)
? mongoOptions.Database
: mongoUrl.DatabaseName ?? "stellaops-taskrunner";
return client.GetDatabase(databaseName);
});
builder.Services.AddSingleton<IPackRunStateStore, MongoPackRunStateStore>();
builder.Services.AddSingleton<IPackRunLogStore, MongoPackRunLogStore>();
builder.Services.AddSingleton<IPackRunApprovalStore, MongoPackRunApprovalStore>();
}
else
{
var options = sp.GetRequiredService<IOptions<TaskRunnerServiceOptions>>().Value;
return new FilePackRunStateStore(options.RunStatePath);
});
builder.Services.AddSingleton<IPackRunApprovalStore>(sp =>
{
var options = sp.GetRequiredService<IOptions<TaskRunnerServiceOptions>>().Value;
return new FilePackRunApprovalStore(options.ApprovalStorePath);
});
builder.Services.AddSingleton<IPackRunStateStore>(sp =>
{
var options = sp.GetRequiredService<IOptions<TaskRunnerServiceOptions>>().Value;
return new FilePackRunStateStore(options.RunStatePath);
});
builder.Services.AddSingleton<IPackRunLogStore>(sp =>
{
var options = sp.GetRequiredService<IOptions<TaskRunnerServiceOptions>>().Value;
return new FilePackRunLogStore(options.LogsPath);
});
}
builder.Services.AddSingleton(sp =>
{
var options = sp.GetRequiredService<IOptions<TaskRunnerServiceOptions>>().Value;
@@ -77,8 +115,89 @@ app.MapPost("/v1/task-runner/simulations", async (
var simulation = simulationEngine.Simulate(plan);
var response = SimulationMapper.ToResponse(plan, simulation);
return Results.Ok(response);
}).WithName("SimulateTaskPack");
}).WithName("SimulateTaskPack");
app.MapPost("/v1/task-runner/runs", async (
[FromBody] CreateRunRequest request,
TaskPackManifestLoader loader,
TaskPackPlanner planner,
PackRunExecutionGraphBuilder executionGraphBuilder,
PackRunSimulationEngine simulationEngine,
IPackRunStateStore stateStore,
IPackRunLogStore logStore,
IPackRunJobScheduler scheduler,
CancellationToken cancellationToken) =>
{
if (request is null || string.IsNullOrWhiteSpace(request.Manifest))
{
return Results.BadRequest(new { error = "Manifest is required." });
}
TaskPackManifest manifest;
try
{
manifest = loader.Deserialize(request.Manifest);
}
catch (Exception ex)
{
return Results.BadRequest(new { error = "Invalid manifest", detail = ex.Message });
}
var inputs = ConvertInputs(request.Inputs);
var planResult = planner.Plan(manifest, inputs);
if (!planResult.Success || planResult.Plan is null)
{
return Results.BadRequest(new
{
errors = planResult.Errors.Select(error => new { error.Path, error.Message })
});
}
var plan = planResult.Plan;
var runId = string.IsNullOrWhiteSpace(request.RunId)
? Guid.NewGuid().ToString("n")
: request.RunId!;
var existing = await stateStore.GetAsync(runId, cancellationToken).ConfigureAwait(false);
if (existing is not null)
{
return Results.Conflict(new { error = "Run already exists." });
}
var requestedAt = DateTimeOffset.UtcNow;
var context = new PackRunExecutionContext(runId, plan, requestedAt);
var graph = executionGraphBuilder.Build(plan);
var state = PackRunStateFactory.CreateInitialState(context, graph, simulationEngine, requestedAt);
await stateStore.SaveAsync(state, cancellationToken).ConfigureAwait(false);
try
{
await scheduler.ScheduleAsync(context, cancellationToken).ConfigureAwait(false);
}
catch (Exception ex)
{
await logStore.AppendAsync(
runId,
new PackRunLogEntry(DateTimeOffset.UtcNow, "error", "run.schedule-failed", ex.Message, null, null),
cancellationToken).ConfigureAwait(false);
return Results.StatusCode(StatusCodes.Status500InternalServerError);
}
var metadata = new Dictionary<string, string>(StringComparer.Ordinal);
metadata["planHash"] = plan.Hash;
metadata["requestedAt"] = requestedAt.ToUniversalTime().ToString("O", CultureInfo.InvariantCulture);
await logStore.AppendAsync(
runId,
new PackRunLogEntry(DateTimeOffset.UtcNow, "info", "run.created", "Run created via API.", null, metadata),
cancellationToken).ConfigureAwait(false);
var response = RunStateMapper.ToResponse(state);
return Results.Created($"/v1/task-runner/runs/{runId}", response);
}).WithName("CreatePackRun");
app.MapGet("/v1/task-runner/runs/{runId}", async (
string runId,
IPackRunStateStore stateStore,
@@ -94,10 +213,34 @@ app.MapGet("/v1/task-runner/runs/{runId}", async (
{
return Results.NotFound();
}
return Results.Ok(RunStateMapper.ToResponse(state));
}).WithName("GetRunState");
app.MapGet("/v1/task-runner/runs/{runId}/logs", async (
string runId,
IPackRunLogStore logStore,
CancellationToken cancellationToken) =>
{
if (string.IsNullOrWhiteSpace(runId))
{
return Results.BadRequest(new { error = "runId is required." });
}
if (!await logStore.ExistsAsync(runId, cancellationToken).ConfigureAwait(false))
{
return Results.NotFound();
}
return Results.Stream(async (stream, ct) =>
{
await foreach (var entry in logStore.ReadAsync(runId, ct).ConfigureAwait(false))
{
await RunLogMapper.WriteAsync(stream, entry, ct).ConfigureAwait(false);
}
}, "application/x-ndjson");
}).WithName("StreamRunLogs");
app.MapPost("/v1/task-runner/runs/{runId}/approvals/{approvalId}", async (
string runId,
string approvalId,
@@ -151,12 +294,14 @@ static IDictionary<string, JsonNode?>? ConvertInputs(JsonObject? node)
return dictionary;
}
internal sealed record SimulationRequest(string Manifest, JsonObject? Inputs);
internal sealed record SimulationResponse(
string PlanHash,
FailurePolicyResponse FailurePolicy,
IReadOnlyList<SimulationStepResponse> Steps,
internal sealed record CreateRunRequest(string? RunId, string Manifest, JsonObject? Inputs);
internal sealed record SimulationRequest(string Manifest, JsonObject? Inputs);
internal sealed record SimulationResponse(
string PlanHash,
FailurePolicyResponse FailurePolicy,
IReadOnlyList<SimulationStepResponse> Steps,
IReadOnlyList<SimulationOutputResponse> Outputs,
bool HasPendingApprovals);
@@ -206,9 +351,54 @@ internal sealed record RunStateStepResponse(
string? StatusReason);
internal sealed record ApprovalDecisionDto(string Decision, string? ActorId, string? Summary);
internal static class SimulationMapper
{
internal sealed record RunLogEntryResponse(
DateTimeOffset Timestamp,
string Level,
string EventType,
string Message,
string? StepId,
IReadOnlyDictionary<string, string>? Metadata);
/// <summary>
/// Serializes pack-run log entries as NDJSON (one JSON object per line) for the
/// streaming logs endpoint.
/// </summary>
internal static class RunLogMapper
{
    private static readonly JsonSerializerOptions SerializerOptions = new(JsonSerializerDefaults.Web)
    {
        WriteIndented = false
    };

    private static readonly byte[] NewLine = Encoding.UTF8.GetBytes("\n");

    /// <summary>Maps a log entry to its wire shape, with metadata keys in ordinal order.</summary>
    public static RunLogEntryResponse ToResponse(PackRunLogEntry entry)
    {
        // Keep metadata key order deterministic so the emitted JSON is stable.
        IReadOnlyDictionary<string, string>? metadata = null;
        if (entry.Metadata is not null && entry.Metadata.Count > 0)
        {
            var ordered = new SortedDictionary<string, string>(StringComparer.Ordinal);
            foreach (var (key, value) in entry.Metadata)
            {
                ordered[key] = value;
            }

            metadata = ordered;
        }

        return new RunLogEntryResponse(
            entry.Timestamp,
            entry.Level,
            entry.EventType,
            entry.Message,
            entry.StepId,
            metadata);
    }

    /// <summary>Writes one entry to <paramref name="stream"/> followed by a newline, then flushes.</summary>
    public static async Task WriteAsync(Stream stream, PackRunLogEntry entry, CancellationToken cancellationToken)
    {
        await JsonSerializer.SerializeAsync(stream, ToResponse(entry), SerializerOptions, cancellationToken).ConfigureAwait(false);
        await stream.WriteAsync(NewLine, cancellationToken).ConfigureAwait(false);
        await stream.FlushAsync(cancellationToken).ConfigureAwait(false);
    }
}
internal static class SimulationMapper
{
public static SimulationResponse ToResponse(TaskPackPlan plan, PackRunSimulationResult result)
{
var failurePolicy = result.FailurePolicy ?? PackRunExecutionGraph.DefaultFailurePolicy;

View File

@@ -1,9 +1,14 @@
namespace StellaOps.TaskRunner.WebService;
using StellaOps.TaskRunner.Core.Configuration;
namespace StellaOps.TaskRunner.WebService;
public sealed class TaskRunnerServiceOptions
{
public string RunStatePath { get; set; } = Path.Combine(AppContext.BaseDirectory, "state", "runs");
public string ApprovalStorePath { get; set; } = Path.Combine(AppContext.BaseDirectory, "approvals");
public string QueuePath { get; set; } = Path.Combine(AppContext.BaseDirectory, "queue");
public string ArchivePath { get; set; } = Path.Combine(AppContext.BaseDirectory, "queue", "archive");
public string LogsPath { get; set; } = Path.Combine(AppContext.BaseDirectory, "logs", "runs");
public TaskRunnerStorageOptions Storage { get; set; } = new();
}

View File

@@ -1,9 +1,11 @@
using Microsoft.Extensions.Options;
using MongoDB.Driver;
using StellaOps.AirGap.Policy;
using StellaOps.TaskRunner.Core.Configuration;
using StellaOps.TaskRunner.Core.Execution;
using StellaOps.TaskRunner.Core.Execution.Simulation;
using StellaOps.TaskRunner.Infrastructure.Execution;
using StellaOps.TaskRunner.Worker.Services;
using StellaOps.TaskRunner.Core.Execution.Simulation;
var builder = Host.CreateApplicationBuilder(args);
@@ -11,13 +13,8 @@ builder.Services.AddAirGapEgressPolicy(builder.Configuration, sectionName: "AirG
builder.Services.Configure<PackRunWorkerOptions>(builder.Configuration.GetSection("Worker"));
builder.Services.Configure<NotificationOptions>(builder.Configuration.GetSection("Notifications"));
builder.Services.AddHttpClient("taskrunner-notifications");
builder.Services.AddSingleton<IPackRunApprovalStore>(sp =>
{
var options = sp.GetRequiredService<IOptions<PackRunWorkerOptions>>();
return new FilePackRunApprovalStore(options.Value.ApprovalStorePath);
});
builder.Services.AddSingleton(TimeProvider.System);
builder.Services.AddSingleton(sp =>
{
var options = sp.GetRequiredService<IOptions<PackRunWorkerOptions>>();
@@ -30,34 +27,71 @@ builder.Services.AddSingleton<IPackRunJobScheduler>(sp => sp.GetRequiredService<
builder.Services.AddSingleton<IPackRunNotificationPublisher>(sp =>
{
var options = sp.GetRequiredService<IOptions<NotificationOptions>>().Value;
if (options.ApprovalEndpoint is not null || options.PolicyEndpoint is not null)
{
return new HttpPackRunNotificationPublisher(
sp.GetRequiredService<IHttpClientFactory>(),
sp.GetRequiredService<IOptions<NotificationOptions>>(),
sp.GetRequiredService<ILogger<HttpPackRunNotificationPublisher>>());
}
if (options.ApprovalEndpoint is not null || options.PolicyEndpoint is not null)
{
return new HttpPackRunNotificationPublisher(
sp.GetRequiredService<IHttpClientFactory>(),
sp.GetRequiredService<IOptions<NotificationOptions>>(),
sp.GetRequiredService<ILogger<HttpPackRunNotificationPublisher>>());
}
return new LoggingPackRunNotificationPublisher(sp.GetRequiredService<ILogger<LoggingPackRunNotificationPublisher>>());
});
builder.Services.AddSingleton<IPackRunStateStore>(sp =>
{
var options = sp.GetRequiredService<IOptions<PackRunWorkerOptions>>();
return new FilePackRunStateStore(options.Value.RunStatePath);
});
builder.Services.AddSingleton<IPackRunStepExecutor, NoopPackRunStepExecutor>();
builder.Services.AddSingleton<PackRunExecutionGraphBuilder>();
builder.Services.AddSingleton<PackRunSimulationEngine>();
builder.Services.AddSingleton<PackRunProcessor>();
builder.Services.AddSingleton<IPackRunArtifactUploader>(sp =>
var workerStorageOptions = builder.Configuration.GetSection("Worker:Storage").Get<TaskRunnerStorageOptions>() ?? new TaskRunnerStorageOptions();
builder.Services.AddSingleton(workerStorageOptions);
if (string.Equals(workerStorageOptions.Mode, TaskRunnerStorageModes.Mongo, StringComparison.OrdinalIgnoreCase))
{
var options = sp.GetRequiredService<IOptions<PackRunWorkerOptions>>().Value;
var timeProvider = sp.GetService<TimeProvider>();
var logger = sp.GetRequiredService<ILogger<FilesystemPackRunArtifactUploader>>();
return new FilesystemPackRunArtifactUploader(options.ArtifactsPath, timeProvider, logger);
});
builder.Services.AddSingleton(workerStorageOptions.Mongo);
builder.Services.AddSingleton<IMongoClient>(_ => new MongoClient(workerStorageOptions.Mongo.ConnectionString));
builder.Services.AddSingleton<IMongoDatabase>(sp =>
{
var mongoOptions = workerStorageOptions.Mongo;
var client = sp.GetRequiredService<IMongoClient>();
var mongoUrl = MongoUrl.Create(mongoOptions.ConnectionString);
var databaseName = !string.IsNullOrWhiteSpace(mongoOptions.Database)
? mongoOptions.Database
: mongoUrl.DatabaseName ?? "stellaops-taskrunner";
return client.GetDatabase(databaseName);
});
builder.Services.AddSingleton<IPackRunStateStore, MongoPackRunStateStore>();
builder.Services.AddSingleton<IPackRunLogStore, MongoPackRunLogStore>();
builder.Services.AddSingleton<IPackRunApprovalStore, MongoPackRunApprovalStore>();
builder.Services.AddSingleton<IPackRunArtifactUploader, MongoPackRunArtifactUploader>();
}
else
{
builder.Services.AddSingleton<IPackRunApprovalStore>(sp =>
{
var options = sp.GetRequiredService<IOptions<PackRunWorkerOptions>>();
return new FilePackRunApprovalStore(options.Value.ApprovalStorePath);
});
builder.Services.AddSingleton<IPackRunStateStore>(sp =>
{
var options = sp.GetRequiredService<IOptions<PackRunWorkerOptions>>();
return new FilePackRunStateStore(options.Value.RunStatePath);
});
builder.Services.AddSingleton<IPackRunLogStore>(sp =>
{
var options = sp.GetRequiredService<IOptions<PackRunWorkerOptions>>();
return new FilePackRunLogStore(options.Value.LogsPath);
});
builder.Services.AddSingleton<IPackRunArtifactUploader>(sp =>
{
var options = sp.GetRequiredService<IOptions<PackRunWorkerOptions>>().Value;
var timeProvider = sp.GetRequiredService<TimeProvider>();
var logger = sp.GetRequiredService<ILogger<FilesystemPackRunArtifactUploader>>();
return new FilesystemPackRunArtifactUploader(options.ArtifactsPath, timeProvider, logger);
});
}
builder.Services.AddHostedService<PackRunWorkerService>();
var host = builder.Build();

View File

@@ -1,9 +1,11 @@
namespace StellaOps.TaskRunner.Worker.Services;
public sealed class PackRunWorkerOptions
{
public TimeSpan IdleDelay { get; set; } = TimeSpan.FromSeconds(1);
using StellaOps.TaskRunner.Core.Configuration;
namespace StellaOps.TaskRunner.Worker.Services;
public sealed class PackRunWorkerOptions
{
public TimeSpan IdleDelay { get; set; } = TimeSpan.FromSeconds(1);
public string QueuePath { get; set; } = Path.Combine(AppContext.BaseDirectory, "queue");
public string ArchivePath { get; set; } = Path.Combine(AppContext.BaseDirectory, "queue", "archive");
@@ -13,4 +15,8 @@ public sealed class PackRunWorkerOptions
public string RunStatePath { get; set; } = Path.Combine(AppContext.BaseDirectory, "state", "runs");
public string ArtifactsPath { get; set; } = Path.Combine(AppContext.BaseDirectory, "artifacts");
public string LogsPath { get; set; } = Path.Combine(AppContext.BaseDirectory, "logs", "runs");
public TaskRunnerStorageOptions Storage { get; set; } = new();
}

View File

@@ -1,6 +1,7 @@
using System.Collections.Concurrent;
using System.Collections.ObjectModel;
using System.Text.Json.Nodes;
using System.Collections.Concurrent;
using System.Collections.ObjectModel;
using System.Globalization;
using System.Text.Json.Nodes;
using Microsoft.Extensions.Options;
using StellaOps.TaskRunner.Core.Execution;
using StellaOps.TaskRunner.Core.Execution.Simulation;
@@ -21,6 +22,7 @@ public sealed class PackRunWorkerService : BackgroundService
private readonly PackRunSimulationEngine simulationEngine;
private readonly IPackRunStepExecutor executor;
private readonly IPackRunArtifactUploader artifactUploader;
private readonly IPackRunLogStore logStore;
private readonly ILogger<PackRunWorkerService> logger;
public PackRunWorkerService(
@@ -31,6 +33,7 @@ public sealed class PackRunWorkerService : BackgroundService
PackRunSimulationEngine simulationEngine,
IPackRunStepExecutor executor,
IPackRunArtifactUploader artifactUploader,
IPackRunLogStore logStore,
IOptions<PackRunWorkerOptions> options,
ILogger<PackRunWorkerService> logger)
{
@@ -41,6 +44,7 @@ public sealed class PackRunWorkerService : BackgroundService
this.simulationEngine = simulationEngine ?? throw new ArgumentNullException(nameof(simulationEngine));
this.executor = executor ?? throw new ArgumentNullException(nameof(executor));
this.artifactUploader = artifactUploader ?? throw new ArgumentNullException(nameof(artifactUploader));
this.logStore = logStore ?? throw new ArgumentNullException(nameof(logStore));
this.options = options?.Value ?? throw new ArgumentNullException(nameof(options));
this.logger = logger ?? throw new ArgumentNullException(nameof(logger));
}
@@ -63,122 +67,126 @@ public sealed class PackRunWorkerService : BackgroundService
catch (OperationCanceledException) when (stoppingToken.IsCancellationRequested)
{
break;
}
catch (Exception ex)
{
logger.LogError(ex, "Unhandled exception while processing run {RunId}.", context.RunId);
}
}
}
private async Task ProcessRunAsync(PackRunExecutionContext context, CancellationToken cancellationToken)
{
logger.LogInformation("Processing pack run {RunId}.", context.RunId);
var processorResult = await processor.ProcessNewRunAsync(context, cancellationToken).ConfigureAwait(false);
var graph = graphBuilder.Build(context.Plan);
var state = await stateStore.GetAsync(context.RunId, cancellationToken).ConfigureAwait(false);
if (state is null || !string.Equals(state.PlanHash, context.Plan.Hash, StringComparison.Ordinal))
{
state = await CreateInitialStateAsync(context, graph, cancellationToken).ConfigureAwait(false);
}
if (!processorResult.ShouldResumeImmediately)
{
logger.LogInformation("Run {RunId} awaiting approvals or policy gates.", context.RunId);
return;
}
var gateUpdate = PackRunGateStateUpdater.Apply(state, graph, processorResult.ApprovalCoordinator, DateTimeOffset.UtcNow);
state = gateUpdate.State;
if (gateUpdate.HasBlockingFailure)
{
await stateStore.SaveAsync(state, cancellationToken).ConfigureAwait(false);
logger.LogWarning("Run {RunId} halted because a gate failed.", context.RunId);
return;
}
var updatedState = await ExecuteGraphAsync(context, graph, state, cancellationToken).ConfigureAwait(false);
await stateStore.SaveAsync(updatedState, cancellationToken).ConfigureAwait(false);
}
catch (Exception ex)
{
logger.LogError(ex, "Unhandled exception while processing run {RunId}.", context.RunId);
var metadata = new Dictionary<string, string>(StringComparer.Ordinal)
{
["exceptionType"] = ex.GetType().FullName ?? ex.GetType().Name
};
await AppendLogAsync(
context.RunId,
"error",
"run.failed",
"Unhandled exception while processing run.",
stoppingToken,
metadata: metadata).ConfigureAwait(false);
}
}
}
private async Task ProcessRunAsync(PackRunExecutionContext context, CancellationToken cancellationToken)
{
logger.LogInformation("Processing pack run {RunId}.", context.RunId);
await AppendLogAsync(
context.RunId,
"info",
"run.received",
"Run dequeued by worker.",
cancellationToken,
metadata: new Dictionary<string, string>(StringComparer.Ordinal)
{
["planHash"] = context.Plan.Hash
}).ConfigureAwait(false);
var processorResult = await processor.ProcessNewRunAsync(context, cancellationToken).ConfigureAwait(false);
var graph = graphBuilder.Build(context.Plan);
var state = await stateStore.GetAsync(context.RunId, cancellationToken).ConfigureAwait(false);
if (state is null || !string.Equals(state.PlanHash, context.Plan.Hash, StringComparison.Ordinal))
{
state = await CreateInitialStateAsync(context, graph, cancellationToken).ConfigureAwait(false);
}
if (!processorResult.ShouldResumeImmediately)
{
logger.LogInformation("Run {RunId} awaiting approvals or policy gates.", context.RunId);
await AppendLogAsync(
context.RunId,
"info",
"run.awaiting-approvals",
"Run paused awaiting approvals or policy gates.",
cancellationToken).ConfigureAwait(false);
return;
}
var gateUpdate = PackRunGateStateUpdater.Apply(state, graph, processorResult.ApprovalCoordinator, DateTimeOffset.UtcNow);
state = gateUpdate.State;
if (gateUpdate.HasBlockingFailure)
{
await stateStore.SaveAsync(state, cancellationToken).ConfigureAwait(false);
logger.LogWarning("Run {RunId} halted because a gate failed.", context.RunId);
await AppendLogAsync(
context.RunId,
"warn",
"run.gate-blocked",
"Run halted because a gate failed.",
cancellationToken).ConfigureAwait(false);
return;
}
var updatedState = await ExecuteGraphAsync(context, graph, state, cancellationToken).ConfigureAwait(false);
await stateStore.SaveAsync(updatedState, cancellationToken).ConfigureAwait(false);
if (updatedState.Steps.Values.All(step => step.Status is PackRunStepExecutionStatus.Succeeded or PackRunStepExecutionStatus.Skipped))
{
logger.LogInformation("Run {RunId} finished successfully.", context.RunId);
await AppendLogAsync(
context.RunId,
"info",
"run.completed",
"Run finished successfully.",
cancellationToken).ConfigureAwait(false);
await artifactUploader.UploadAsync(context, updatedState, context.Plan.Outputs, cancellationToken).ConfigureAwait(false);
}
else
{
logger.LogInformation("Run {RunId} paused with pending work.", context.RunId);
await AppendLogAsync(
context.RunId,
"info",
"run.paused",
"Run paused with pending work.",
cancellationToken).ConfigureAwait(false);
}
}
private async Task<PackRunState> CreateInitialStateAsync(
PackRunExecutionContext context,
PackRunExecutionGraph graph,
CancellationToken cancellationToken)
{
var timestamp = DateTimeOffset.UtcNow;
var simulation = simulationEngine.Simulate(context.Plan);
var simulationIndex = IndexSimulation(simulation.Steps);
var stepRecords = new Dictionary<string, PackRunStepStateRecord>(StringComparer.Ordinal);
foreach (var step in EnumerateSteps(graph.Steps))
{
var simulationStatus = simulationIndex.TryGetValue(step.Id, out var node)
? node.Status
: PackRunSimulationStatus.Pending;
var status = step.Enabled ? PackRunStepExecutionStatus.Pending : PackRunStepExecutionStatus.Skipped;
string? statusReason = null;
if (!step.Enabled)
{
statusReason = "disabled";
}
else if (simulationStatus == PackRunSimulationStatus.RequiresApproval)
{
statusReason = "requires-approval";
}
else if (simulationStatus == PackRunSimulationStatus.RequiresPolicy)
{
statusReason = "requires-policy";
}
else if (simulationStatus == PackRunSimulationStatus.Skipped)
{
status = PackRunStepExecutionStatus.Skipped;
statusReason = "condition-false";
}
var record = new PackRunStepStateRecord(
step.Id,
step.Kind,
step.Enabled,
step.ContinueOnError,
step.MaxParallel,
step.ApprovalId,
step.GateMessage,
status,
Attempts: 0,
LastTransitionAt: null,
NextAttemptAt: null,
StatusReason: statusReason);
stepRecords[step.Id] = record;
}
var failurePolicy = graph.FailurePolicy ?? PackRunExecutionGraph.DefaultFailurePolicy;
var state = PackRunState.Create(
context.RunId,
context.Plan.Hash,
context.Plan,
failurePolicy,
context.RequestedAt,
stepRecords,
timestamp);
await stateStore.SaveAsync(state, cancellationToken).ConfigureAwait(false);
return state;
}
}
private async Task<PackRunState> CreateInitialStateAsync(
PackRunExecutionContext context,
PackRunExecutionGraph graph,
CancellationToken cancellationToken)
{
var timestamp = DateTimeOffset.UtcNow;
var state = PackRunStateFactory.CreateInitialState(context, graph, simulationEngine, timestamp);
await stateStore.SaveAsync(state, cancellationToken).ConfigureAwait(false);
return state;
}
private Task AppendLogAsync(
string runId,
string level,
string eventType,
string message,
CancellationToken cancellationToken,
string? stepId = null,
IReadOnlyDictionary<string, string>? metadata = null)
{
var entry = new PackRunLogEntry(DateTimeOffset.UtcNow, level, eventType, message, stepId, metadata);
return logStore.AppendAsync(runId, entry, cancellationToken);
}
private async Task<PackRunState> ExecuteGraphAsync(
PackRunExecutionContext context,
@@ -228,52 +236,83 @@ public sealed class PackRunWorkerService : BackgroundService
return StepExecutionOutcome.Continue;
}
if (record.NextAttemptAt is { } scheduled && scheduled > DateTimeOffset.UtcNow)
{
logger.LogInformation(
"Run {RunId} step {StepId} waiting until {NextAttempt} for retry.",
executionContext.RunId,
record.StepId,
scheduled);
return StepExecutionOutcome.Defer;
}
switch (step.Kind)
{
case PackRunStepKind.GateApproval:
case PackRunStepKind.GatePolicy:
executionContext.Steps[step.Id] = record with
{
Status = PackRunStepExecutionStatus.Succeeded,
if (record.NextAttemptAt is { } scheduled && scheduled > DateTimeOffset.UtcNow)
{
logger.LogInformation(
"Run {RunId} step {StepId} waiting until {NextAttempt} for retry.",
executionContext.RunId,
record.StepId,
scheduled);
var metadata = new Dictionary<string, string>(StringComparer.Ordinal)
{
["nextAttemptAt"] = scheduled.ToUniversalTime().ToString("O", CultureInfo.InvariantCulture),
["attempts"] = record.Attempts.ToString(CultureInfo.InvariantCulture)
};
await AppendLogAsync(
executionContext.RunId,
"info",
"step.awaiting-retry",
$"Step {record.StepId} waiting for retry.",
executionContext.CancellationToken,
record.StepId,
metadata).ConfigureAwait(false);
return StepExecutionOutcome.Defer;
}
switch (step.Kind)
{
case PackRunStepKind.GateApproval:
case PackRunStepKind.GatePolicy:
executionContext.Steps[step.Id] = record with
{
Status = PackRunStepExecutionStatus.Succeeded,
StatusReason = null,
LastTransitionAt = DateTimeOffset.UtcNow,
NextAttemptAt = null
};
return StepExecutionOutcome.Continue;
case PackRunStepKind.Parallel:
return await ExecuteParallelStepAsync(step, executionContext).ConfigureAwait(false);
case PackRunStepKind.Map:
LastTransitionAt = DateTimeOffset.UtcNow,
NextAttemptAt = null
};
await AppendLogAsync(
executionContext.RunId,
"info",
step.Kind == PackRunStepKind.GateApproval ? "step.approval-satisfied" : "step.policy-satisfied",
$"Gate {step.Id} satisfied.",
executionContext.CancellationToken,
step.Id).ConfigureAwait(false);
return StepExecutionOutcome.Continue;
case PackRunStepKind.Parallel:
return await ExecuteParallelStepAsync(step, executionContext).ConfigureAwait(false);
case PackRunStepKind.Map:
return await ExecuteMapStepAsync(step, executionContext).ConfigureAwait(false);
case PackRunStepKind.Run:
return await ExecuteRunStepAsync(step, executionContext).ConfigureAwait(false);
default:
logger.LogWarning("Run {RunId} encountered unsupported step kind '{Kind}' for step {StepId}. Marking as skipped.",
executionContext.RunId,
step.Kind,
step.Id);
executionContext.Steps[step.Id] = record with
{
Status = PackRunStepExecutionStatus.Skipped,
StatusReason = "unsupported-kind",
LastTransitionAt = DateTimeOffset.UtcNow
};
return StepExecutionOutcome.Continue;
}
}
default:
logger.LogWarning("Run {RunId} encountered unsupported step kind '{Kind}' for step {StepId}. Marking as skipped.",
executionContext.RunId,
step.Kind,
step.Id);
executionContext.Steps[step.Id] = record with
{
Status = PackRunStepExecutionStatus.Skipped,
StatusReason = "unsupported-kind",
LastTransitionAt = DateTimeOffset.UtcNow
};
await AppendLogAsync(
executionContext.RunId,
"warn",
"step.skipped",
"Step skipped because the step kind is unsupported.",
executionContext.CancellationToken,
step.Id,
new Dictionary<string, string>(StringComparer.Ordinal)
{
["kind"] = step.Kind.ToString()
}).ConfigureAwait(false);
return StepExecutionOutcome.Continue;
}
}
private async Task<StepExecutionOutcome> ExecuteRunStepAsync(
PackRunExecutionStep step,
@@ -283,57 +322,124 @@ public sealed class PackRunWorkerService : BackgroundService
var now = DateTimeOffset.UtcNow;
var currentState = new PackRunStepState(record.Status, record.Attempts, record.LastTransitionAt, record.NextAttemptAt);
if (currentState.Status == PackRunStepExecutionStatus.Pending)
{
currentState = PackRunStepStateMachine.Start(currentState, now);
record = record with
{
Status = currentState.Status,
LastTransitionAt = currentState.LastTransitionAt,
NextAttemptAt = currentState.NextAttemptAt,
StatusReason = null
};
executionContext.Steps[step.Id] = record;
}
var result = await executor.ExecuteAsync(step, step.Parameters ?? PackRunExecutionStep.EmptyParameters, executionContext.CancellationToken).ConfigureAwait(false);
if (result.Succeeded)
{
currentState = PackRunStepStateMachine.CompleteSuccess(currentState, DateTimeOffset.UtcNow);
executionContext.Steps[step.Id] = record with
{
Status = currentState.Status,
Attempts = currentState.Attempts,
LastTransitionAt = currentState.LastTransitionAt,
NextAttemptAt = currentState.NextAttemptAt,
StatusReason = null
};
return StepExecutionOutcome.Continue;
}
logger.LogWarning(
"Run {RunId} step {StepId} failed: {Error}",
executionContext.RunId,
step.Id,
result.Error ?? "unknown error");
var failure = PackRunStepStateMachine.RegisterFailure(currentState, DateTimeOffset.UtcNow, executionContext.FailurePolicy);
var updatedRecord = record with
{
Status = failure.State.Status,
Attempts = failure.State.Attempts,
LastTransitionAt = failure.State.LastTransitionAt,
NextAttemptAt = failure.State.NextAttemptAt,
StatusReason = result.Error
};
executionContext.Steps[step.Id] = updatedRecord;
return failure.Outcome switch
{
PackRunStepFailureOutcome.Retry => StepExecutionOutcome.Defer,
PackRunStepFailureOutcome.Abort when step.ContinueOnError => StepExecutionOutcome.Continue,
if (currentState.Status == PackRunStepExecutionStatus.Pending)
{
currentState = PackRunStepStateMachine.Start(currentState, now);
record = record with
{
Status = currentState.Status,
LastTransitionAt = currentState.LastTransitionAt,
NextAttemptAt = currentState.NextAttemptAt,
StatusReason = null
};
executionContext.Steps[step.Id] = record;
var startMetadata = new Dictionary<string, string>(StringComparer.Ordinal)
{
["attempt"] = currentState.Attempts.ToString(CultureInfo.InvariantCulture)
};
await AppendLogAsync(
executionContext.RunId,
"info",
"step.started",
$"Step {step.Id} started.",
executionContext.CancellationToken,
step.Id,
startMetadata).ConfigureAwait(false);
}
var result = await executor.ExecuteAsync(step, step.Parameters ?? PackRunExecutionStep.EmptyParameters, executionContext.CancellationToken).ConfigureAwait(false);
if (result.Succeeded)
{
currentState = PackRunStepStateMachine.CompleteSuccess(currentState, DateTimeOffset.UtcNow);
executionContext.Steps[step.Id] = record with
{
Status = currentState.Status,
Attempts = currentState.Attempts,
LastTransitionAt = currentState.LastTransitionAt,
NextAttemptAt = currentState.NextAttemptAt,
StatusReason = null
};
var successMetadata = new Dictionary<string, string>(StringComparer.Ordinal)
{
["attempt"] = currentState.Attempts.ToString(CultureInfo.InvariantCulture)
};
await AppendLogAsync(
executionContext.RunId,
"info",
"step.succeeded",
$"Step {step.Id} succeeded.",
executionContext.CancellationToken,
step.Id,
successMetadata).ConfigureAwait(false);
return StepExecutionOutcome.Continue;
}
logger.LogWarning(
"Run {RunId} step {StepId} failed: {Error}",
executionContext.RunId,
step.Id,
result.Error ?? "unknown error");
var failure = PackRunStepStateMachine.RegisterFailure(currentState, DateTimeOffset.UtcNow, executionContext.FailurePolicy);
var updatedRecord = record with
{
Status = failure.State.Status,
Attempts = failure.State.Attempts,
LastTransitionAt = failure.State.LastTransitionAt,
NextAttemptAt = failure.State.NextAttemptAt,
StatusReason = result.Error
};
executionContext.Steps[step.Id] = updatedRecord;
var failureMetadata = new Dictionary<string, string>(StringComparer.Ordinal)
{
["attempt"] = failure.State.Attempts.ToString(CultureInfo.InvariantCulture)
};
if (!string.IsNullOrWhiteSpace(result.Error))
{
failureMetadata["error"] = result.Error;
}
if (failure.State.NextAttemptAt is { } retryAt)
{
failureMetadata["nextAttemptAt"] = retryAt.ToUniversalTime().ToString("O", CultureInfo.InvariantCulture);
}
var failureLevel = failure.Outcome == PackRunStepFailureOutcome.Abort && !step.ContinueOnError
? "error"
: "warn";
await AppendLogAsync(
executionContext.RunId,
failureLevel,
"step.failed",
$"Step {step.Id} failed.",
executionContext.CancellationToken,
step.Id,
failureMetadata).ConfigureAwait(false);
if (failure.Outcome == PackRunStepFailureOutcome.Retry)
{
var retryMetadata = new Dictionary<string, string>(failureMetadata, StringComparer.Ordinal)
{
["outcome"] = "retry"
};
await AppendLogAsync(
executionContext.RunId,
"info",
"step.retry-scheduled",
$"Step {step.Id} scheduled for retry.",
executionContext.CancellationToken,
step.Id,
retryMetadata).ConfigureAwait(false);
}
return failure.Outcome switch
{
PackRunStepFailureOutcome.Retry => StepExecutionOutcome.Defer,
PackRunStepFailureOutcome.Abort when step.ContinueOnError => StepExecutionOutcome.Continue,
PackRunStepFailureOutcome.Abort => StepExecutionOutcome.AbortRun,
_ => StepExecutionOutcome.AbortRun
};
@@ -503,44 +609,11 @@ public sealed class PackRunWorkerService : BackgroundService
};
}
private static Dictionary<string, PackRunSimulationNode> IndexSimulation(IReadOnlyList<PackRunSimulationNode> steps)
{
var result = new Dictionary<string, PackRunSimulationNode>(StringComparer.Ordinal);
foreach (var node in steps)
{
result[node.Id] = node;
if (node.Children.Count > 0)
{
foreach (var child in IndexSimulation(node.Children))
{
result[child.Key] = child.Value;
}
}
}
return result;
}
private static IEnumerable<PackRunExecutionStep> EnumerateSteps(IReadOnlyList<PackRunExecutionStep> steps)
{
foreach (var step in steps)
{
yield return step;
if (step.Children.Count > 0)
{
foreach (var child in EnumerateSteps(step.Children))
{
yield return child;
}
}
}
}
private sealed record ExecutionContext(
string RunId,
TaskPackPlanFailurePolicy FailurePolicy,
ConcurrentDictionary<string, PackRunStepStateRecord> Steps,
CancellationToken CancellationToken);
private sealed record ExecutionContext(
string RunId,
TaskPackPlanFailurePolicy FailurePolicy,
ConcurrentDictionary<string, PackRunStepStateRecord> Steps,
CancellationToken CancellationToken);
private enum StepExecutionOutcome
{

View File

@@ -1,11 +1,12 @@
# TASKS — Epic 1: Aggregation-Only Contract
| ID | Status | Owner(s) | Depends on | Notes |
|----|--------|----------|------------|-------|
| WEB-AOC-19-001 `Shared AOC guard primitives` | DOING (2025-10-26) | BE-Base Platform Guild | — | Provide `AOCForbiddenKeys`, guard middleware/interceptor hooks, and error types (`AOCError`, `AOCViolationCode`) for ingestion services. Publish sample usage + analyzer to ensure guard registered. |
| WEB-AOC-19-001 `Shared AOC guard primitives` | DONE (2025-11-07) | BE-Base Platform Guild | — | Provide `AOCForbiddenKeys`, guard middleware/interceptor hooks, and error types (`AOCError`, `AOCViolationCode`) for ingestion services. Publish sample usage + analyzer to ensure guard registered. |
> 2025-10-26: Introduced `StellaOps.Aoc` library with forbidden key list, guard result/options, and baseline write guard + tests. Middleware/analyzer wiring still pending.
> 2025-10-30: Added `StellaOps.Aoc.AspNetCore` helpers (`AddAocGuard`, `AocHttpResults`) and switched Concelier WebService to the shared problem-details mapper; analyzer wiring remains pending.
> 2025-10-30: Published `docs/aoc/guard-library.md` covering registration patterns, endpoint filters, and error mapping for ingestion services.
> 2025-11-06: Added `RequireAocGuard` route helper, wired Concelier advisory ingestion endpoint to the shared filter, refreshed docs, and introduced extension tests.
> 2025-11-07: Enforced allowed top-level field detection (`ERR_AOC_007`), introduced the shared `AocError` DTO/HTTP response payload, updated docs, and expanded test coverage.
| WEB-AOC-19-002 `Provenance & signature helpers` | TODO | BE-Base Platform Guild | WEB-AOC-19-001 | Ship `ProvenanceBuilder`, checksum utilities, and signature verification helper integrated with guard logging. Cover DSSE/CMS formats with unit tests. |
| WEB-AOC-19-003 `Analyzer + test fixtures` | TODO | QA Guild, BE-Base Platform Guild | WEB-AOC-19-001 | Author Roslyn analyzer preventing ingestion modules from writing forbidden keys without guard, and provide shared test fixtures for guard validation used by Concelier/Excititor service tests. |
> Docs alignment (2025-10-26): Analyzer expectations detailed in `docs/ingestion/aggregation-only-contract.md` §3/5; CI integration tracked via DEVOPS-AOC-19-001.

View File

@@ -53,9 +53,7 @@ internal sealed class AwsKmsFacade : IAwsKmsFacade
config.ServiceURL = options.Endpoint;
}
config.UseFIPSEndpoint = options.UseFipsEndpoint
? UseFIPSEndpointState.Enabled
: UseFIPSEndpointState.Disabled;
config.UseFIPSEndpoint = options.UseFipsEndpoint;
_client = new AmazonKeyManagementServiceClient(config);
_ownsClient = true;
@@ -94,17 +92,18 @@ internal sealed class AwsKmsFacade : IAwsKmsFacade
}
using var messageStream = new MemoryStream(digest.ToArray(), writable: false);
using var signatureStream = new MemoryStream(signature.ToArray(), writable: false);
var request = new VerifyRequest
{
KeyId = keyResource,
SigningAlgorithm = SigningAlgorithmSpec.ECDSA_SHA_256,
MessageType = MessageType.DIGEST,
Message = messageStream,
Signature = signature.ToArray(),
Signature = signatureStream,
};
var response = await _client.VerifyAsync(request, cancellationToken).ConfigureAwait(false);
return response.SignatureValid;
return response.SignatureValid ?? false;
}
public async Task<AwsKeyMetadata> GetMetadataAsync(string keyId, CancellationToken cancellationToken)
@@ -143,16 +142,18 @@ internal sealed class AwsKmsFacade : IAwsKmsFacade
}
private static AwsKeyStatus MapStatus(KeyState? state)
=> state switch
{
var name = state?.ToString();
return name switch
{
KeyState.Enabled => AwsKeyStatus.Enabled,
KeyState.Disabled => AwsKeyStatus.Disabled,
KeyState.PendingDeletion => AwsKeyStatus.PendingDeletion,
KeyState.PendingImport => AwsKeyStatus.PendingImport,
KeyState.PendingUpdate => AwsKeyStatus.PendingUpdate,
KeyState.Unavailable => AwsKeyStatus.Unavailable,
"Enabled" => AwsKeyStatus.Enabled,
"Disabled" => AwsKeyStatus.Disabled,
"PendingDeletion" => AwsKeyStatus.PendingDeletion,
"PendingImport" => AwsKeyStatus.PendingImport,
"Unavailable" => AwsKeyStatus.Unavailable,
_ => AwsKeyStatus.Unspecified,
};
}
private static string ResolveCurve(GetPublicKeyResponse response)
{
@@ -163,14 +164,18 @@ internal sealed class AwsKmsFacade : IAwsKmsFacade
if (response.KeySpec is not null)
{
return response.KeySpec.Value switch
var keySpecName = response.KeySpec.ToString();
if (!string.IsNullOrWhiteSpace(keySpecName))
{
KeySpec.ECC_NIST_P256 => "P-256",
KeySpec.ECC_SECG_P256K1 => "secp256k1",
KeySpec.ECC_NIST_P384 => "P-384",
KeySpec.ECC_NIST_P521 => "P-521",
_ => response.KeySpec.Value.ToString(),
};
return keySpecName switch
{
"ECC_NIST_P256" => "P-256",
"ECC_SECG_P256K1" => "secp256k1",
"ECC_NIST_P384" => "P-384",
"ECC_NIST_P521" => "P-521",
_ => keySpecName,
};
}
}
return "P-256";

View File

@@ -51,7 +51,7 @@ internal sealed class GcpKmsFacade : IGcpKmsFacade
var builder = new KeyManagementServiceClientBuilder
{
Endpoint = string.IsNullOrWhiteSpace(options.Endpoint)
? KeyManagementServiceClient.DefaultEndpoint.Host
? KeyManagementServiceClient.DefaultEndpoint
: options.Endpoint,
};
@@ -149,9 +149,9 @@ internal sealed class GcpKmsFacade : IGcpKmsFacade
public void Dispose()
{
if (_ownsClient)
if (_ownsClient && _client is IDisposable disposable)
{
_client.Dispose();
disposable.Dispose();
}
}

View File

@@ -10,6 +10,7 @@
<PackageReference Include="AWSSDK.KeyManagementService" Version="4.0.6" />
<PackageReference Include="Google.Cloud.Kms.V1" Version="3.19.0" />
<PackageReference Include="Pkcs11Interop" Version="4.1.0" />
<PackageReference Include="Microsoft.IdentityModel.Tokens" Version="8.14.0" />
</ItemGroup>
<ItemGroup>
<ProjectReference Include="../StellaOps.Cryptography/StellaOps.Cryptography.csproj" />