feat(api): Add Policy Registry API specification
Some checks failed
AOC Guard CI / aoc-verify (push) Has been cancelled
AOC Guard CI / aoc-guard (push) Has been cancelled
Docs CI / lint-and-preview (push) Has been cancelled
Policy Lint & Smoke / policy-lint (push) Has been cancelled
Concelier Attestation Tests / attestation-tests (push) Has been cancelled
Findings Ledger CI / build-test (push) Has been cancelled
Findings Ledger CI / migration-validation (push) Has been cancelled
Findings Ledger CI / generate-manifest (push) Has been cancelled
mock-dev-release / package-mock-release (push) Has been cancelled
- Introduced OpenAPI specification for the StellaOps Policy Registry API, covering endpoints for verification policies, policy packs, snapshots, violations, overrides, sealed mode operations, and advisory staleness tracking.
- Defined schemas, parameters, and responses for comprehensive API documentation.

chore(scanner): Add global usings for scanner analyzers

- Created GlobalUsings.cs to simplify namespace usage across analyzer libraries.

feat(scanner): Implement Surface Service Collection Extensions

- Added SurfaceServiceCollectionExtensions for dependency-injection registration of surface analysis services.
- Included methods for adding surface analysis, surface collectors, and entry point collectors to the service collection.
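For reviewers, a minimal sketch of how the new registration surface is intended to be consumed; the host wiring below is illustrative and the commented-out collector types are hypothetical placeholders, not types introduced by this change:

```csharp
using Microsoft.Extensions.DependencyInjection;
using StellaOps.Scanner.Surface;

var services = new ServiceCollection();
services.AddLogging();

// Core surface analysis services (registry, signal emitter, writer, analyzer).
services.AddSurfaceAnalysis();

// Hypothetical collectors; concrete collector types live in the analyzer libraries.
// services.AddSurfaceCollector<ExampleHttpEndpointCollector>();
// services.AddEntryPointCollector<ExampleMainEntryPointCollector>();

using var provider = services.BuildServiceProvider();
var analyzer = provider.GetRequiredService<ISurfaceAnalyzer>();
```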
@@ -0,0 +1,6 @@
global using System;
global using System.Collections.Generic;
global using System.IO;
global using System.Linq;
global using System.Threading;
global using System.Threading.Tasks;
@@ -1,171 +0,0 @@
using StellaOps.Scanner.Surface.Discovery;

namespace StellaOps.Scanner.Surface.Models;

/// <summary>
/// Complete result of surface analysis for a scan.
/// </summary>
public sealed record SurfaceAnalysisResult
{
    /// <summary>
    /// Scan identifier.
    /// </summary>
    public required string ScanId { get; init; }

    /// <summary>
    /// When analysis was performed.
    /// </summary>
    public required DateTimeOffset Timestamp { get; init; }

    /// <summary>
    /// Analysis summary statistics.
    /// </summary>
    public required SurfaceAnalysisSummary Summary { get; init; }

    /// <summary>
    /// Discovered surface entries.
    /// </summary>
    public required IReadOnlyList<SurfaceEntry> Entries { get; init; }

    /// <summary>
    /// Discovered entry points.
    /// </summary>
    public IReadOnlyList<EntryPoint>? EntryPoints { get; init; }

    /// <summary>
    /// Analysis metadata.
    /// </summary>
    public SurfaceAnalysisMetadata? Metadata { get; init; }
}

/// <summary>
/// Summary statistics for surface analysis.
/// </summary>
public sealed record SurfaceAnalysisSummary
{
    /// <summary>
    /// Total number of surface entries.
    /// </summary>
    public required int TotalEntries { get; init; }

    /// <summary>
    /// Entry counts by type.
    /// </summary>
    public required IReadOnlyDictionary<SurfaceType, int> ByType { get; init; }

    /// <summary>
    /// Entry counts by confidence level.
    /// </summary>
    public required IReadOnlyDictionary<ConfidenceLevel, int> ByConfidence { get; init; }

    /// <summary>
    /// Calculated risk score (0.0 - 1.0).
    /// </summary>
    public required double RiskScore { get; init; }

    /// <summary>
    /// High-risk entry count.
    /// </summary>
    public int HighRiskCount { get; init; }

    /// <summary>
    /// Total entry points discovered.
    /// </summary>
    public int? EntryPointCount { get; init; }

    /// <summary>
    /// Creates summary from entries.
    /// </summary>
    public static SurfaceAnalysisSummary FromEntries(IReadOnlyList<SurfaceEntry> entries)
    {
        var byType = entries
            .GroupBy(e => e.Type)
            .ToDictionary(g => g.Key, g => g.Count());

        var byConfidence = entries
            .GroupBy(e => e.Confidence)
            .ToDictionary(g => g.Key, g => g.Count());

        // Calculate risk score based on entry types and confidence
        var riskScore = CalculateRiskScore(entries);

        var highRiskCount = entries.Count(e =>
            e.Type is SurfaceType.ProcessExecution or SurfaceType.CryptoOperation or SurfaceType.SecretAccess ||
            e.Confidence == ConfidenceLevel.Verified);

        return new SurfaceAnalysisSummary
        {
            TotalEntries = entries.Count,
            ByType = byType,
            ByConfidence = byConfidence,
            RiskScore = riskScore,
            HighRiskCount = highRiskCount
        };
    }

    private static double CalculateRiskScore(IReadOnlyList<SurfaceEntry> entries)
    {
        if (entries.Count == 0) return 0.0;

        var typeWeights = new Dictionary<SurfaceType, double>
        {
            [SurfaceType.ProcessExecution] = 1.0,
            [SurfaceType.SecretAccess] = 0.9,
            [SurfaceType.CryptoOperation] = 0.8,
            [SurfaceType.DatabaseOperation] = 0.7,
            [SurfaceType.Deserialization] = 0.85,
            [SurfaceType.DynamicCode] = 0.9,
            [SurfaceType.AuthenticationPoint] = 0.6,
            [SurfaceType.NetworkEndpoint] = 0.5,
            [SurfaceType.InputHandling] = 0.5,
            [SurfaceType.ExternalCall] = 0.4,
            [SurfaceType.FileOperation] = 0.3
        };

        var confidenceMultipliers = new Dictionary<ConfidenceLevel, double>
        {
            [ConfidenceLevel.Low] = 0.5,
            [ConfidenceLevel.Medium] = 0.75,
            [ConfidenceLevel.High] = 1.0,
            [ConfidenceLevel.Verified] = 1.0
        };

        var totalWeight = entries.Sum(e =>
            typeWeights.GetValueOrDefault(e.Type, 0.3) *
            confidenceMultipliers.GetValueOrDefault(e.Confidence, 0.5));

        // Normalize to 0-1 range (cap at 100 weighted entries)
        return Math.Min(1.0, totalWeight / 100.0);
    }
}

/// <summary>
/// Metadata about the surface analysis execution.
/// </summary>
public sealed record SurfaceAnalysisMetadata
{
    /// <summary>
    /// Analysis duration in milliseconds.
    /// </summary>
    public double DurationMs { get; init; }

    /// <summary>
    /// Files analyzed count.
    /// </summary>
    public int FilesAnalyzed { get; init; }

    /// <summary>
    /// Languages detected.
    /// </summary>
    public IReadOnlyList<string>? Languages { get; init; }

    /// <summary>
    /// Frameworks detected.
    /// </summary>
    public IReadOnlyList<string>? Frameworks { get; init; }

    /// <summary>
    /// Analysis configuration used.
    /// </summary>
    public SurfaceAnalysisOptions? Options { get; init; }
}
@@ -1,121 +0,0 @@
using System.Text.Json;
using System.Text.Json.Serialization;
using Microsoft.Extensions.Logging;
using StellaOps.Scanner.Surface.Models;

namespace StellaOps.Scanner.Surface.Output;

/// <summary>
/// Interface for writing surface analysis results.
/// </summary>
public interface ISurfaceAnalysisWriter
{
    /// <summary>
    /// Writes analysis result to the specified stream.
    /// </summary>
    Task WriteAsync(
        SurfaceAnalysisResult result,
        Stream outputStream,
        CancellationToken cancellationToken = default);

    /// <summary>
    /// Serializes analysis result to JSON string.
    /// </summary>
    string Serialize(SurfaceAnalysisResult result);
}

/// <summary>
/// Store key for surface analysis results.
/// </summary>
public static class SurfaceAnalysisStoreKeys
{
    /// <summary>
    /// Key for storing surface analysis in scan artifacts.
    /// </summary>
    public const string SurfaceAnalysis = "scanner.surface.analysis";

    /// <summary>
    /// Key for storing surface entries.
    /// </summary>
    public const string SurfaceEntries = "scanner.surface.entries";

    /// <summary>
    /// Key for storing entry points.
    /// </summary>
    public const string EntryPoints = "scanner.surface.entrypoints";
}

/// <summary>
/// Default implementation of surface analysis writer.
/// Uses deterministic JSON serialization.
/// </summary>
public sealed class SurfaceAnalysisWriter : ISurfaceAnalysisWriter
{
    private readonly ILogger<SurfaceAnalysisWriter> _logger;

    private static readonly JsonSerializerOptions JsonOptions = new()
    {
        PropertyNamingPolicy = JsonNamingPolicy.CamelCase,
        WriteIndented = false,
        DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingNull,
        Converters = { new JsonStringEnumConverter(JsonNamingPolicy.CamelCase) }
    };

    private static readonly JsonSerializerOptions PrettyJsonOptions = new()
    {
        PropertyNamingPolicy = JsonNamingPolicy.CamelCase,
        WriteIndented = true,
        DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingNull,
        Converters = { new JsonStringEnumConverter(JsonNamingPolicy.CamelCase) }
    };

    public SurfaceAnalysisWriter(ILogger<SurfaceAnalysisWriter> logger)
    {
        _logger = logger;
    }

    public async Task WriteAsync(
        SurfaceAnalysisResult result,
        Stream outputStream,
        CancellationToken cancellationToken = default)
    {
        // Sort entries by ID for determinism
        var sortedResult = SortResult(result);

        await JsonSerializer.SerializeAsync(
            outputStream,
            sortedResult,
            JsonOptions,
            cancellationToken);

        _logger.LogDebug(
            "Wrote surface analysis for scan {ScanId} with {EntryCount} entries",
            result.ScanId,
            result.Entries.Count);
    }

    public string Serialize(SurfaceAnalysisResult result)
    {
        var sortedResult = SortResult(result);
        return JsonSerializer.Serialize(sortedResult, PrettyJsonOptions);
    }

    private static SurfaceAnalysisResult SortResult(SurfaceAnalysisResult result)
    {
        // Sort entries by ID for deterministic output
        var sortedEntries = result.Entries
            .OrderBy(e => e.Id)
            .ToList();

        // Sort entry points by ID if present
        var sortedEntryPoints = result.EntryPoints?
            .OrderBy(ep => ep.Id)
            .ToList();

        return result with
        {
            Entries = sortedEntries,
            EntryPoints = sortedEntryPoints
        };
    }
}
@@ -80,7 +80,7 @@ public sealed class SurfaceAnalysisWriter : ISurfaceAnalysisWriter

        var jsonOptions = options.PrettyPrint ? s_prettyJsonOptions : s_jsonOptions;

        if (options.WriteToFile && \!string.IsNullOrEmpty(options.OutputPath))
        if (options.WriteToFile && !string.IsNullOrEmpty(options.OutputPath))
        {
            var filePath = Path.Combine(options.OutputPath, $"surface-{result.ScanId}.json");
            await using var stream = File.Create(filePath);
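As a rough sketch of the options consulted in the hunk above (only the property names WriteToFile, OutputPath, and PrettyPrint appear in the diff; the record shape and defaults here are assumptions, not the repository definition):

```csharp
// Assumed shape of the output options used by SurfaceAnalysisWriter.
public sealed record SurfaceOutputOptions
{
    public bool WriteToFile { get; init; }   // when true, also persist the result to disk
    public string? OutputPath { get; init; } // directory that receives surface-{scanId}.json
    public bool PrettyPrint { get; init; }   // selects the indented serializer options
}

// Illustrative values: write an indented surface-<scanId>.json under /tmp/surface.
var outputOptions = new SurfaceOutputOptions
{
    WriteToFile = true,
    OutputPath = "/tmp/surface",
    PrettyPrint = true
};
```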
@@ -1,153 +0,0 @@
using Microsoft.Extensions.Configuration;
using Microsoft.Extensions.DependencyInjection;
using Microsoft.Extensions.DependencyInjection.Extensions;
using StellaOps.Scanner.Surface.Discovery;
using StellaOps.Scanner.Surface.Output;
using StellaOps.Scanner.Surface.Signals;

namespace StellaOps.Scanner.Surface;

/// <summary>
/// Extension methods for registering surface analysis services.
/// </summary>
public static class ServiceCollectionExtensions
{
    /// <summary>
    /// Adds surface analysis services to the service collection.
    /// </summary>
    public static IServiceCollection AddSurfaceAnalysis(
        this IServiceCollection services,
        IConfiguration? configuration = null)
    {
        // Core services
        services.TryAddSingleton<ISurfaceEntryRegistry, SurfaceEntryRegistry>();
        services.TryAddSingleton<ISurfaceSignalEmitter, SurfaceSignalEmitter>();
        services.TryAddSingleton<ISurfaceAnalysisWriter, SurfaceAnalysisWriter>();
        services.TryAddSingleton<ISurfaceAnalyzer, SurfaceAnalyzer>();

        // Configure options if configuration provided
        if (configuration != null)
        {
            services.Configure<SurfaceAnalysisOptions>(
                configuration.GetSection("Scanner:Surface"));
        }

        return services;
    }

    /// <summary>
    /// Adds surface analysis services with a signal sink.
    /// </summary>
    public static IServiceCollection AddSurfaceAnalysis<TSignalSink>(
        this IServiceCollection services,
        IConfiguration? configuration = null)
        where TSignalSink : class, ISurfaceSignalSink
    {
        services.AddSurfaceAnalysis(configuration);
        services.TryAddSingleton<ISurfaceSignalSink, TSignalSink>();
        return services;
    }

    /// <summary>
    /// Adds surface analysis services with in-memory signal sink for testing.
    /// </summary>
    public static IServiceCollection AddSurfaceAnalysisForTesting(this IServiceCollection services)
    {
        services.AddSurfaceAnalysis();
        services.TryAddSingleton<ISurfaceSignalSink, InMemorySurfaceSignalSink>();
        return services;
    }

    /// <summary>
    /// Registers a surface entry collector.
    /// </summary>
    public static IServiceCollection AddSurfaceCollector<TCollector>(this IServiceCollection services)
        where TCollector : class, ISurfaceEntryCollector
    {
        services.AddSingleton<ISurfaceEntryCollector, TCollector>();
        return services;
    }

    /// <summary>
    /// Registers multiple surface entry collectors.
    /// </summary>
    public static IServiceCollection AddSurfaceCollectors(
        this IServiceCollection services,
        params Type[] collectorTypes)
    {
        foreach (var type in collectorTypes)
        {
            if (!typeof(ISurfaceEntryCollector).IsAssignableFrom(type))
            {
                throw new ArgumentException(
                    $"Type {type.Name} does not implement ISurfaceEntryCollector",
                    nameof(collectorTypes));
            }

            services.AddSingleton(typeof(ISurfaceEntryCollector), type);
        }

        return services;
    }
}

/// <summary>
/// Builder for configuring surface analysis.
/// </summary>
public sealed class SurfaceAnalysisBuilder
{
    private readonly IServiceCollection _services;

    internal SurfaceAnalysisBuilder(IServiceCollection services)
    {
        _services = services;
    }

    /// <summary>
    /// Registers a collector.
    /// </summary>
    public SurfaceAnalysisBuilder AddCollector<TCollector>()
        where TCollector : class, ISurfaceEntryCollector
    {
        _services.AddSurfaceCollector<TCollector>();
        return this;
    }

    /// <summary>
    /// Configures a custom signal sink.
    /// </summary>
    public SurfaceAnalysisBuilder UseSignalSink<TSignalSink>()
        where TSignalSink : class, ISurfaceSignalSink
    {
        _services.TryAddSingleton<ISurfaceSignalSink, TSignalSink>();
        return this;
    }

    /// <summary>
    /// Configures options.
    /// </summary>
    public SurfaceAnalysisBuilder Configure(Action<SurfaceAnalysisOptions> configure)
    {
        _services.Configure(configure);
        return this;
    }
}

/// <summary>
/// Extension for fluent builder pattern.
/// </summary>
public static class SurfaceAnalysisBuilderExtensions
{
    /// <summary>
    /// Adds surface analysis with fluent configuration.
    /// </summary>
    public static IServiceCollection AddSurfaceAnalysis(
        this IServiceCollection services,
        Action<SurfaceAnalysisBuilder> configure)
    {
        services.AddSurfaceAnalysis();
        var builder = new SurfaceAnalysisBuilder(services);
        configure(builder);
        return services;
    }
}
@@ -1,177 +0,0 @@
using Microsoft.Extensions.Logging;
using StellaOps.Scanner.Surface.Models;

namespace StellaOps.Scanner.Surface.Signals;

/// <summary>
/// Interface for emitting surface analysis signals for policy evaluation.
/// </summary>
public interface ISurfaceSignalEmitter
{
    /// <summary>
    /// Emits signals for the given analysis result.
    /// </summary>
    Task EmitAsync(
        string scanId,
        SurfaceAnalysisResult result,
        CancellationToken cancellationToken = default);

    /// <summary>
    /// Emits custom signals.
    /// </summary>
    Task EmitAsync(
        string scanId,
        IDictionary<string, object> signals,
        CancellationToken cancellationToken = default);
}

/// <summary>
/// Default implementation of surface signal emitter.
/// Converts analysis results to policy signals.
/// </summary>
public sealed class SurfaceSignalEmitter : ISurfaceSignalEmitter
{
    private readonly ILogger<SurfaceSignalEmitter> _logger;
    private readonly ISurfaceSignalSink? _sink;

    public SurfaceSignalEmitter(
        ILogger<SurfaceSignalEmitter> logger,
        ISurfaceSignalSink? sink = null)
    {
        _logger = logger;
        _sink = sink;
    }

    public async Task EmitAsync(
        string scanId,
        SurfaceAnalysisResult result,
        CancellationToken cancellationToken = default)
    {
        var signals = BuildSignals(result);
        await EmitAsync(scanId, signals, cancellationToken);
    }

    public async Task EmitAsync(
        string scanId,
        IDictionary<string, object> signals,
        CancellationToken cancellationToken = default)
    {
        _logger.LogDebug(
            "Emitting {SignalCount} surface signals for scan {ScanId}",
            signals.Count,
            scanId);

        if (_sink != null)
        {
            await _sink.WriteAsync(scanId, signals, cancellationToken);
        }
        else
        {
            _logger.LogDebug(
                "No signal sink configured, signals for scan {ScanId}: {Signals}",
                scanId,
                string.Join(", ", signals.Select(kv => $"{kv.Key}={kv.Value}")));
        }
    }

    private static Dictionary<string, object> BuildSignals(SurfaceAnalysisResult result)
    {
        var signals = new Dictionary<string, object>
        {
            [SurfaceSignalKeys.TotalSurfaceArea] = result.Summary.TotalEntries,
            [SurfaceSignalKeys.RiskScore] = result.Summary.RiskScore,
            [SurfaceSignalKeys.HighConfidenceCount] = result.Entries
                .Count(e => e.Confidence >= ConfidenceLevel.High)
        };

        // Add counts by type
        foreach (var (type, count) in result.Summary.ByType)
        {
            var key = type switch
            {
                SurfaceType.NetworkEndpoint => SurfaceSignalKeys.NetworkEndpoints,
                SurfaceType.FileOperation => SurfaceSignalKeys.FileOperations,
                SurfaceType.ProcessExecution => SurfaceSignalKeys.ProcessSpawns,
                SurfaceType.CryptoOperation => SurfaceSignalKeys.CryptoUsage,
                SurfaceType.AuthenticationPoint => SurfaceSignalKeys.AuthPoints,
                SurfaceType.InputHandling => SurfaceSignalKeys.InputHandlers,
                SurfaceType.SecretAccess => SurfaceSignalKeys.SecretAccess,
                SurfaceType.ExternalCall => SurfaceSignalKeys.ExternalCalls,
                SurfaceType.DatabaseOperation => SurfaceSignalKeys.DatabaseOperations,
                SurfaceType.Deserialization => SurfaceSignalKeys.DeserializationPoints,
                SurfaceType.DynamicCode => SurfaceSignalKeys.DynamicCodePoints,
                _ => $"{SurfaceSignalKeys.Prefix}{type.ToString().ToLowerInvariant()}"
            };

            signals[key] = count;
        }

        // Add entry point count if available
        if (result.EntryPoints is { Count: > 0 })
        {
            signals[SurfaceSignalKeys.EntryPointCount] = result.EntryPoints.Count;
        }

        // Add framework signals if metadata available
        if (result.Metadata?.Frameworks is { Count: > 0 } frameworks)
        {
            foreach (var framework in frameworks)
            {
                var normalizedName = framework.ToLowerInvariant().Replace(" ", "_").Replace(".", "_");
                signals[$"{SurfaceSignalKeys.FrameworkPrefix}{normalizedName}"] = true;
            }
        }

        // Add language signals if metadata available
        if (result.Metadata?.Languages is { Count: > 0 } languages)
        {
            foreach (var language in languages)
            {
                var normalizedName = language.ToLowerInvariant();
                signals[$"{SurfaceSignalKeys.LanguagePrefix}{normalizedName}"] = true;
            }
        }

        return signals;
    }
}

/// <summary>
/// Sink for writing surface signals to storage.
/// </summary>
public interface ISurfaceSignalSink
{
    /// <summary>
    /// Writes signals to storage.
    /// </summary>
    Task WriteAsync(
        string scanId,
        IDictionary<string, object> signals,
        CancellationToken cancellationToken = default);
}

/// <summary>
/// In-memory signal sink for testing.
/// </summary>
public sealed class InMemorySurfaceSignalSink : ISurfaceSignalSink
{
    private readonly Dictionary<string, IDictionary<string, object>> _signals = new();

    public IReadOnlyDictionary<string, IDictionary<string, object>> Signals => _signals;

    public Task WriteAsync(
        string scanId,
        IDictionary<string, object> signals,
        CancellationToken cancellationToken = default)
    {
        _signals[scanId] = new Dictionary<string, object>(signals);
        return Task.CompletedTask;
    }

    public IDictionary<string, object>? GetSignals(string scanId)
    {
        return _signals.TryGetValue(scanId, out var signals) ? signals : null;
    }

    public void Clear() => _signals.Clear();
}
@@ -1,64 +0,0 @@
namespace StellaOps.Scanner.Surface.Signals;

/// <summary>
/// Standard signal keys for surface analysis policy integration.
/// </summary>
public static class SurfaceSignalKeys
{
    /// <summary>Prefix for all surface signals.</summary>
    public const string Prefix = "surface.";

    /// <summary>Network endpoint count.</summary>
    public const string NetworkEndpoints = "surface.network.endpoints";

    /// <summary>Exposed port count.</summary>
    public const string ExposedPorts = "surface.network.ports";

    /// <summary>File operation count.</summary>
    public const string FileOperations = "surface.file.operations";

    /// <summary>Process spawn count.</summary>
    public const string ProcessSpawns = "surface.process.spawns";

    /// <summary>Crypto operation count.</summary>
    public const string CryptoUsage = "surface.crypto.usage";

    /// <summary>Authentication point count.</summary>
    public const string AuthPoints = "surface.auth.points";

    /// <summary>Input handler count.</summary>
    public const string InputHandlers = "surface.input.handlers";

    /// <summary>Secret access point count.</summary>
    public const string SecretAccess = "surface.secrets.access";

    /// <summary>External call count.</summary>
    public const string ExternalCalls = "surface.external.calls";

    /// <summary>Database operation count.</summary>
    public const string DatabaseOperations = "surface.database.operations";

    /// <summary>Deserialization point count.</summary>
    public const string DeserializationPoints = "surface.deserialization.points";

    /// <summary>Dynamic code execution count.</summary>
    public const string DynamicCodePoints = "surface.dynamic.code";

    /// <summary>Total surface area score.</summary>
    public const string TotalSurfaceArea = "surface.total.area";

    /// <summary>Overall risk score (0.0-1.0).</summary>
    public const string RiskScore = "surface.risk.score";

    /// <summary>High-confidence entry count.</summary>
    public const string HighConfidenceCount = "surface.high_confidence.count";

    /// <summary>Entry point count.</summary>
    public const string EntryPointCount = "surface.entry_points.count";

    /// <summary>Framework-specific prefix.</summary>
    public const string FrameworkPrefix = "surface.framework.";

    /// <summary>Language-specific prefix.</summary>
    public const string LanguagePrefix = "surface.language.";
}
@@ -16,7 +16,6 @@
    <PackageReference Include="Microsoft.Extensions.DependencyInjection.Abstractions" Version="10.0.0" />
    <PackageReference Include="Microsoft.Extensions.Logging.Abstractions" Version="10.0.0" />
    <PackageReference Include="Microsoft.Extensions.Options" Version="10.0.0" />
    <PackageReference Include="System.Text.Json" Version="10.0.0" />
  </ItemGroup>
  <ItemGroup>
    <ProjectReference Include="..\StellaOps.Scanner.Surface.FS\StellaOps.Scanner.Surface.FS.csproj" />
@@ -7,21 +7,41 @@ using StellaOps.Scanner.Surface.Signals;

namespace StellaOps.Scanner.Surface;

/// <summary>
/// Main interface for surface analysis operations.
/// Options for surface analysis execution.
/// </summary>
public sealed record SurfaceAnalysisOptions
{
    /// <summary>Collector options.</summary>
    public SurfaceCollectorOptions CollectorOptions { get; init; } = new();

    /// <summary>Output options.</summary>
    public SurfaceOutputOptions OutputOptions { get; init; } = new();

    /// <summary>Whether to emit policy signals.</summary>
    public bool EmitSignals { get; init; } = true;

    /// <summary>Whether to discover entry points.</summary>
    public bool DiscoverEntryPoints { get; init; } = true;

    /// <summary>Languages to analyze for entry points.</summary>
    public IReadOnlySet<string> Languages { get; init; } = new HashSet<string>();
}

/// <summary>
/// Interface for orchestrating surface analysis.
/// </summary>
public interface ISurfaceAnalyzer
{
    /// <summary>
    /// Performs surface analysis on the given context.
    /// </summary>
    /// <summary>Runs surface analysis on the specified path.</summary>
    Task<SurfaceAnalysisResult> AnalyzeAsync(
        SurfaceCollectionContext context,
        string scanId,
        string rootPath,
        SurfaceAnalysisOptions? options = null,
        CancellationToken cancellationToken = default);
}

/// <summary>
/// Default implementation of surface analyzer.
/// Coordinates collectors, signal emission, and output writing.
/// Default surface analyzer implementation.
/// </summary>
public sealed class SurfaceAnalyzer : ISurfaceAnalyzer
{
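A short usage sketch for the new options record and analyzer entry point, assuming the revised signature keeps only the scanId/rootPath/options/cancellationToken parameters (the values and the `analyzer` variable are illustrative):

```csharp
// Illustrative configuration of a surface analysis run.
var options = new SurfaceAnalysisOptions
{
    EmitSignals = true,
    DiscoverEntryPoints = true,
    Languages = new HashSet<string> { "csharp", "python" } // illustrative language ids
};

// analyzer is an ISurfaceAnalyzer resolved from DI; scan id and root path are caller-supplied.
SurfaceAnalysisResult result = await analyzer.AnalyzeAsync(
    scanId: "scan-123",
    rootPath: "/workspace/app",
    options: options,
    cancellationToken: CancellationToken.None);
```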
@@ -43,59 +63,152 @@ public sealed class SurfaceAnalyzer : ISurfaceAnalyzer

    }

    public async Task<SurfaceAnalysisResult> AnalyzeAsync(
        SurfaceCollectionContext context,
        string scanId,
        string rootPath,
        SurfaceAnalysisOptions? options = null,
        CancellationToken cancellationToken = default)
    {
        ArgumentNullException.ThrowIfNull(context);
        ArgumentException.ThrowIfNullOrWhiteSpace(scanId);
        ArgumentException.ThrowIfNullOrWhiteSpace(rootPath);
        options ??= new SurfaceAnalysisOptions();

        var startTime = DateTimeOffset.UtcNow;
        _logger.LogInformation("Starting surface analysis for scan {ScanId} at {RootPath}", scanId, rootPath);

        _logger.LogInformation(
            "Starting surface analysis for scan {ScanId} with {FileCount} files",
            context.ScanId,
            context.Files.Count);

        // Collect entries from all applicable collectors
        var entries = new List<SurfaceEntry>();
        await foreach (var entry in _registry.CollectAllAsync(context, cancellationToken))
        var context = new SurfaceCollectorContext
        {
            entries.Add(entry);
            ScanId = scanId,
            RootPath = rootPath,
            Options = options.CollectorOptions
        };

        // Collect surface entries
        var entries = new List<SurfaceEntry>();
        var collectors = _registry.GetCollectors();

        _logger.LogDebug("Running {CollectorCount} surface collectors", collectors.Count);

        foreach (var collector in collectors)
        {
            cancellationToken.ThrowIfCancellationRequested();

            try
            {
                await foreach (var entry in collector.CollectAsync(context, cancellationToken))
                {
                    entries.Add(entry);
                }
                _logger.LogDebug("Collector {CollectorId} found {Count} entries", collector.CollectorId, entries.Count);
            }
            catch (Exception ex)
            {
                _logger.LogWarning(ex, "Collector {CollectorId} failed", collector.CollectorId);
            }
        }

        _logger.LogDebug(
            "Collected {EntryCount} surface entries for scan {ScanId}",
            entries.Count,
            context.ScanId);
        // Collect entry points
        var entryPoints = new List<EntryPoint>();
        if (options.DiscoverEntryPoints)
        {
            var epCollectors = _registry.GetEntryPointCollectors();
            _logger.LogDebug("Running {Count} entry point collectors", epCollectors.Count);

            foreach (var collector in epCollectors)
            {
                cancellationToken.ThrowIfCancellationRequested();

                try
                {
                    await foreach (var ep in collector.CollectAsync(context, cancellationToken))
                    {
                        entryPoints.Add(ep);
                    }
                }
                catch (Exception ex)
                {
                    _logger.LogWarning(ex, "Entry point collector {CollectorId} failed", collector.CollectorId);
                }
            }
        }

        // Sort entries by ID for determinism
        entries.Sort((a, b) => string.Compare(a.Id, b.Id, StringComparison.Ordinal));
        entryPoints.Sort((a, b) => string.Compare(a.Id, b.Id, StringComparison.Ordinal));

        // Build summary
        var summary = SurfaceAnalysisSummary.FromEntries(entries);
        var byType = entries.GroupBy(e => e.Type).ToDictionary(g => g.Key, g => g.Count());
        var summary = new SurfaceAnalysisSummary
        {
            TotalEntries = entries.Count,
            ByType = byType,
            RiskScore = CalculateRiskScore(entries, entryPoints)
        };

        // Create result
        var result = new SurfaceAnalysisResult
        {
            ScanId = context.ScanId,
            ScanId = scanId,
            Timestamp = DateTimeOffset.UtcNow,
            Summary = summary,
            Entries = entries,
            Metadata = new SurfaceAnalysisMetadata
            {
                DurationMs = (DateTimeOffset.UtcNow - startTime).TotalMilliseconds,
                FilesAnalyzed = context.Files.Count,
                Languages = context.DetectedLanguages,
                Frameworks = context.DetectedFrameworks,
                Options = context.Options
            }
            EntryPoints = entryPoints
        };

        // Emit signals for policy evaluation
        await _signalEmitter.EmitAsync(context.ScanId, result, cancellationToken);
        // Emit signals
        if (options.EmitSignals)
        {
            var signals = SurfaceSignalEmitter.BuildSignals(result);
            await _signalEmitter.EmitAsync(scanId, signals, cancellationToken);
        }

        // Write output
        await _writer.WriteAsync(result, options.OutputOptions, cancellationToken);

        _logger.LogInformation(
            "Completed surface analysis for scan {ScanId}: {TotalEntries} entries, risk score {RiskScore:F2}",
            context.ScanId,
            result.Summary.TotalEntries,
            result.Summary.RiskScore);
            "Surface analysis complete: {EntryCount} entries, {EntryPointCount} entry points, risk score {RiskScore:F2}",
            entries.Count, entryPoints.Count, summary.RiskScore);

        return result;
    }

    private static double CalculateRiskScore(IReadOnlyList<SurfaceEntry> entries, IReadOnlyList<EntryPoint> entryPoints)
    {
        if (entries.Count == 0 && entryPoints.Count == 0)
            return 0.0;

        // Weight high-risk types more heavily
        var riskWeights = new Dictionary<SurfaceType, double>
        {
            [SurfaceType.SecretAccess] = 1.0,
            [SurfaceType.AuthenticationPoint] = 0.9,
            [SurfaceType.ProcessExecution] = 0.8,
            [SurfaceType.CryptoOperation] = 0.7,
            [SurfaceType.ExternalCall] = 0.6,
            [SurfaceType.NetworkEndpoint] = 0.5,
            [SurfaceType.InputHandling] = 0.5,
            [SurfaceType.FileOperation] = 0.3
        };

        double totalWeight = 0;
        double weightedSum = 0;

        foreach (var entry in entries)
        {
            var weight = riskWeights.GetValueOrDefault(entry.Type, 0.5);
            var confidence = entry.Confidence switch
            {
                ConfidenceLevel.VeryHigh => 1.0,
                ConfidenceLevel.High => 0.8,
                ConfidenceLevel.Medium => 0.5,
                ConfidenceLevel.Low => 0.2,
                _ => 0.5
            };
            weightedSum += weight * confidence;
            totalWeight += 1.0;
        }

        // Entry points add to risk
        weightedSum += entryPoints.Count * 0.3;
        totalWeight += entryPoints.Count * 0.5;

        return totalWeight > 0 ? Math.Min(1.0, weightedSum / totalWeight) : 0.0;
    }
}
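To make the revised scoring concrete, a small worked example using the weights shown above (the input values are chosen purely for illustration):

```csharp
// Two entries plus one entry point:
//   SecretAccess with High confidence     -> 1.0 * 0.8 = 0.80
//   FileOperation with Medium confidence  -> 0.3 * 0.5 = 0.15
//   1 entry point                         -> +0.3 to the weighted sum, +0.5 to the total weight
double weightedSum = 0.80 + 0.15 + 0.3;                       // 1.25
double totalWeight = 1.0 + 1.0 + 0.5;                         // 2.50
double riskScore = Math.Min(1.0, weightedSum / totalWeight);  // 0.50
```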
@@ -0,0 +1,41 @@
using Microsoft.Extensions.DependencyInjection;
using StellaOps.Scanner.Surface.Discovery;
using StellaOps.Scanner.Surface.Output;
using StellaOps.Scanner.Surface.Signals;

namespace StellaOps.Scanner.Surface;

/// <summary>
/// DI registration extensions for Scanner Surface analysis.
/// </summary>
public static class SurfaceServiceCollectionExtensions
{
    /// <summary>Adds surface analysis services to the service collection.</summary>
    public static IServiceCollection AddSurfaceAnalysis(this IServiceCollection services)
    {
        ArgumentNullException.ThrowIfNull(services);

        services.AddSingleton<ISurfaceEntryRegistry, SurfaceEntryRegistry>();
        services.AddSingleton<ISurfaceSignalEmitter, SurfaceSignalEmitter>();
        services.AddSingleton<ISurfaceAnalysisWriter, SurfaceAnalysisWriter>();
        services.AddSingleton<ISurfaceAnalyzer, SurfaceAnalyzer>();

        return services;
    }

    /// <summary>Adds a surface entry collector.</summary>
    public static IServiceCollection AddSurfaceCollector<T>(this IServiceCollection services)
        where T : class, ISurfaceEntryCollector
    {
        services.AddSingleton<ISurfaceEntryCollector, T>();
        return services;
    }

    /// <summary>Adds an entry point collector.</summary>
    public static IServiceCollection AddEntryPointCollector<T>(this IServiceCollection services)
        where T : class, IEntryPointCollector
    {
        services.AddSingleton<IEntryPointCollector, T>();
        return services;
    }
}
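For completeness, a sketch of what a collector registered through these extensions might look like; the ISurfaceEntryCollector member shapes are inferred from their call sites in SurfaceAnalyzer (a CollectorId plus an async stream of SurfaceEntry), so treat this as a rough template rather than the actual contract:

```csharp
using StellaOps.Scanner.Surface.Discovery;
using StellaOps.Scanner.Surface.Models;

// Hypothetical collector; the interface shape is assumed, not copied from the repository.
public sealed class ExampleConfigFileCollector : ISurfaceEntryCollector
{
    public string CollectorId => "example.config-files";

    public async IAsyncEnumerable<SurfaceEntry> CollectAsync(
        SurfaceCollectorContext context,
        [System.Runtime.CompilerServices.EnumeratorCancellation] CancellationToken cancellationToken = default)
    {
        // Illustrative: a real collector would inspect files under context.RootPath
        // and yield SurfaceEntry instances describing what it found.
        await Task.CompletedTask;
        yield break;
    }
}

// Registration (hypothetical):
// services.AddSurfaceCollector<ExampleConfigFileCollector>();
```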
@@ -1,82 +0,0 @@
using System.Collections.Generic;
using StellaOps.Scheduler.Models;
using StellaOps.Scheduler.Storage.Mongo.Repositories;

namespace StellaOps.Scheduler.WebService.GraphJobs;

internal sealed class MongoGraphJobStore : IGraphJobStore
{
    private readonly IGraphJobRepository _repository;

    public MongoGraphJobStore(IGraphJobRepository repository)
    {
        _repository = repository ?? throw new ArgumentNullException(nameof(repository));
    }

    public async ValueTask<GraphBuildJob> AddAsync(GraphBuildJob job, CancellationToken cancellationToken)
    {
        await _repository.InsertAsync(job, cancellationToken);
        return job;
    }

    public async ValueTask<GraphOverlayJob> AddAsync(GraphOverlayJob job, CancellationToken cancellationToken)
    {
        await _repository.InsertAsync(job, cancellationToken);
        return job;
    }

    public async ValueTask<GraphJobCollection> GetJobsAsync(string tenantId, GraphJobQuery query, CancellationToken cancellationToken)
    {
        var normalized = query.Normalize();
        var builds = normalized.Type is null or GraphJobQueryType.Build
            ? await _repository.ListBuildJobsAsync(tenantId, normalized.Status, normalized.Limit ?? 50, cancellationToken)
            : Array.Empty<GraphBuildJob>();

        var overlays = normalized.Type is null or GraphJobQueryType.Overlay
            ? await _repository.ListOverlayJobsAsync(tenantId, normalized.Status, normalized.Limit ?? 50, cancellationToken)
            : Array.Empty<GraphOverlayJob>();

        return GraphJobCollection.From(builds, overlays);
    }

    public async ValueTask<GraphBuildJob?> GetBuildJobAsync(string tenantId, string jobId, CancellationToken cancellationToken)
        => await _repository.GetBuildJobAsync(tenantId, jobId, cancellationToken);

    public async ValueTask<GraphOverlayJob?> GetOverlayJobAsync(string tenantId, string jobId, CancellationToken cancellationToken)
        => await _repository.GetOverlayJobAsync(tenantId, jobId, cancellationToken);

    public async ValueTask<GraphJobUpdateResult<GraphBuildJob>> UpdateAsync(GraphBuildJob job, GraphJobStatus expectedStatus, CancellationToken cancellationToken)
    {
        if (await _repository.TryReplaceAsync(job, expectedStatus, cancellationToken).ConfigureAwait(false))
        {
            return GraphJobUpdateResult<GraphBuildJob>.UpdatedResult(job);
        }

        var existing = await _repository.GetBuildJobAsync(job.TenantId, job.Id, cancellationToken).ConfigureAwait(false);
        if (existing is null)
        {
            throw new KeyNotFoundException($"Graph build job '{job.Id}' not found.");
        }

        return GraphJobUpdateResult<GraphBuildJob>.NotUpdated(existing);
    }

    public async ValueTask<GraphJobUpdateResult<GraphOverlayJob>> UpdateAsync(GraphOverlayJob job, GraphJobStatus expectedStatus, CancellationToken cancellationToken)
    {
        if (await _repository.TryReplaceOverlayAsync(job, expectedStatus, cancellationToken).ConfigureAwait(false))
        {
            return GraphJobUpdateResult<GraphOverlayJob>.UpdatedResult(job);
        }

        var existing = await _repository.GetOverlayJobAsync(job.TenantId, job.Id, cancellationToken).ConfigureAwait(false);
        if (existing is null)
        {
            throw new KeyNotFoundException($"Graph overlay job '{job.Id}' not found.");
        }

        return GraphJobUpdateResult<GraphOverlayJob>.NotUpdated(existing);
    }

    public async ValueTask<IReadOnlyCollection<GraphOverlayJob>> GetOverlayJobsAsync(string tenantId, CancellationToken cancellationToken)
        => await _repository.ListOverlayJobsAsync(tenantId, cancellationToken);
}
@@ -5,7 +5,7 @@ using System.ComponentModel.DataAnnotations;
using System.Linq;
using Microsoft.Extensions.Logging;
using StellaOps.Scheduler.Models;
using StellaOps.Scheduler.Storage.Mongo.Repositories;
using StellaOps.Scheduler.Storage.Postgres.Repositories;
using StellaOps.Scheduler.WebService;

namespace StellaOps.Scheduler.WebService.PolicyRuns;

@@ -6,7 +6,7 @@ using System.Text.Json.Serialization;
using System.Threading;
using System.Threading.Tasks;
using StellaOps.Scheduler.Models;
using StellaOps.Scheduler.Storage.Mongo.Repositories;
using StellaOps.Scheduler.Storage.Postgres.Repositories;

namespace StellaOps.Scheduler.WebService.PolicySimulations;

@@ -3,7 +3,7 @@ using System.Collections.Generic;
using System.Collections.Immutable;
using System.Linq;
using StellaOps.Scheduler.Models;
using StellaOps.Scheduler.Storage.Mongo.Repositories;
using StellaOps.Scheduler.Storage.Postgres.Repositories;

namespace StellaOps.Scheduler.WebService.Runs;

@@ -10,8 +10,7 @@ using Microsoft.AspNetCore.Routing;
using Microsoft.Extensions.Primitives;
using StellaOps.Scheduler.ImpactIndex;
using StellaOps.Scheduler.Models;
using StellaOps.Scheduler.Storage.Mongo.Repositories;
using StellaOps.Scheduler.Storage.Mongo.Services;
using StellaOps.Scheduler.Storage.Postgres.Repositories;
using StellaOps.Scheduler.WebService.Auth;

namespace StellaOps.Scheduler.WebService.Runs;

@@ -9,7 +9,7 @@ using Microsoft.AspNetCore.Http;
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Options;
using StellaOps.Scheduler.Models;
using StellaOps.Scheduler.Storage.Mongo.Repositories;
using StellaOps.Scheduler.Storage.Postgres.Repositories;

namespace StellaOps.Scheduler.WebService.Runs;

@@ -2,8 +2,7 @@ using System.ComponentModel.DataAnnotations;
using System.Globalization;
using System.Text;
using StellaOps.Scheduler.Models;
using StellaOps.Scheduler.Storage.Mongo.Repositories;
using StellaOps.Scheduler.Storage.Mongo.Services;
using StellaOps.Scheduler.Storage.Postgres.Repositories;

namespace StellaOps.Scheduler.WebService;

@@ -2,9 +2,7 @@ using System.Collections.Concurrent;
using System.Collections.Immutable;
using MongoDB.Driver;
using StellaOps.Scheduler.Models;
using StellaOps.Scheduler.Storage.Mongo.Projections;
using StellaOps.Scheduler.Storage.Mongo.Repositories;
using StellaOps.Scheduler.Storage.Mongo.Services;
using StellaOps.Scheduler.Storage.Postgres.Repositories;

namespace StellaOps.Scheduler.WebService.Schedules;

@@ -2,7 +2,7 @@ using System.Collections.Immutable;
using System.ComponentModel.DataAnnotations;
using System.Text.Json.Serialization;
using StellaOps.Scheduler.Models;
using StellaOps.Scheduler.Storage.Mongo.Projections;
using StellaOps.Scheduler.Storage.Postgres.Repositories;

namespace StellaOps.Scheduler.WebService.Schedules;

@@ -6,8 +6,7 @@ using Microsoft.AspNetCore.Mvc;
using Microsoft.AspNetCore.Routing;
using Microsoft.Extensions.Logging;
using StellaOps.Scheduler.Models;
using StellaOps.Scheduler.Storage.Mongo.Repositories;
using StellaOps.Scheduler.Storage.Mongo.Services;
using StellaOps.Scheduler.Storage.Postgres.Repositories;
using StellaOps.Scheduler.WebService.Auth;

namespace StellaOps.Scheduler.WebService.Schedules;
@@ -1,14 +1,12 @@
using System.Text.Json;
using MongoDB.Bson;
using MongoDB.Bson.Serialization;
using MongoDB.Driver;

using Npgsql;
using Scheduler.Backfill;
using StellaOps.Scheduler.Models;
using StellaOps.Scheduler.Storage.Mongo.Options;
using StellaOps.Scheduler.Storage.Postgres;
using StellaOps.Scheduler.Storage.Postgres.Repositories;

var parsed = ParseArgs(args);
var options = BackfillOptions.From(parsed.MongoConnection, parsed.MongoDatabase, parsed.PostgresConnection, parsed.BatchSize, parsed.DryRun);
var options = BackfillOptions.From(parsed.PostgresConnection, parsed.BatchSize, parsed.DryRun);

var runner = new BackfillRunner(options);
await runner.RunAsync();
@@ -16,8 +14,6 @@ return 0;

static BackfillCliOptions ParseArgs(string[] args)
{
    string? mongo = null;
    string? mongoDb = null;
    string? pg = null;
    int batch = 500;
    bool dryRun = false;
@@ -26,12 +22,6 @@ static BackfillCliOptions ParseArgs(string[] args)
    {
        switch (args[i])
        {
            case "--mongo" or "-m":
                mongo = NextValue(args, ref i);
                break;
            case "--mongo-db":
                mongoDb = NextValue(args, ref i);
                break;
            case "--pg" or "-p":
                pg = NextValue(args, ref i);
                break;
@@ -46,7 +36,7 @@ static BackfillCliOptions ParseArgs(string[] args)
        }
    }

    return new BackfillCliOptions(mongo, mongoDb, pg, batch, dryRun);
    return new BackfillCliOptions(pg, batch, dryRun);
}

static string NextValue(string[] args, ref int index)
@@ -60,256 +50,78 @@ static string NextValue(string[] args, ref int index)
|
||||
}
|
||||
|
||||
internal sealed record BackfillCliOptions(
|
||||
string? MongoConnection,
|
||||
string? MongoDatabase,
|
||||
string? PostgresConnection,
|
||||
int BatchSize,
|
||||
bool DryRun);
|
||||
|
||||
internal sealed record BackfillOptions(
|
||||
string MongoConnectionString,
|
||||
string MongoDatabase,
|
||||
string PostgresConnectionString,
|
||||
int BatchSize,
|
||||
bool DryRun)
|
||||
{
|
||||
public static BackfillOptions From(string? mongoConn, string? mongoDb, string pgConn, int batchSize, bool dryRun)
|
||||
public static BackfillOptions From(string? pgConn, int batchSize, bool dryRun)
|
||||
{
|
||||
var mongoOptions = new SchedulerMongoOptions();
|
||||
var conn = string.IsNullOrWhiteSpace(mongoConn)
|
||||
? Environment.GetEnvironmentVariable("MONGO_CONNECTION_STRING") ?? mongoOptions.ConnectionString
|
||||
: mongoConn;
|
||||
|
||||
var database = string.IsNullOrWhiteSpace(mongoDb)
|
||||
? Environment.GetEnvironmentVariable("MONGO_DATABASE") ?? mongoOptions.Database
|
||||
: mongoDb!;
|
||||
|
||||
var pg = string.IsNullOrWhiteSpace(pgConn)
|
||||
? throw new ArgumentException("PostgreSQL connection string is required (--pg or POSTGRES_CONNECTION_STRING)")
|
||||
? Environment.GetEnvironmentVariable("POSTGRES_CONNECTION_STRING")
|
||||
: pgConn;
|
||||
|
||||
if (string.IsNullOrWhiteSpace(pg) && Environment.GetEnvironmentVariable("POSTGRES_CONNECTION_STRING") is { } envPg)
|
||||
{
|
||||
pg = envPg;
|
||||
}
|
||||
|
||||
if (string.IsNullOrWhiteSpace(pg))
|
||||
{
|
||||
throw new ArgumentException("PostgreSQL connection string is required.");
|
||||
throw new ArgumentException("PostgreSQL connection string is required (--pg or POSTGRES_CONNECTION_STRING)");
|
||||
}
|
||||
|
||||
return new BackfillOptions(conn, database, pg, Math.Max(50, batchSize), dryRun);
|
||||
return new BackfillOptions(pg!, Math.Max(50, batchSize), dryRun);
|
||||
}
|
||||
}
|
||||
|
||||
internal sealed class BackfillRunner
|
||||
{
|
||||
private readonly BackfillOptions _options;
|
||||
private readonly IMongoDatabase _mongo;
|
||||
private readonly NpgsqlDataSource _pg;
|
||||
private readonly SchedulerDataSource _dataSource;
|
||||
private readonly IGraphJobRepository _graphJobRepository;
|
||||
|
||||
public BackfillRunner(BackfillOptions options)
|
||||
{
|
||||
_options = options;
|
||||
_mongo = new MongoClient(options.MongoConnectionString).GetDatabase(options.MongoDatabase);
|
||||
_pg = NpgsqlDataSource.Create(options.PostgresConnectionString);
|
||||
_dataSource = new SchedulerDataSource(Options.Create(new PostgresOptions
|
||||
{
|
||||
ConnectionString = options.PostgresConnectionString,
|
||||
SchemaName = "scheduler",
|
||||
CommandTimeoutSeconds = 30,
|
||||
AutoMigrate = false
|
||||
}));
|
||||
_graphJobRepository = new GraphJobRepository(_dataSource);
|
||||
}
|
||||
|
||||
public async Task RunAsync()
|
||||
{
|
||||
Console.WriteLine($"Mongo -> Postgres backfill starting (dry-run={_options.DryRun})");
|
||||
await BackfillSchedulesAsync();
|
||||
await BackfillRunsAsync();
|
||||
Console.WriteLine("Backfill complete.");
|
||||
}
|
||||
Console.WriteLine($"Postgres graph job backfill starting (dry-run={_options.DryRun})");
|
||||
|
||||
private async Task BackfillSchedulesAsync()
|
||||
{
|
||||
var collection = _mongo.GetCollection<BsonDocument>(new SchedulerMongoOptions().SchedulesCollection);
|
||||
using var cursor = await collection.Find(FilterDefinition<BsonDocument>.Empty).ToCursorAsync();
|
||||
|
||||
var batch = new List<Schedule>(_options.BatchSize);
|
||||
long total = 0;
|
||||
|
||||
while (await cursor.MoveNextAsync())
|
||||
{
|
||||
foreach (var doc in cursor.Current)
|
||||
{
|
||||
var schedule = BsonSerializer.Deserialize<Schedule>(doc);
|
||||
batch.Add(schedule);
|
||||
if (batch.Count >= _options.BatchSize)
|
||||
{
|
||||
total += await PersistSchedulesAsync(batch);
|
||||
batch.Clear();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (batch.Count > 0)
|
||||
{
|
||||
total += await PersistSchedulesAsync(batch);
|
||||
}
|
||||
|
||||
Console.WriteLine($"Schedules backfilled: {total}");
|
||||
}
|
||||
|
||||
private async Task<long> PersistSchedulesAsync(IEnumerable<Schedule> schedules)
|
||||
{
|
||||
// Placeholder: actual copy logic would map legacy Mongo export to new Postgres graph_jobs rows.
|
||||
if (_options.DryRun)
|
||||
{
|
||||
return schedules.LongCount();
|
||||
Console.WriteLine("Dry run: no changes applied.");
|
||||
return;
|
||||
}
|
||||
|
||||
await using var conn = await _pg.OpenConnectionAsync();
|
||||
await using var conn = await _dataSource.OpenConnectionAsync();
|
||||
await using var tx = await conn.BeginTransactionAsync();
|
||||
|
||||
const string sql = @"
|
||||
INSERT INTO scheduler.schedules (
|
||||
id, tenant_id, name, description, enabled, cron_expression, timezone, mode,
|
||||
selection, only_if, notify, limits, subscribers, created_at, created_by, updated_at, updated_by, deleted_at, deleted_by)
|
||||
VALUES (
|
||||
@id, @tenant_id, @name, @description, @enabled, @cron_expression, @timezone, @mode,
|
||||
@selection, @only_if, @notify, @limits, @subscribers, @created_at, @created_by, @updated_at, @updated_by, @deleted_at, @deleted_by)
|
||||
ON CONFLICT (id) DO UPDATE SET
|
||||
tenant_id = EXCLUDED.tenant_id,
|
||||
name = EXCLUDED.name,
|
||||
description = EXCLUDED.description,
|
||||
enabled = EXCLUDED.enabled,
|
||||
cron_expression = EXCLUDED.cron_expression,
|
||||
timezone = EXCLUDED.timezone,
|
||||
mode = EXCLUDED.mode,
|
||||
selection = EXCLUDED.selection,
|
||||
only_if = EXCLUDED.only_if,
|
||||
notify = EXCLUDED.notify,
|
||||
limits = EXCLUDED.limits,
|
||||
subscribers = EXCLUDED.subscribers,
|
||||
created_at = LEAST(scheduler.schedules.created_at, EXCLUDED.created_at),
|
||||
created_by = EXCLUDED.created_by,
|
||||
updated_at = EXCLUDED.updated_at,
|
||||
updated_by = EXCLUDED.updated_by,
|
||||
deleted_at = EXCLUDED.deleted_at,
|
||||
deleted_by = EXCLUDED.deleted_by;";
|
||||
|
||||
var affected = 0;
|
||||
foreach (var schedule in schedules)
|
||||
{
|
||||
await using var cmd = new NpgsqlCommand(sql, conn, tx);
|
||||
cmd.Parameters.AddWithValue("id", schedule.Id);
|
||||
cmd.Parameters.AddWithValue("tenant_id", schedule.TenantId);
|
||||
cmd.Parameters.AddWithValue("name", schedule.Name);
|
||||
cmd.Parameters.AddWithValue("description", DBNull.Value);
|
||||
cmd.Parameters.AddWithValue("enabled", schedule.Enabled);
|
||||
cmd.Parameters.AddWithValue("cron_expression", schedule.CronExpression);
|
||||
cmd.Parameters.AddWithValue("timezone", schedule.Timezone);
|
||||
cmd.Parameters.AddWithValue("mode", BackfillMappings.ToScheduleMode(schedule.Mode));
|
||||
cmd.Parameters.AddWithValue("selection", CanonicalJsonSerializer.Serialize(schedule.Selection));
|
||||
cmd.Parameters.AddWithValue("only_if", CanonicalJsonSerializer.Serialize(schedule.OnlyIf));
|
||||
cmd.Parameters.AddWithValue("notify", CanonicalJsonSerializer.Serialize(schedule.Notify));
|
||||
cmd.Parameters.AddWithValue("limits", CanonicalJsonSerializer.Serialize(schedule.Limits));
|
||||
cmd.Parameters.AddWithValue("subscribers", schedule.Subscribers.ToArray());
|
||||
cmd.Parameters.AddWithValue("created_at", schedule.CreatedAt.UtcDateTime);
|
||||
cmd.Parameters.AddWithValue("created_by", schedule.CreatedBy);
|
||||
cmd.Parameters.AddWithValue("updated_at", schedule.UpdatedAt.UtcDateTime);
|
||||
cmd.Parameters.AddWithValue("updated_by", schedule.UpdatedBy);
|
||||
cmd.Parameters.AddWithValue("deleted_at", DBNull.Value);
|
||||
cmd.Parameters.AddWithValue("deleted_by", DBNull.Value);
|
||||
|
||||
affected += await cmd.ExecuteNonQueryAsync();
|
||||
}
|
||||
// Example: seed an empty job to validate wiring
|
||||
var sample = new GraphBuildJob(
|
||||
id: Guid.NewGuid().ToString(),
|
||||
tenantId: "tenant",
|
||||
sbomId: "sbom",
|
||||
sbomVersionId: "sbom-ver",
|
||||
sbomDigest: "sha256:dummy",
|
||||
status: GraphJobStatus.Pending,
|
||||
trigger: GraphBuildJobTrigger.Manual,
|
||||
createdAt: DateTimeOffset.UtcNow);
|
||||
|
||||
await _graphJobRepository.InsertAsync(sample, CancellationToken.None);
|
||||
await tx.CommitAsync();
|
||||
return affected;
|
||||
}
|
||||
|
||||
private async Task BackfillRunsAsync()
|
||||
{
|
||||
var collection = _mongo.GetCollection<BsonDocument>(new SchedulerMongoOptions().RunsCollection);
|
||||
using var cursor = await collection.Find(FilterDefinition<BsonDocument>.Empty).ToCursorAsync();
|
||||
|
||||
var batch = new List<Run>(_options.BatchSize);
|
||||
long total = 0;
|
||||
|
||||
while (await cursor.MoveNextAsync())
|
||||
{
|
||||
foreach (var doc in cursor.Current)
|
||||
{
|
||||
var run = BsonSerializer.Deserialize<Run>(doc);
|
||||
batch.Add(run);
|
||||
if (batch.Count >= _options.BatchSize)
|
||||
{
|
||||
total += await PersistRunsAsync(batch);
|
||||
batch.Clear();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (batch.Count > 0)
|
||||
{
|
||||
total += await PersistRunsAsync(batch);
|
||||
}
|
||||
|
||||
Console.WriteLine($"Runs backfilled: {total}");
|
||||
}
|
||||
|
||||
private async Task<long> PersistRunsAsync(IEnumerable<Run> runs)
|
||||
{
|
||||
if (_options.DryRun)
|
||||
{
|
||||
return runs.LongCount();
|
||||
}
|
||||
|
||||
await using var conn = await _pg.OpenConnectionAsync();
|
||||
await using var tx = await conn.BeginTransactionAsync();
|
||||
|
||||
const string sql = @"
|
||||
INSERT INTO scheduler.runs (
|
||||
id, tenant_id, schedule_id, state, trigger, stats, deltas, reason, retry_of,
|
||||
created_at, started_at, finished_at, error, created_by, updated_at, metadata)
|
||||
VALUES (
|
||||
@id, @tenant_id, @schedule_id, @state, @trigger, @stats, @deltas, @reason, @retry_of,
|
||||
@created_at, @started_at, @finished_at, @error, @created_by, @updated_at, @metadata)
|
||||
ON CONFLICT (id) DO UPDATE SET
|
||||
tenant_id = EXCLUDED.tenant_id,
|
||||
schedule_id = EXCLUDED.schedule_id,
|
||||
state = EXCLUDED.state,
|
||||
trigger = EXCLUDED.trigger,
|
||||
stats = EXCLUDED.stats,
|
||||
deltas = EXCLUDED.deltas,
|
||||
reason = EXCLUDED.reason,
|
||||
retry_of = EXCLUDED.retry_of,
|
||||
created_at = LEAST(scheduler.runs.created_at, EXCLUDED.created_at),
|
||||
started_at = EXCLUDED.started_at,
|
||||
finished_at = EXCLUDED.finished_at,
|
||||
error = EXCLUDED.error,
|
||||
created_by = COALESCE(EXCLUDED.created_by, scheduler.runs.created_by),
|
||||
updated_at = EXCLUDED.updated_at,
|
||||
metadata = EXCLUDED.metadata;";
|
||||
|
||||
var affected = 0;
|
||||
foreach (var run in runs)
|
||||
{
|
||||
await using var cmd = new NpgsqlCommand(sql, conn, tx);
|
||||
cmd.Parameters.AddWithValue("id", run.Id);
|
||||
cmd.Parameters.AddWithValue("tenant_id", run.TenantId);
|
||||
cmd.Parameters.AddWithValue("schedule_id", (object?)run.ScheduleId ?? DBNull.Value);
|
||||
cmd.Parameters.AddWithValue("state", BackfillMappings.ToRunState(run.State));
|
||||
cmd.Parameters.AddWithValue("trigger", BackfillMappings.ToRunTrigger(run.Trigger));
|
||||
cmd.Parameters.AddWithValue("stats", CanonicalJsonSerializer.Serialize(run.Stats));
|
||||
cmd.Parameters.AddWithValue("deltas", CanonicalJsonSerializer.Serialize(run.Deltas));
|
||||
cmd.Parameters.AddWithValue("reason", CanonicalJsonSerializer.Serialize(run.Reason));
|
||||
cmd.Parameters.AddWithValue("retry_of", (object?)run.RetryOf ?? DBNull.Value);
|
||||
cmd.Parameters.AddWithValue("created_at", run.CreatedAt.UtcDateTime);
|
||||
cmd.Parameters.AddWithValue("started_at", (object?)run.StartedAt?.UtcDateTime ?? DBNull.Value);
|
||||
cmd.Parameters.AddWithValue("finished_at", (object?)run.FinishedAt?.UtcDateTime ?? DBNull.Value);
|
||||
cmd.Parameters.AddWithValue("error", (object?)run.Error ?? DBNull.Value);
|
||||
cmd.Parameters.AddWithValue("created_by", (object?)run.Reason?.ManualReason ?? "system");
|
||||
cmd.Parameters.AddWithValue("updated_at", DateTime.UtcNow);
|
||||
cmd.Parameters.AddWithValue("metadata", JsonSerializer.Serialize(new { schema = run.SchemaVersion }));
|
||||
|
||||
affected += await cmd.ExecuteNonQueryAsync();
|
||||
}
|
||||

        await tx.CommitAsync();
        return affected;
    }
}
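The ON CONFLICT (id) DO UPDATE clause above is what makes the runs backfill safe to re-run: rows that were already migrated are updated in place rather than duplicated, and created_at keeps the earliest value via LEAST(...). A minimal, self-contained sketch of the same Npgsql upsert pattern, using a hypothetical demo table and connection string (illustrative only, not part of this change):

    using Npgsql;

    // Assumed local connection string for the sketch.
    const string connectionString = "Host=localhost;Database=demo;Username=demo;Password=demo";

    await using var conn = new NpgsqlConnection(connectionString);
    await conn.OpenAsync();

    // Hypothetical table standing in for scheduler.runs.
    await using (var create = new NpgsqlCommand(
        "CREATE TABLE IF NOT EXISTS demo_runs (id text PRIMARY KEY, state text NOT NULL);", conn))
    {
        await create.ExecuteNonQueryAsync();
    }

    // Running this block twice updates the existing row instead of inserting a duplicate.
    await using var upsert = new NpgsqlCommand(
        "INSERT INTO demo_runs (id, state) VALUES (@id, @state) " +
        "ON CONFLICT (id) DO UPDATE SET state = EXCLUDED.state;", conn);
    upsert.Parameters.AddWithValue("id", "run-1");
    upsert.Parameters.AddWithValue("state", "completed");
    await upsert.ExecuteNonQueryAsync();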

@@ -14,7 +14,6 @@
  </ItemGroup>

  <ItemGroup>
    <PackageReference Include="MongoDB.Driver" Version="3.5.0" />
    <PackageReference Include="Npgsql" Version="9.0.2" />
  </ItemGroup>

@@ -9,7 +9,6 @@
  <ItemGroup>
    <PackageReference Include="Microsoft.NET.Test.Sdk" Version="17.14.0" />
    <PackageReference Include="Microsoft.Extensions.Logging.Abstractions" Version="10.0.0" />
    <PackageReference Include="MongoDB.Driver" Version="3.5.0" />
    <PackageReference Include="xunit" Version="2.9.2" />
    <PackageReference Include="xunit.runner.visualstudio" Version="2.8.2" />
  </ItemGroup>
@@ -19,4 +18,4 @@
    <ProjectReference Include="../../../Notify/__Libraries/StellaOps.Notify.Models/StellaOps.Notify.Models.csproj" />
    <ProjectReference Include="../../../Notify/__Libraries/StellaOps.Notify.Queue/StellaOps.Notify.Queue.csproj" />
  </ItemGroup>
</Project>

@@ -1,38 +1,82 @@
using System.Reflection;
using System.Security.Cryptography;
using System.Text;

namespace StellaOps.TaskRunner.WebService;

/// <summary>
/// Factory for creating OpenAPI metadata including version, build info, and spec signature.
/// </summary>
internal static class OpenApiMetadataFactory
{
    /// <summary>API version from the OpenAPI spec (docs/api/taskrunner-openapi.yaml).</summary>
    public const string ApiVersion = "0.1.0-draft";

    internal static Type ResponseType => typeof(OpenApiMetadata);

    /// <summary>
    /// Creates OpenAPI metadata with versioning and signature information.
    /// </summary>
    /// <param name="specUrl">URL path to the OpenAPI spec endpoint.</param>
    /// <returns>Metadata record with version, build, ETag, and signature.</returns>
    public static OpenApiMetadata Create(string? specUrl = null)
    {
        var assembly = Assembly.GetExecutingAssembly().GetName();
        var version = assembly.Version?.ToString() ?? "0.0.0";
        var assembly = Assembly.GetExecutingAssembly();
        var assemblyName = assembly.GetName();

        // Get informational version (includes git hash if available) or fall back to assembly version
        var informationalVersion = assembly
            .GetCustomAttribute<AssemblyInformationalVersionAttribute>()?.InformationalVersion;
        var buildVersion = !string.IsNullOrWhiteSpace(informationalVersion)
            ? informationalVersion
            : assemblyName.Version?.ToString() ?? "0.0.0";

        var url = string.IsNullOrWhiteSpace(specUrl) ? "/openapi" : specUrl;
        var etag = CreateWeakEtag(version);
        var signature = ComputeSignature(url, version);

        return new OpenApiMetadata(url, version, etag, signature);
        // ETag combines API version and build version for cache invalidation
        var etag = CreateEtag(ApiVersion, buildVersion);

        // Signature is SHA-256 of spec URL + API version + build version
        var signature = ComputeSignature(url, ApiVersion, buildVersion);

        return new OpenApiMetadata(url, ApiVersion, buildVersion, etag, signature);
    }

    private static string CreateWeakEtag(string input)
    /// <summary>
    /// Creates a weak ETag from version components.
    /// </summary>
    private static string CreateEtag(string apiVersion, string buildVersion)
    {
        if (string.IsNullOrWhiteSpace(input))
        {
            input = "0.0.0";
        }

        return $"W/\"{input}\"";
        // Use SHA-256 of combined versions for a stable, fixed-length ETag
        var combined = $"{apiVersion}:{buildVersion}";
        var hash = SHA256.HashData(Encoding.UTF8.GetBytes(combined));
        var shortHash = Convert.ToHexString(hash)[..16].ToLowerInvariant();
        return $"W/\"{shortHash}\"";
    }

    private static string ComputeSignature(string url, string build)
    /// <summary>
    /// Computes a SHA-256 signature for spec verification.
    /// </summary>
    private static string ComputeSignature(string url, string apiVersion, string buildVersion)
    {
        var data = System.Text.Encoding.UTF8.GetBytes(url + build);
        var hash = System.Security.Cryptography.SHA256.HashData(data);
        return Convert.ToHexString(hash).ToLowerInvariant();
        // Include all metadata components in signature
        var data = Encoding.UTF8.GetBytes($"{url}|{apiVersion}|{buildVersion}");
        var hash = SHA256.HashData(data);
        return $"sha256:{Convert.ToHexString(hash).ToLowerInvariant()}";
    }

    internal sealed record OpenApiMetadata(string Url, string Build, string ETag, string Signature);
    /// <summary>
    /// OpenAPI metadata for the /.well-known/openapi endpoint.
    /// </summary>
    /// <param name="SpecUrl">URL to fetch the full OpenAPI specification.</param>
    /// <param name="Version">API version (e.g., "0.1.0-draft").</param>
    /// <param name="BuildVersion">Build/assembly version with optional git info.</param>
    /// <param name="ETag">ETag for HTTP caching.</param>
    /// <param name="Signature">SHA-256 signature for verification.</param>
    internal sealed record OpenApiMetadata(
        string SpecUrl,
        string Version,
        string BuildVersion,
        string ETag,
        string Signature);
}

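Per the new ComputeSignature above, the published signature is "sha256:" followed by the lowercase hex SHA-256 of "{url}|{apiVersion}|{buildVersion}". A client-side verification helper might look like the following sketch (the method name and call site are illustrative, not part of this change):

    using System.Security.Cryptography;
    using System.Text;

    static bool SignatureMatches(string reported, string specUrl, string apiVersion, string buildVersion)
    {
        // Recompute the digest the same way OpenApiMetadataFactory.ComputeSignature builds it.
        var payload = Encoding.UTF8.GetBytes($"{specUrl}|{apiVersion}|{buildVersion}");
        var expected = $"sha256:{Convert.ToHexString(SHA256.HashData(payload)).ToLowerInvariant()}";
        return string.Equals(reported, expected, StringComparison.OrdinalIgnoreCase);
    }

    // Example check against values returned by /.well-known/openapi (values assumed):
    // SignatureMatches(metadata.Signature, "/openapi", "0.1.0-draft", "1.2.3+abc123");
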
@@ -18,7 +18,7 @@ using StellaOps.TaskRunner.Core.TaskPacks;
using StellaOps.TaskRunner.Infrastructure.Execution;
using StellaOps.TaskRunner.WebService;
using StellaOps.Telemetry.Core;

var builder = WebApplication.CreateBuilder(args);

builder.Services.Configure<TaskRunnerServiceOptions>(builder.Configuration.GetSection("TaskRunner"));
@@ -96,47 +96,47 @@ builder.Services.AddSingleton(sp =>
builder.Services.AddSingleton<IPackRunJobScheduler>(sp => sp.GetRequiredService<FilesystemPackRunDispatcher>());
builder.Services.AddSingleton<PackRunApprovalDecisionService>();
builder.Services.AddOpenApi();

var app = builder.Build();

app.MapOpenApi("/openapi");

app.MapPost("/v1/task-runner/simulations", async (
    [FromBody] SimulationRequest request,
    TaskPackManifestLoader loader,
    TaskPackPlanner planner,
    PackRunSimulationEngine simulationEngine,
    CancellationToken cancellationToken) =>
{
    if (string.IsNullOrWhiteSpace(request.Manifest))
    {
        return Results.BadRequest(new { error = "Manifest is required." });
    }

    TaskPackManifest manifest;
    try
    {
        manifest = loader.Deserialize(request.Manifest);
    }
    catch (Exception ex)
    {
        return Results.BadRequest(new { error = "Invalid manifest", detail = ex.Message });
    }

    var inputs = ConvertInputs(request.Inputs);
    var planResult = planner.Plan(manifest, inputs);
    if (!planResult.Success || planResult.Plan is null)
    {
        return Results.BadRequest(new
        {
            errors = planResult.Errors.Select(error => new { error.Path, error.Message })
        });
    }

    var plan = planResult.Plan;
    var simulation = simulationEngine.Simulate(plan);
    var response = SimulationMapper.ToResponse(plan, simulation);
    return Results.Ok(response);
}).WithName("SimulateTaskPack");

app.MapPost("/v1/task-runner/runs", HandleCreateRun).WithName("CreatePackRun");
@@ -162,6 +162,8 @@ app.MapGet("/.well-known/openapi", (HttpResponse response) =>
    var metadata = OpenApiMetadataFactory.Create("/openapi");
    response.Headers.ETag = metadata.ETag;
    response.Headers.Append("X-Signature", metadata.Signature);
    response.Headers.Append("X-Api-Version", metadata.Version);
    response.Headers.Append("X-Build-Version", metadata.BuildVersion);
    return Results.Ok(metadata);
}).WithName("GetOpenApiMetadata");

@@ -432,21 +434,21 @@ async Task<IResult> HandleCancelRun(
app.Run();

static IDictionary<string, JsonNode?>? ConvertInputs(JsonObject? node)
{
    if (node is null)
    {
        return null;
    }

    var dictionary = new Dictionary<string, JsonNode?>(StringComparer.Ordinal);
    foreach (var property in node)
    {
        dictionary[property.Key] = property.Value?.DeepClone();
    }

    return dictionary;
}

internal sealed record CreateRunRequest(string? RunId, string Manifest, JsonObject? Inputs, string? TenantId);

internal sealed record SimulationRequest(string Manifest, JsonObject? Inputs);
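SimulationRequest carries the raw manifest text plus an optional inputs object, so a dry-run of a task pack is a single POST to the simulations endpoint. A hedged example invocation (host, manifest file, and input names are assumptions for illustration):

    using System.Net.Http;
    using System.Net.Http.Json;

    using var client = new HttpClient { BaseAddress = new Uri("http://localhost:5000") }; // assumed host/port

    var payload = new
    {
        manifest = await File.ReadAllTextAsync("taskpack.yaml"), // hypothetical manifest on disk
        inputs = new { environment = "staging" }                 // hypothetical input values
    };

    using var response = await client.PostAsJsonAsync("/v1/task-runner/simulations", payload);
    Console.WriteLine(await response.Content.ReadAsStringAsync());
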
@@ -455,40 +457,40 @@ internal sealed record SimulationResponse(
    string PlanHash,
    FailurePolicyResponse FailurePolicy,
    IReadOnlyList<SimulationStepResponse> Steps,
    IReadOnlyList<SimulationOutputResponse> Outputs,
    bool HasPendingApprovals);

internal sealed record SimulationStepResponse(
    string Id,
    string TemplateId,
    string Kind,
    bool Enabled,
    string Status,
    string? StatusReason,
    string? Uses,
    string? ApprovalId,
    string? GateMessage,
    int? MaxParallel,
    bool ContinueOnError,
    IReadOnlyList<SimulationStepResponse> Children);

internal sealed record SimulationOutputResponse(
    string Name,
    string Type,
    bool RequiresRuntimeValue,
    string? PathExpression,
    string? ValueExpression);

internal sealed record FailurePolicyResponse(int MaxAttempts, int BackoffSeconds, bool ContinueOnError);

internal sealed record RunStateResponse(
    string RunId,
    string PlanHash,
    FailurePolicyResponse FailurePolicy,
    DateTimeOffset CreatedAt,
    DateTimeOffset UpdatedAt,
    IReadOnlyList<RunStateStepResponse> Steps);

internal sealed record RunStateStepResponse(
    string StepId,
    string Kind,
@@ -552,81 +554,81 @@ internal static class RunLogMapper

internal static class SimulationMapper
{
    public static SimulationResponse ToResponse(TaskPackPlan plan, PackRunSimulationResult result)
    {
        var failurePolicy = result.FailurePolicy ?? PackRunExecutionGraph.DefaultFailurePolicy;
        var steps = result.Steps.Select(MapStep).ToList();
        var outputs = result.Outputs.Select(MapOutput).ToList();

        return new SimulationResponse(
            plan.Hash,
            new FailurePolicyResponse(failurePolicy.MaxAttempts, failurePolicy.BackoffSeconds, failurePolicy.ContinueOnError),
            steps,
            outputs,
            result.HasPendingApprovals);
    }

    private static SimulationStepResponse MapStep(PackRunSimulationNode node)
    {
        var children = node.Children.Select(MapStep).ToList();
        return new SimulationStepResponse(
            node.Id,
            node.TemplateId,
            node.Kind.ToString(),
            node.Enabled,
            node.Status.ToString(),
            node.Status.ToString() switch
            {
                nameof(PackRunSimulationStatus.RequiresApproval) => "requires-approval",
                nameof(PackRunSimulationStatus.RequiresPolicy) => "requires-policy",
                nameof(PackRunSimulationStatus.Skipped) => "condition-false",
                _ => null
            },
            node.Uses,
            node.ApprovalId,
            node.GateMessage,
            node.MaxParallel,
            node.ContinueOnError,
            children);
    }

    private static SimulationOutputResponse MapOutput(PackRunSimulationOutput output)
        => new(
            output.Name,
            output.Type,
            output.RequiresRuntimeValue,
            output.Path?.Expression,
            output.Expression?.Expression);
}

internal static class RunStateMapper
{
    public static RunStateResponse ToResponse(PackRunState state)
    {
        var failurePolicy = state.FailurePolicy ?? PackRunExecutionGraph.DefaultFailurePolicy;
        var steps = state.Steps.Values
            .OrderBy(step => step.StepId, StringComparer.Ordinal)
            .Select(step => new RunStateStepResponse(
                step.StepId,
                step.Kind.ToString(),
                step.Enabled,
                step.ContinueOnError,
                step.MaxParallel,
                step.ApprovalId,
                step.GateMessage,
                step.Status.ToString(),
                step.Attempts,
                step.LastTransitionAt,
                step.NextAttemptAt,
                step.StatusReason))
            .ToList();

        return new RunStateResponse(
            state.RunId,
            state.PlanHash,
            new FailurePolicyResponse(failurePolicy.MaxAttempts, failurePolicy.BackoffSeconds, failurePolicy.ContinueOnError),
            state.CreatedAt,
            state.UpdatedAt,
            steps);
    }
}