Restructure solution layout by module

This commit is contained in:
root
2025-10-28 15:10:40 +02:00
parent 4e3e575db5
commit 68da90a11a
4103 changed files with 192899 additions and 187024 deletions

View File

@@ -0,0 +1,4 @@
# StellaOps.Scheduler.ImpactIndex — Agent Charter
## Mission
Build the global impact index per `docs/ARCHITECTURE_SCHEDULER.md` (roaring bitmaps, selectors, snapshotting).

View File

@@ -0,0 +1,615 @@
using System.Collections.Immutable;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using System.IO;
using System.IO.Enumeration;
using System.Reflection;
using System.Text.Json;
using System.Text.Json.Serialization;
using Microsoft.Extensions.Logging;
using StellaOps.Scheduler.Models;
namespace StellaOps.Scheduler.ImpactIndex;
/// <summary>
/// Fixture-backed implementation of <see cref="IImpactIndex"/> used while the real index is under construction.
/// </summary>
public sealed class FixtureImpactIndex : IImpactIndex
{
private static readonly JsonSerializerOptions SerializerOptions = new()
{
PropertyNameCaseInsensitive = true,
ReadCommentHandling = JsonCommentHandling.Skip,
};
private readonly ImpactIndexStubOptions _options;
private readonly TimeProvider _timeProvider;
private readonly ILogger<FixtureImpactIndex> _logger;
private readonly SemaphoreSlim _initializationLock = new(1, 1);
private FixtureIndexState? _state;
public FixtureImpactIndex(
ImpactIndexStubOptions options,
TimeProvider? timeProvider,
ILogger<FixtureImpactIndex> logger)
{
_options = options ?? throw new ArgumentNullException(nameof(options));
_timeProvider = timeProvider ?? TimeProvider.System;
_logger = logger ?? throw new ArgumentNullException(nameof(logger));
}
public async ValueTask<ImpactSet> ResolveByPurlsAsync(
IEnumerable<string> purls,
bool usageOnly,
Selector selector,
CancellationToken cancellationToken = default)
{
ArgumentNullException.ThrowIfNull(purls);
ArgumentNullException.ThrowIfNull(selector);
var state = await EnsureInitializedAsync(cancellationToken).ConfigureAwait(false);
var normalizedPurls = NormalizeKeys(purls);
if (normalizedPurls.Length == 0)
{
return CreateImpactSet(state, selector, Enumerable.Empty<FixtureMatch>(), usageOnly);
}
var matches = new List<FixtureMatch>();
foreach (var purl in normalizedPurls)
{
cancellationToken.ThrowIfCancellationRequested();
if (!state.PurlIndex.TryGetValue(purl, out var componentMatches))
{
continue;
}
foreach (var component in componentMatches)
{
var usedByEntrypoint = component.Component.UsedByEntrypoint;
if (usageOnly && !usedByEntrypoint)
{
continue;
}
matches.Add(new FixtureMatch(component.Image, usedByEntrypoint));
}
}
return CreateImpactSet(state, selector, matches, usageOnly);
}
public async ValueTask<ImpactSet> ResolveByVulnerabilitiesAsync(
IEnumerable<string> vulnerabilityIds,
bool usageOnly,
Selector selector,
CancellationToken cancellationToken = default)
{
ArgumentNullException.ThrowIfNull(vulnerabilityIds);
ArgumentNullException.ThrowIfNull(selector);
var state = await EnsureInitializedAsync(cancellationToken).ConfigureAwait(false);
// The stub does not maintain a vulnerability → purl projection, so we return an empty result.
if (_logger.IsEnabled(LogLevel.Debug))
{
var first = vulnerabilityIds.FirstOrDefault(static id => !string.IsNullOrWhiteSpace(id));
if (first is not null)
{
_logger.LogDebug(
"ImpactIndex stub received ResolveByVulnerabilitiesAsync for '{VulnerabilityId}' but mappings are not available.",
first);
}
}
return CreateImpactSet(state, selector, Enumerable.Empty<FixtureMatch>(), usageOnly);
}
public async ValueTask<ImpactSet> ResolveAllAsync(
Selector selector,
bool usageOnly,
CancellationToken cancellationToken = default)
{
ArgumentNullException.ThrowIfNull(selector);
var state = await EnsureInitializedAsync(cancellationToken).ConfigureAwait(false);
var matches = state.ImagesByDigest.Values
.Select(image => new FixtureMatch(image, image.UsedByEntrypoint))
.Where(match => !usageOnly || match.UsedByEntrypoint);
return CreateImpactSet(state, selector, matches, usageOnly);
}
private async Task<FixtureIndexState> EnsureInitializedAsync(CancellationToken cancellationToken)
{
if (_state is not null)
{
return _state;
}
await _initializationLock.WaitAsync(cancellationToken).ConfigureAwait(false);
try
{
if (_state is not null)
{
return _state;
}
var state = await LoadAsync(cancellationToken).ConfigureAwait(false);
_state = state;
_logger.LogInformation(
"ImpactIndex stub loaded {ImageCount} fixture images from {SourceDescription}.",
state.ImagesByDigest.Count,
state.SourceDescription);
return state;
}
finally
{
_initializationLock.Release();
}
}
private async Task<FixtureIndexState> LoadAsync(CancellationToken cancellationToken)
{
var images = new List<FixtureImage>();
string? sourceDescription = null;
if (!string.IsNullOrWhiteSpace(_options.FixtureDirectory))
{
var directory = ResolveDirectoryPath(_options.FixtureDirectory!);
if (Directory.Exists(directory))
{
images.AddRange(await LoadFromDirectoryAsync(directory, cancellationToken).ConfigureAwait(false));
sourceDescription = directory;
}
else
{
_logger.LogWarning(
"ImpactIndex stub fixture directory '{Directory}' was not found. Falling back to embedded fixtures.",
directory);
}
}
if (images.Count == 0)
{
images.AddRange(await LoadFromResourcesAsync(cancellationToken).ConfigureAwait(false));
sourceDescription ??= "embedded:scheduler-impact-index-fixtures";
}
if (images.Count == 0)
{
throw new InvalidOperationException("No BOM-Index fixtures were found for the ImpactIndex stub.");
}
return BuildState(images, sourceDescription!, _options.SnapshotId);
}
private static string ResolveDirectoryPath(string path)
{
if (Path.IsPathRooted(path))
{
return path;
}
var basePath = AppContext.BaseDirectory;
return Path.GetFullPath(Path.Combine(basePath, path));
}
private static async Task<IReadOnlyList<FixtureImage>> LoadFromDirectoryAsync(
string directory,
CancellationToken cancellationToken)
{
var results = new List<FixtureImage>();
foreach (var file in Directory.EnumerateFiles(directory, "bom-index.json", SearchOption.AllDirectories)
.OrderBy(static file => file, StringComparer.Ordinal))
{
cancellationToken.ThrowIfCancellationRequested();
await using var stream = File.OpenRead(file);
var document = await JsonSerializer.DeserializeAsync<BomIndexDocument>(stream, SerializerOptions, cancellationToken)
.ConfigureAwait(false);
if (document is null)
{
continue;
}
results.Add(CreateFixtureImage(document));
}
return results;
}
private static async Task<IReadOnlyList<FixtureImage>> LoadFromResourcesAsync(CancellationToken cancellationToken)
{
var assembly = typeof(FixtureImpactIndex).Assembly;
var resourceNames = assembly
.GetManifestResourceNames()
.Where(static name => name.EndsWith(".bom-index.json", StringComparison.OrdinalIgnoreCase))
.OrderBy(static name => name, StringComparer.Ordinal)
.ToArray();
var results = new List<FixtureImage>(resourceNames.Length);
foreach (var resourceName in resourceNames)
{
cancellationToken.ThrowIfCancellationRequested();
await using var stream = assembly.GetManifestResourceStream(resourceName);
if (stream is null)
{
continue;
}
var document = await JsonSerializer.DeserializeAsync<BomIndexDocument>(stream, SerializerOptions, cancellationToken)
.ConfigureAwait(false);
if (document is null)
{
continue;
}
results.Add(CreateFixtureImage(document));
}
return results;
}
private static FixtureIndexState BuildState(
IReadOnlyList<FixtureImage> images,
string sourceDescription,
string snapshotId)
{
var imagesByDigest = images
.GroupBy(static image => image.Digest, StringComparer.OrdinalIgnoreCase)
.ToImmutableDictionary(
static group => group.Key,
static group => group
.OrderBy(static image => image.Repository, StringComparer.Ordinal)
.ThenBy(static image => image.Registry, StringComparer.Ordinal)
.ThenBy(static image => image.Tags.Length, Comparer<int>.Default)
.First(),
StringComparer.OrdinalIgnoreCase);
var purlIndexBuilder = new Dictionary<string, List<FixtureComponentMatch>>(StringComparer.OrdinalIgnoreCase);
foreach (var image in images)
{
foreach (var component in image.Components)
{
if (!purlIndexBuilder.TryGetValue(component.Purl, out var list))
{
list = new List<FixtureComponentMatch>();
purlIndexBuilder[component.Purl] = list;
}
list.Add(new FixtureComponentMatch(image, component));
}
}
var purlIndex = purlIndexBuilder.ToImmutableDictionary(
static entry => entry.Key,
static entry => entry.Value
.OrderBy(static item => item.Image.Digest, StringComparer.Ordinal)
.Select(static item => new FixtureComponentMatch(item.Image, item.Component))
.ToImmutableArray(),
StringComparer.OrdinalIgnoreCase);
var generatedAt = images.Count == 0
? DateTimeOffset.UnixEpoch
: images.Max(static image => image.GeneratedAt);
return new FixtureIndexState(imagesByDigest, purlIndex, generatedAt, sourceDescription, snapshotId);
}
private ImpactSet CreateImpactSet(
FixtureIndexState state,
Selector selector,
IEnumerable<FixtureMatch> matches,
bool usageOnly)
{
var aggregated = new Dictionary<string, ImpactImageBuilder>(StringComparer.OrdinalIgnoreCase);
foreach (var match in matches)
{
if (!ImageMatchesSelector(match.Image, selector))
{
continue;
}
if (!aggregated.TryGetValue(match.Image.Digest, out var builder))
{
builder = new ImpactImageBuilder(match.Image);
aggregated[match.Image.Digest] = builder;
}
builder.MarkUsedByEntrypoint(match.UsedByEntrypoint);
}
var images = aggregated.Values
.Select(static builder => builder.Build())
.OrderBy(static image => image.ImageDigest, StringComparer.Ordinal)
.ToImmutableArray();
return new ImpactSet(
selector,
images,
usageOnly,
state.GeneratedAt == DateTimeOffset.UnixEpoch
? _timeProvider.GetUtcNow()
: state.GeneratedAt,
images.Length,
state.SnapshotId,
SchedulerSchemaVersions.ImpactSet);
}
private static bool ImageMatchesSelector(FixtureImage image, Selector selector)
{
if (selector is null)
{
return true;
}
if (selector.Digests.Length > 0 &&
!selector.Digests.Contains(image.Digest, StringComparer.OrdinalIgnoreCase))
{
return false;
}
if (selector.Repositories.Length > 0)
{
var repositoryMatch = selector.Repositories.Any(repo =>
string.Equals(repo, image.Repository, StringComparison.OrdinalIgnoreCase) ||
string.Equals(repo, $"{image.Registry}/{image.Repository}", StringComparison.OrdinalIgnoreCase));
if (!repositoryMatch)
{
return false;
}
}
if (selector.Namespaces.Length > 0)
{
if (image.Namespaces.IsDefaultOrEmpty)
{
return false;
}
var namespaceMatch = selector.Namespaces.Any(namespaceId =>
image.Namespaces.Contains(namespaceId, StringComparer.OrdinalIgnoreCase));
if (!namespaceMatch)
{
return false;
}
}
if (selector.IncludeTags.Length > 0)
{
if (image.Tags.IsDefaultOrEmpty)
{
return false;
}
var tagMatch = selector.IncludeTags.Any(pattern =>
MatchesAnyTag(image.Tags, pattern));
if (!tagMatch)
{
return false;
}
}
if (selector.Labels.Length > 0)
{
if (image.Labels.Count == 0)
{
return false;
}
foreach (var labelSelector in selector.Labels)
{
if (!image.Labels.TryGetValue(labelSelector.Key, out var value))
{
return false;
}
if (labelSelector.Values.Length > 0 &&
!labelSelector.Values.Contains(value, StringComparer.OrdinalIgnoreCase))
{
return false;
}
}
}
return selector.Scope switch
{
SelectorScope.ByDigest => selector.Digests.Length == 0 ||
selector.Digests.Contains(image.Digest, StringComparer.OrdinalIgnoreCase),
SelectorScope.ByRepository => selector.Repositories.Length == 0 ||
selector.Repositories.Any(repo =>
string.Equals(repo, image.Repository, StringComparison.OrdinalIgnoreCase) ||
string.Equals(repo, $"{image.Registry}/{image.Repository}", StringComparison.OrdinalIgnoreCase)),
SelectorScope.ByNamespace => selector.Namespaces.Length == 0 ||
(!image.Namespaces.IsDefaultOrEmpty &&
selector.Namespaces.Any(namespaceId =>
image.Namespaces.Contains(namespaceId, StringComparer.OrdinalIgnoreCase))),
SelectorScope.ByLabels => selector.Labels.Length == 0 ||
selector.Labels.All(label =>
image.Labels.TryGetValue(label.Key, out var value) &&
(label.Values.Length == 0 || label.Values.Contains(value, StringComparer.OrdinalIgnoreCase))),
_ => true,
};
}
private static bool MatchesAnyTag(ImmutableArray<string> tags, string pattern)
{
foreach (var tag in tags)
{
if (FileSystemName.MatchesSimpleExpression(pattern, tag, ignoreCase: true))
{
return true;
}
}
return false;
}
private static FixtureImage CreateFixtureImage(BomIndexDocument document)
{
if (document.Image is null)
{
throw new InvalidOperationException("BOM-Index image metadata is required.");
}
var digest = Validation.EnsureDigestFormat(document.Image.Digest, "image.digest");
var (registry, repository) = SplitRepository(document.Image.Repository);
var tags = string.IsNullOrWhiteSpace(document.Image.Tag)
? ImmutableArray<string>.Empty
: ImmutableArray.Create(document.Image.Tag.Trim());
var components = (document.Components ?? Array.Empty<BomIndexComponent>())
.Where(static component => !string.IsNullOrWhiteSpace(component.Purl))
.Select(component => new FixtureComponent(
component.Purl!.Trim(),
component.Usage?.Any(static usage =>
usage.Equals("runtime", StringComparison.OrdinalIgnoreCase) ||
usage.Equals("usedByEntrypoint", StringComparison.OrdinalIgnoreCase)) == true))
.OrderBy(static component => component.Purl, StringComparer.OrdinalIgnoreCase)
.ToImmutableArray();
return new FixtureImage(
digest,
registry,
repository,
ImmutableArray<string>.Empty,
tags,
ImmutableSortedDictionary<string, string>.Empty.WithComparers(StringComparer.OrdinalIgnoreCase),
components,
document.GeneratedAt == default ? DateTimeOffset.UnixEpoch : document.GeneratedAt.ToUniversalTime(),
components.Any(static component => component.UsedByEntrypoint));
}
private static (string Registry, string Repository) SplitRepository(string repository)
{
var normalized = Validation.EnsureNotNullOrWhiteSpace(repository, nameof(repository));
var separatorIndex = normalized.IndexOf('/');
if (separatorIndex < 0)
{
return ("docker.io", normalized);
}
var registry = normalized[..separatorIndex];
var repo = normalized[(separatorIndex + 1)..];
if (string.IsNullOrWhiteSpace(repo))
{
throw new ArgumentException("Repository segment is required after registry.", nameof(repository));
}
return (registry.Trim(), repo.Trim());
}
private static string[] NormalizeKeys(IEnumerable<string> values)
{
return values
.Where(static value => !string.IsNullOrWhiteSpace(value))
.Select(static value => value.Trim())
.Distinct(StringComparer.OrdinalIgnoreCase)
.ToArray();
}
private readonly record struct FixtureMatch(FixtureImage Image, bool UsedByEntrypoint);
private sealed record FixtureImage(
string Digest,
string Registry,
string Repository,
ImmutableArray<string> Namespaces,
ImmutableArray<string> Tags,
ImmutableSortedDictionary<string, string> Labels,
ImmutableArray<FixtureComponent> Components,
DateTimeOffset GeneratedAt,
bool UsedByEntrypoint);
private sealed record FixtureComponent(string Purl, bool UsedByEntrypoint);
private sealed record FixtureComponentMatch(FixtureImage Image, FixtureComponent Component);
private sealed record FixtureIndexState(
ImmutableDictionary<string, FixtureImage> ImagesByDigest,
ImmutableDictionary<string, ImmutableArray<FixtureComponentMatch>> PurlIndex,
DateTimeOffset GeneratedAt,
string SourceDescription,
string SnapshotId);
private sealed class ImpactImageBuilder
{
private readonly FixtureImage _image;
private bool _usedByEntrypoint;
public ImpactImageBuilder(FixtureImage image)
{
_image = image;
}
public void MarkUsedByEntrypoint(bool usedByEntrypoint)
{
_usedByEntrypoint |= usedByEntrypoint;
}
public ImpactImage Build()
{
return new ImpactImage(
_image.Digest,
_image.Registry,
_image.Repository,
_image.Namespaces,
_image.Tags,
_usedByEntrypoint,
_image.Labels);
}
}
private sealed record BomIndexDocument
{
[JsonPropertyName("schema")]
public string? Schema { get; init; }
[JsonPropertyName("image")]
public BomIndexImage? Image { get; init; }
[JsonPropertyName("generatedAt")]
public DateTimeOffset GeneratedAt { get; init; }
[JsonPropertyName("components")]
public IReadOnlyList<BomIndexComponent>? Components { get; init; }
}
private sealed record BomIndexImage
{
[JsonPropertyName("repository")]
public string Repository { get; init; } = string.Empty;
[JsonPropertyName("digest")]
public string Digest { get; init; } = string.Empty;
[JsonPropertyName("tag")]
public string? Tag { get; init; }
}
private sealed record BomIndexComponent
{
[JsonPropertyName("purl")]
public string? Purl { get; init; }
[JsonPropertyName("usage")]
public IReadOnlyList<string>? Usage { get; init; }
}
}
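
For reference, a fixture document the stub can consume, matching the BomIndexDocument contract above (a sketch: the schema value and all identifiers are hypothetical, and image.digest must satisfy Validation.EnsureDigestFormat):

{
  "schema": "bom-index/v1",
  "image": {
    "repository": "registry.example.com/team/app",
    "digest": "sha256:aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",
    "tag": "1.2.3"
  },
  "generatedAt": "2025-10-20T00:00:00Z",
  "components": [
    { "purl": "pkg:npm/lodash@4.17.21", "usage": ["runtime"] }
  ]
}

A repository value without a slash-separated prefix is treated as a docker.io image (see SplitRepository), and components whose usage array contains "runtime" or "usedByEntrypoint" are flagged as entrypoint components.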

View File

@@ -0,0 +1,46 @@
using StellaOps.Scheduler.Models;
namespace StellaOps.Scheduler.ImpactIndex;
/// <summary>
/// Provides read access to the scheduler impact index.
/// </summary>
public interface IImpactIndex
{
/// <summary>
/// Resolves the impacted image set for the provided package URLs.
/// </summary>
/// <param name="purls">Package URLs to look up.</param>
/// <param name="usageOnly">When true, restricts results to components marked as runtime/entrypoint usage.</param>
/// <param name="selector">Selector scoping the query.</param>
/// <param name="cancellationToken">Cancellation token.</param>
ValueTask<ImpactSet> ResolveByPurlsAsync(
IEnumerable<string> purls,
bool usageOnly,
Selector selector,
CancellationToken cancellationToken = default);
/// <summary>
/// Resolves impacted images by vulnerability identifiers if the index has the mapping available.
/// </summary>
/// <param name="vulnerabilityIds">Vulnerability identifiers to look up.</param>
/// <param name="usageOnly">When true, restricts results to components marked as runtime/entrypoint usage.</param>
/// <param name="selector">Selector scoping the query.</param>
/// <param name="cancellationToken">Cancellation token.</param>
ValueTask<ImpactSet> ResolveByVulnerabilitiesAsync(
IEnumerable<string> vulnerabilityIds,
bool usageOnly,
Selector selector,
CancellationToken cancellationToken = default);
/// <summary>
/// Resolves all tracked images for the provided selector.
/// </summary>
/// <param name="selector">Selector scoping the query.</param>
/// <param name="usageOnly">When true, restricts results to images with entrypoint usage.</param>
/// <param name="cancellationToken">Cancellation token.</param>
ValueTask<ImpactSet> ResolveAllAsync(
Selector selector,
bool usageOnly,
CancellationToken cancellationToken = default);
}
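
A typical call site, sketched under the assumption that an IImpactIndex instance and a Selector are already in scope (Selector construction lives in StellaOps.Scheduler.Models and is not shown in this commit):

var impacted = await index.ResolveByPurlsAsync(
    new[] { "pkg:npm/lodash@4.17.21" },   // hypothetical purl
    usageOnly: true,                      // keep only runtime/entrypoint components
    selector,
    cancellationToken);
// Both implementations in this commit return impacted.Images sorted by image
// digest, so results are deterministic across runs.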

View File

@@ -0,0 +1,17 @@
using System;
using System.Collections.Immutable;
namespace StellaOps.Scheduler.ImpactIndex;
internal sealed record ImpactImageRecord(
int ImageId,
string TenantId,
string Digest,
string Registry,
string Repository,
ImmutableArray<string> Namespaces,
ImmutableArray<string> Tags,
ImmutableSortedDictionary<string, string> Labels,
DateTimeOffset GeneratedAt,
ImmutableArray<string> Components,
ImmutableArray<string> EntrypointComponents);

View File

@@ -0,0 +1,26 @@
using Microsoft.Extensions.DependencyInjection;
using Microsoft.Extensions.DependencyInjection.Extensions;
namespace StellaOps.Scheduler.ImpactIndex;
/// <summary>
/// ServiceCollection helpers for wiring the fixture-backed impact index.
/// </summary>
public static class ImpactIndexServiceCollectionExtensions
{
public static IServiceCollection AddImpactIndexStub(
this IServiceCollection services,
Action<ImpactIndexStubOptions>? configure = null)
{
ArgumentNullException.ThrowIfNull(services);
var options = new ImpactIndexStubOptions();
configure?.Invoke(options);
services.TryAddSingleton(TimeProvider.System);
services.AddSingleton(options);
services.TryAddSingleton<IImpactIndex, FixtureImpactIndex>();
return services;
}
}
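
Wiring the stub into a host is a one-liner; a sketch (the directory value is illustrative, and relative paths resolve against AppContext.BaseDirectory per FixtureImpactIndex.ResolveDirectoryPath):

services.AddImpactIndexStub(options =>
{
    options.FixtureDirectory = "Fixtures/scheduler"; // optional; a missing directory falls back to embedded fixtures
    options.SnapshotId = "samples/impact-index-stub";
});

Because the registration uses TryAddSingleton, a pre-registered IImpactIndex (for example the roaring bitmap implementation) takes precedence over the stub.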

View File

@@ -0,0 +1,19 @@
namespace StellaOps.Scheduler.ImpactIndex;
/// <summary>
/// Options controlling the fixture-backed impact index stub.
/// </summary>
public sealed class ImpactIndexStubOptions
{
/// <summary>
/// Optional absolute or relative directory containing BOM-Index JSON fixtures.
When not supplied or not found, the embedded fixtures shipped with the assembly are used instead.
/// </summary>
public string? FixtureDirectory { get; set; }
/// <summary>
/// Snapshot identifier reported in the generated <see cref="StellaOps.Scheduler.Models.ImpactSet"/>.
/// Defaults to <c>samples/impact-index-stub</c>.
/// </summary>
public string SnapshotId { get; set; } = "samples/impact-index-stub";
}

View File

@@ -0,0 +1,119 @@
using System.Buffers.Binary;
using System.Collections.Immutable;
using System.Globalization;
using System.Text;
using Collections.Special;
namespace StellaOps.Scheduler.ImpactIndex.Ingestion;
internal sealed record BomIndexComponent(string Key, bool UsedByEntrypoint);
internal sealed record BomIndexDocument(string ImageDigest, DateTimeOffset GeneratedAt, ImmutableArray<BomIndexComponent> Components);
internal static class BomIndexReader
{
private const int HeaderMagicLength = 7;
private static readonly byte[] Magic = Encoding.ASCII.GetBytes("BOMIDX1");
public static BomIndexDocument Read(Stream stream)
{
ArgumentNullException.ThrowIfNull(stream);
using var reader = new BinaryReader(stream, Encoding.UTF8, leaveOpen: true);
Span<byte> magicBuffer = stackalloc byte[HeaderMagicLength];
if (reader.Read(magicBuffer) != HeaderMagicLength || !magicBuffer.SequenceEqual(Magic))
{
throw new InvalidOperationException("Invalid BOM index header magic.");
}
var version = reader.ReadUInt16();
if (version != 1)
{
throw new NotSupportedException($"Unsupported BOM index version '{version}'.");
}
var flags = reader.ReadUInt16();
var hasEntrypoints = (flags & 0x1) == 1;
var digestLength = reader.ReadUInt16();
var digestBytes = reader.ReadBytes(digestLength);
var imageDigest = Encoding.UTF8.GetString(digestBytes);
var generatedAtMicros = reader.ReadInt64();
var generatedAt = DateTimeOffset.FromUnixTimeMilliseconds(generatedAtMicros / 1000)
.AddTicks((generatedAtMicros % 1000) * TimeSpan.TicksPerMillisecond / 1000);
var layerCount = checked((int)reader.ReadUInt32());
var componentCount = checked((int)reader.ReadUInt32());
var entrypointCount = checked((int)reader.ReadUInt32());
// Layer table: entries are not used, but each length-prefixed string is read to keep the stream position correct.
for (var i = 0; i < layerCount; i++)
{
_ = ReadUtf8String(reader);
}
var componentKeys = new string[componentCount];
for (var i = 0; i < componentCount; i++)
{
componentKeys[i] = ReadUtf8String(reader);
}
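// Per-component layer bitmaps follow the key table. The payloads are
// deserialized to validate the encoding and advance the stream, but layer
// membership is not retained by this reader.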
for (var i = 0; i < componentCount; i++)
{
var length = reader.ReadUInt32();
if (length > 0)
{
var payload = reader.ReadBytes(checked((int)length));
using var bitmapStream = new MemoryStream(payload, writable: false);
_ = RoaringBitmap.Deserialize(bitmapStream);
}
}
var entrypointPresence = new bool[componentCount];
if (hasEntrypoints && entrypointCount > 0)
{
// Entrypoint table (skip strings)
for (var i = 0; i < entrypointCount; i++)
{
_ = ReadUtf8String(reader);
}
for (var i = 0; i < componentCount; i++)
{
var length = reader.ReadUInt32();
if (length == 0)
{
entrypointPresence[i] = false;
continue;
}
var payload = reader.ReadBytes(checked((int)length));
using var bitmapStream = new MemoryStream(payload, writable: false);
var bitmap = RoaringBitmap.Deserialize(bitmapStream);
entrypointPresence[i] = bitmap.Any();
}
}
var builder = ImmutableArray.CreateBuilder<BomIndexComponent>(componentCount);
for (var i = 0; i < componentCount; i++)
{
var key = componentKeys[i];
builder.Add(new BomIndexComponent(key, entrypointPresence[i]));
}
return new BomIndexDocument(imageDigest, generatedAt, builder.MoveToImmutable());
}
private static string ReadUtf8String(BinaryReader reader)
{
var length = reader.ReadUInt16();
if (length == 0)
{
return string.Empty;
}
var bytes = reader.ReadBytes(length);
return Encoding.UTF8.GetString(bytes);
}
}
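
To make the layout concrete, a minimal companion writer derived purely from the reader above (a test-support sketch, not shipped code: it writes empty per-component bitmaps, which Read accepts, and clears the flags bit so the entrypoint section is omitted):

internal static class BomIndexWriterSketch
{
    public static void WriteMinimal(Stream stream, string imageDigest, DateTimeOffset generatedAt, IReadOnlyList<string> componentKeys)
    {
        using var writer = new BinaryWriter(stream, Encoding.UTF8, leaveOpen: true);
        writer.Write(Encoding.ASCII.GetBytes("BOMIDX1"));          // magic (7 bytes)
        writer.Write((ushort)1);                                   // version
        writer.Write((ushort)0);                                   // flags: bit 0 clear => no entrypoint section
        var digest = Encoding.UTF8.GetBytes(imageDigest);
        writer.Write((ushort)digest.Length);
        writer.Write(digest);
        writer.Write(generatedAt.ToUnixTimeMilliseconds() * 1000); // microseconds since epoch
        writer.Write(0u);                                          // layer count
        writer.Write((uint)componentKeys.Count);                   // component count
        writer.Write(0u);                                          // entrypoint count
        foreach (var key in componentKeys)                         // component key table
        {
            var bytes = Encoding.UTF8.GetBytes(key);
            writer.Write((ushort)bytes.Length);
            writer.Write(bytes);
        }
        for (var i = 0; i < componentKeys.Count; i++)
        {
            writer.Write(0u);                                      // empty per-component layer bitmap
        }
    }
}

BinaryWriter and BinaryReader share little-endian encoding, so the round trip is byte-exact.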

View File

@@ -0,0 +1,28 @@
using System;
using System.Collections.Immutable;
using System.IO;
namespace StellaOps.Scheduler.ImpactIndex.Ingestion;
/// <summary>
/// Describes a BOM-Index ingestion payload for the scheduler impact index.
/// </summary>
public sealed record ImpactIndexIngestionRequest
{
public required string TenantId { get; init; }
public required string ImageDigest { get; init; }
public required string Registry { get; init; }
public required string Repository { get; init; }
public ImmutableArray<string> Namespaces { get; init; } = ImmutableArray<string>.Empty;
public ImmutableArray<string> Tags { get; init; } = ImmutableArray<string>.Empty;
public ImmutableSortedDictionary<string, string> Labels { get; init; } = ImmutableSortedDictionary<string, string>.Empty.WithComparers(StringComparer.OrdinalIgnoreCase);
public required Stream BomIndexStream { get; init; } = Stream.Null;
}
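
Constructing a request, with hypothetical values (imageDigest and bomIndexStream are assumed to be in scope; RoaringImpactIndex.IngestAsync rejects the request if ImageDigest disagrees with the digest embedded in the BOMIDX1 header):

var request = new ImpactIndexIngestionRequest
{
    TenantId = "tenant-a",
    ImageDigest = imageDigest,
    Registry = "registry.example.com",
    Repository = "team/app",
    Tags = ImmutableArray.Create("1.2.3"),
    BomIndexStream = bomIndexStream, // e.g. a FileStream over the BOM-Index sidecar
};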

View File

@@ -0,0 +1,15 @@
# ImpactIndex Stub Removal Tracker
- **Created:** 2025-10-20
- **Owner:** Scheduler ImpactIndex Guild
- **Reference Task:** SCHED-IMPACT-16-300 (fixture-backed stub)
## Exit Reminder
Replace `FixtureImpactIndex` with the roaring bitmap-backed implementation once SCHED-IMPACT-16-301/302 are completed, then delete:
1. Stub classes (`FixtureImpactIndex`, `ImpactIndexStubOptions`, `ImpactIndexServiceCollectionExtensions`).
2. Embedded sample fixture wiring in `StellaOps.Scheduler.ImpactIndex.csproj`.
3. Temporary unit tests in `StellaOps.Scheduler.ImpactIndex.Tests`.
Remove this file when the production ImpactIndex replaces the stub.

View File

@@ -0,0 +1,481 @@
using System;
using System.Buffers.Binary;
using System.Collections.Generic;
using System.Collections.Immutable;
using System.Linq;
using System.Security.Cryptography;
using System.Text;
using System.Text.RegularExpressions;
using Collections.Special;
using Microsoft.Extensions.Logging;
using StellaOps.Scheduler.ImpactIndex.Ingestion;
using StellaOps.Scheduler.Models;
namespace StellaOps.Scheduler.ImpactIndex;
/// <summary>
/// Roaring bitmap-backed implementation of the scheduler impact index.
/// </summary>
public sealed class RoaringImpactIndex : IImpactIndex
{
private readonly object _gate = new();
private readonly Dictionary<string, int> _imageIds = new(StringComparer.OrdinalIgnoreCase);
private readonly Dictionary<int, ImpactImageRecord> _images = new();
private readonly Dictionary<string, RoaringBitmap> _containsByPurl = new(StringComparer.OrdinalIgnoreCase);
private readonly Dictionary<string, RoaringBitmap> _usedByEntrypointByPurl = new(StringComparer.OrdinalIgnoreCase);
private readonly ILogger<RoaringImpactIndex> _logger;
private readonly TimeProvider _timeProvider;
public RoaringImpactIndex(ILogger<RoaringImpactIndex> logger, TimeProvider? timeProvider = null)
{
_logger = logger ?? throw new ArgumentNullException(nameof(logger));
_timeProvider = timeProvider ?? TimeProvider.System;
}
public async Task IngestAsync(ImpactIndexIngestionRequest request, CancellationToken cancellationToken = default)
{
ArgumentNullException.ThrowIfNull(request);
ArgumentNullException.ThrowIfNull(request.BomIndexStream);
using var buffer = new MemoryStream();
await request.BomIndexStream.CopyToAsync(buffer, cancellationToken).ConfigureAwait(false);
buffer.Position = 0;
var document = BomIndexReader.Read(buffer);
if (!string.Equals(document.ImageDigest, request.ImageDigest, StringComparison.OrdinalIgnoreCase))
{
throw new InvalidOperationException($"BOM-Index digest mismatch. Header '{document.ImageDigest}', request '{request.ImageDigest}'.");
}
var tenantId = request.TenantId ?? throw new ArgumentNullException(nameof(request.TenantId));
var registry = request.Registry ?? throw new ArgumentNullException(nameof(request.Registry));
var repository = request.Repository ?? throw new ArgumentNullException(nameof(request.Repository));
var namespaces = request.Namespaces.IsDefault ? ImmutableArray<string>.Empty : request.Namespaces;
var tags = request.Tags.IsDefault ? ImmutableArray<string>.Empty : request.Tags;
var labels = request.Labels.Count == 0
? ImmutableSortedDictionary<string, string>.Empty.WithComparers(StringComparer.OrdinalIgnoreCase)
: request.Labels;
var componentKeys = document.Components
.Select(component => component.Key)
.Distinct(StringComparer.OrdinalIgnoreCase)
.ToImmutableArray();
var entrypointComponents = document.Components
.Where(component => component.UsedByEntrypoint)
.Select(component => component.Key)
.Distinct(StringComparer.OrdinalIgnoreCase)
.ToImmutableArray();
lock (_gate)
{
var imageId = EnsureImageId(request.ImageDigest);
if (_images.TryGetValue(imageId, out var existing))
{
RemoveImageComponents(existing);
}
var metadata = new ImpactImageRecord(
imageId,
tenantId,
request.ImageDigest,
registry,
repository,
namespaces,
tags,
labels,
document.GeneratedAt,
componentKeys,
entrypointComponents);
_images[imageId] = metadata;
_imageIds[request.ImageDigest] = imageId;
foreach (var key in componentKeys)
{
var bitmap = _containsByPurl.GetValueOrDefault(key);
_containsByPurl[key] = AddImageToBitmap(bitmap, imageId);
}
foreach (var key in entrypointComponents)
{
var bitmap = _usedByEntrypointByPurl.GetValueOrDefault(key);
_usedByEntrypointByPurl[key] = AddImageToBitmap(bitmap, imageId);
}
}
_logger.LogInformation(
"ImpactIndex ingested BOM-Index for {Digest} ({TenantId}/{Repository}). Components={ComponentCount} EntrypointComponents={EntrypointCount}",
request.ImageDigest,
tenantId,
repository,
componentKeys.Length,
entrypointComponents.Length);
}
public ValueTask<ImpactSet> ResolveByPurlsAsync(
IEnumerable<string> purls,
bool usageOnly,
Selector selector,
CancellationToken cancellationToken = default)
=> ValueTask.FromResult(ResolveByPurlsCore(purls, usageOnly, selector));
public ValueTask<ImpactSet> ResolveByVulnerabilitiesAsync(
IEnumerable<string> vulnerabilityIds,
bool usageOnly,
Selector selector,
CancellationToken cancellationToken = default)
=> ValueTask.FromResult(CreateEmptyImpactSet(selector, usageOnly));
public ValueTask<ImpactSet> ResolveAllAsync(
Selector selector,
bool usageOnly,
CancellationToken cancellationToken = default)
=> ValueTask.FromResult(ResolveAllCore(selector, usageOnly));
private ImpactSet ResolveByPurlsCore(IEnumerable<string> purls, bool usageOnly, Selector selector)
{
ArgumentNullException.ThrowIfNull(purls);
ArgumentNullException.ThrowIfNull(selector);
var normalized = purls
.Where(static purl => !string.IsNullOrWhiteSpace(purl))
.Select(static purl => purl.Trim())
.Distinct(StringComparer.OrdinalIgnoreCase)
.ToArray();
if (normalized.Length == 0)
{
return CreateEmptyImpactSet(selector, usageOnly);
}
RoaringBitmap imageIds;
lock (_gate)
{
imageIds = RoaringBitmap.Create(Array.Empty<int>());
foreach (var purl in normalized)
{
if (_containsByPurl.TryGetValue(purl, out var bitmap))
{
imageIds = imageIds | bitmap;
}
}
}
return BuildImpactSet(imageIds, selector, usageOnly);
}
private ImpactSet ResolveAllCore(Selector selector, bool usageOnly)
{
ArgumentNullException.ThrowIfNull(selector);
RoaringBitmap bitmap;
lock (_gate)
{
var ids = _images.Keys.OrderBy(id => id).ToArray();
bitmap = RoaringBitmap.Create(ids);
}
return BuildImpactSet(bitmap, selector, usageOnly);
}
private ImpactSet BuildImpactSet(RoaringBitmap imageIds, Selector selector, bool usageOnly)
{
var images = new List<ImpactImage>();
var latestGeneratedAt = DateTimeOffset.MinValue;
lock (_gate)
{
foreach (var imageId in imageIds)
{
if (!_images.TryGetValue(imageId, out var metadata))
{
continue;
}
if (!ImageMatchesSelector(metadata, selector))
{
continue;
}
if (usageOnly && metadata.EntrypointComponents.Length == 0)
{
continue;
}
if (metadata.GeneratedAt > latestGeneratedAt)
{
latestGeneratedAt = metadata.GeneratedAt;
}
images.Add(new ImpactImage(
metadata.Digest,
metadata.Registry,
metadata.Repository,
metadata.Namespaces,
metadata.Tags,
metadata.EntrypointComponents.Length > 0,
metadata.Labels));
}
}
if (images.Count == 0)
{
return CreateEmptyImpactSet(selector, usageOnly);
}
images.Sort(static (left, right) => string.Compare(left.ImageDigest, right.ImageDigest, StringComparison.Ordinal));
var generatedAt = latestGeneratedAt == DateTimeOffset.MinValue ? _timeProvider.GetUtcNow() : latestGeneratedAt;
return new ImpactSet(
selector,
images.ToImmutableArray(),
usageOnly,
generatedAt,
images.Count,
snapshotId: null,
schemaVersion: SchedulerSchemaVersions.ImpactSet);
}
private ImpactSet CreateEmptyImpactSet(Selector selector, bool usageOnly)
{
return new ImpactSet(
selector,
ImmutableArray<ImpactImage>.Empty,
usageOnly,
_timeProvider.GetUtcNow(),
0,
snapshotId: null,
schemaVersion: SchedulerSchemaVersions.ImpactSet);
}
private static bool ImageMatchesSelector(ImpactImageRecord image, Selector selector)
{
if (selector.TenantId is not null && !string.Equals(selector.TenantId, image.TenantId, StringComparison.Ordinal))
{
return false;
}
if (!MatchesScope(image, selector))
{
return false;
}
if (selector.Digests.Length > 0 && !selector.Digests.Contains(image.Digest, StringComparer.OrdinalIgnoreCase))
{
return false;
}
if (selector.Repositories.Length > 0)
{
var repoMatch = selector.Repositories.Any(repo =>
string.Equals(repo, image.Repository, StringComparison.OrdinalIgnoreCase) ||
string.Equals(repo, $"{image.Registry}/{image.Repository}", StringComparison.OrdinalIgnoreCase));
if (!repoMatch)
{
return false;
}
}
if (selector.Namespaces.Length > 0)
{
if (image.Namespaces.IsDefaultOrEmpty)
{
return false;
}
var namespaceMatch = selector.Namespaces.Any(ns => image.Namespaces.Contains(ns, StringComparer.OrdinalIgnoreCase));
if (!namespaceMatch)
{
return false;
}
}
if (selector.IncludeTags.Length > 0)
{
if (image.Tags.IsDefaultOrEmpty)
{
return false;
}
var tagMatch = selector.IncludeTags.Any(pattern => image.Tags.Any(tag => MatchesTagPattern(tag, pattern)));
if (!tagMatch)
{
return false;
}
}
if (selector.Labels.Length > 0)
{
if (image.Labels.Count == 0)
{
return false;
}
foreach (var label in selector.Labels)
{
if (!image.Labels.TryGetValue(label.Key, out var value))
{
return false;
}
if (label.Values.Length > 0 && !label.Values.Contains(value, StringComparer.OrdinalIgnoreCase))
{
return false;
}
}
}
return true;
}
private void RemoveImageComponents(ImpactImageRecord record)
{
foreach (var key in record.Components)
{
if (_containsByPurl.TryGetValue(key, out var bitmap))
{
var updated = RemoveImageFromBitmap(bitmap, record.ImageId);
if (updated is null)
{
_containsByPurl.Remove(key);
}
else
{
_containsByPurl[key] = updated;
}
}
}
foreach (var key in record.EntrypointComponents)
{
if (_usedByEntrypointByPurl.TryGetValue(key, out var bitmap))
{
var updated = RemoveImageFromBitmap(bitmap, record.ImageId);
if (updated is null)
{
_usedByEntrypointByPurl.Remove(key);
}
else
{
_usedByEntrypointByPurl[key] = updated;
}
}
}
}
private static RoaringBitmap AddImageToBitmap(RoaringBitmap? bitmap, int imageId)
{
if (bitmap is null)
{
return RoaringBitmap.Create(new[] { imageId });
}
if (bitmap.Any(id => id == imageId))
{
return bitmap;
}
var merged = bitmap
.Concat(new[] { imageId })
.Distinct()
.OrderBy(id => id)
.ToArray();
return RoaringBitmap.Create(merged);
}
private static RoaringBitmap? RemoveImageFromBitmap(RoaringBitmap bitmap, int imageId)
{
var remaining = bitmap
.Where(id => id != imageId)
.OrderBy(id => id)
.ToArray();
if (remaining.Length == 0)
{
return null;
}
return RoaringBitmap.Create(remaining);
}
private static bool MatchesScope(ImpactImageRecord image, Selector selector)
{
return selector.Scope switch
{
SelectorScope.AllImages => true,
SelectorScope.ByDigest => selector.Digests.Contains(image.Digest, StringComparer.OrdinalIgnoreCase),
SelectorScope.ByRepository => selector.Repositories.Any(repo =>
string.Equals(repo, image.Repository, StringComparison.OrdinalIgnoreCase) ||
string.Equals(repo, $"{image.Registry}/{image.Repository}", StringComparison.OrdinalIgnoreCase)),
SelectorScope.ByNamespace => !image.Namespaces.IsDefaultOrEmpty && selector.Namespaces.Any(ns => image.Namespaces.Contains(ns, StringComparer.OrdinalIgnoreCase)),
SelectorScope.ByLabels => selector.Labels.All(label =>
image.Labels.TryGetValue(label.Key, out var value) &&
(label.Values.Length == 0 || label.Values.Contains(value, StringComparer.OrdinalIgnoreCase))),
_ => true,
};
}
private static bool MatchesTagPattern(string tag, string pattern)
{
if (string.IsNullOrWhiteSpace(pattern))
{
return false;
}
if (pattern == "*")
{
return true;
}
if (!pattern.Contains('*') && !pattern.Contains('?'))
{
return string.Equals(tag, pattern, StringComparison.OrdinalIgnoreCase);
}
var escaped = Regex.Escape(pattern)
.Replace("\\*", ".*")
.Replace("\\?", ".");
return Regex.IsMatch(tag, $"^{escaped}$", RegexOptions.IgnoreCase | RegexOptions.CultureInvariant);
}
private int EnsureImageId(string digest)
{
if (_imageIds.TryGetValue(digest, out var existing))
{
return existing;
}
var candidate = ComputeDeterministicId(digest);
while (_images.ContainsKey(candidate))
{
candidate = (candidate + 1) & int.MaxValue;
if (candidate == 0)
{
candidate = 1;
}
}
_imageIds[digest] = candidate;
return candidate;
}
private static int ComputeDeterministicId(string digest)
{
var bytes = SHA256.HashData(Encoding.UTF8.GetBytes(digest));
for (var offset = 0; offset <= bytes.Length - sizeof(int); offset += sizeof(int))
{
var value = BinaryPrimitives.ReadInt32LittleEndian(bytes.AsSpan(offset, sizeof(int))) & int.MaxValue;
if (value != 0)
{
return value;
}
}
return digest.GetHashCode(StringComparison.OrdinalIgnoreCase) & int.MaxValue;
}
}
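
End to end, ingestion followed by a query looks roughly like this (a sketch: request is the ingestion payload from the previous file, selector construction is omitted, and NullLogger comes from Microsoft.Extensions.Logging.Abstractions):

var index = new RoaringImpactIndex(NullLogger<RoaringImpactIndex>.Instance);
await index.IngestAsync(request);
var impacted = await index.ResolveByPurlsAsync(
    new[] { "pkg:npm/lodash@4.17.21" },
    usageOnly: false,
    selector);
// Re-ingesting the same digest first unlinks the old bitmaps (see
// RemoveImageComponents), so updates replace rather than accumulate.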

View File

@@ -0,0 +1,19 @@
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<TargetFramework>net10.0</TargetFramework>
<ImplicitUsings>enable</ImplicitUsings>
<Nullable>enable</Nullable>
</PropertyGroup>
<ItemGroup>
<ProjectReference Include="../StellaOps.Scheduler.Models/StellaOps.Scheduler.Models.csproj" />
</ItemGroup>
<ItemGroup>
<EmbeddedResource Include="..\..\samples\scanner\images\**\bom-index.json"
Link="Fixtures\%(RecursiveDir)%(Filename)%(Extension)" />
</ItemGroup>
<ItemGroup>
<PackageReference Include="Microsoft.Extensions.DependencyInjection.Abstractions" Version="10.0.0-rc.2.25502.107" />
<PackageReference Include="Microsoft.Extensions.Logging.Abstractions" Version="10.0.0-rc.2.25502.107" />
<PackageReference Include="RoaringBitmap" Version="0.0.9" />
</ItemGroup>
</Project>

View File

@@ -0,0 +1,10 @@
# Scheduler ImpactIndex Task Board (Sprint 16)
| ID | Status | Owner(s) | Depends on | Description | Exit Criteria |
|----|--------|----------|------------|-------------|---------------|
| SCHED-IMPACT-16-300 | DONE (2025-10-20) | Scheduler ImpactIndex Guild | SAMPLES-10-001 | **STUB** ingest/query using fixtures to unblock Scheduler planning (remove by SP16 end). | Stub merges fixture BOM-Index, query API returns deterministic results, removal note tracked. |
| SCHED-IMPACT-16-301 | DONE (2025-10-26) | Scheduler ImpactIndex Guild | SCANNER-EMIT-10-605 | Implement ingestion of per-image BOM-Index sidecars into roaring bitmap store (contains/usedBy). | Ingestion tests process sample SBOM index; bitmaps persisted; deterministic IDs assigned. |
| SCHED-IMPACT-16-302 | DONE (2025-10-26) | Scheduler ImpactIndex Guild | SCHED-IMPACT-16-301 | Provide query APIs (ResolveByPurls, ResolveByVulns, ResolveAll, selectors) with tenant/namespace filters. | Query functions tested; performance benchmarks documented; selectors enforce filters. |
| SCHED-IMPACT-16-303 | TODO | Scheduler ImpactIndex Guild | SCHED-IMPACT-16-301 | Snapshot/compaction + invalidation for removed images; persistence to RocksDB/Redis per architecture. | Snapshot routine implemented; invalidation tests pass; docs describe recovery. |
> Removal tracking note: see `src/Scheduler/__Libraries/StellaOps.Scheduler.ImpactIndex/REMOVAL_NOTE.md` for follow-up actions once the roaring bitmap implementation lands.

View File

@@ -0,0 +1,4 @@
# StellaOps.Scheduler.Models — Agent Charter
## Mission
Define Scheduler DTOs (Schedule, Run, ImpactSet, Selector, DeltaSummary) per `docs/ARCHITECTURE_SCHEDULER.md`.

View File

@@ -0,0 +1,3 @@
using System.Runtime.CompilerServices;
[assembly: InternalsVisibleTo("StellaOps.Scheduler.ImpactIndex")]

View File

@@ -0,0 +1,120 @@
using System.Collections.Immutable;
using System.Text.Json.Serialization;
namespace StellaOps.Scheduler.Models;
/// <summary>
/// Audit log entry capturing schedule/run lifecycle events.
/// </summary>
public sealed record AuditRecord
{
public AuditRecord(
string id,
string tenantId,
string category,
string action,
DateTimeOffset occurredAt,
AuditActor actor,
string? entityId = null,
string? scheduleId = null,
string? runId = null,
string? correlationId = null,
IEnumerable<KeyValuePair<string, string>>? metadata = null,
string? message = null)
: this(
id,
tenantId,
Validation.EnsureSimpleIdentifier(category, nameof(category)),
Validation.EnsureSimpleIdentifier(action, nameof(action)),
Validation.NormalizeTimestamp(occurredAt),
actor,
Validation.TrimToNull(entityId),
Validation.TrimToNull(scheduleId),
Validation.TrimToNull(runId),
Validation.TrimToNull(correlationId),
Validation.NormalizeMetadata(metadata),
Validation.TrimToNull(message))
{
}
[JsonConstructor]
public AuditRecord(
string id,
string tenantId,
string category,
string action,
DateTimeOffset occurredAt,
AuditActor actor,
string? entityId,
string? scheduleId,
string? runId,
string? correlationId,
ImmutableSortedDictionary<string, string> metadata,
string? message)
{
Id = Validation.EnsureId(id, nameof(id));
TenantId = Validation.EnsureTenantId(tenantId, nameof(tenantId));
Category = Validation.EnsureSimpleIdentifier(category, nameof(category));
Action = Validation.EnsureSimpleIdentifier(action, nameof(action));
OccurredAt = Validation.NormalizeTimestamp(occurredAt);
Actor = actor ?? throw new ArgumentNullException(nameof(actor));
EntityId = Validation.TrimToNull(entityId);
ScheduleId = Validation.TrimToNull(scheduleId);
RunId = Validation.TrimToNull(runId);
CorrelationId = Validation.TrimToNull(correlationId);
var materializedMetadata = metadata ?? ImmutableSortedDictionary<string, string>.Empty;
Metadata = materializedMetadata.Count > 0
? materializedMetadata.WithComparers(StringComparer.Ordinal)
: ImmutableSortedDictionary<string, string>.Empty;
Message = Validation.TrimToNull(message);
}
public string Id { get; }
public string TenantId { get; }
public string Category { get; }
public string Action { get; }
public DateTimeOffset OccurredAt { get; }
public AuditActor Actor { get; }
[JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
public string? EntityId { get; }
[JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
public string? ScheduleId { get; }
[JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
public string? RunId { get; }
[JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
public string? CorrelationId { get; }
[JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingDefault)]
public ImmutableSortedDictionary<string, string> Metadata { get; } = ImmutableSortedDictionary<string, string>.Empty;
[JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
public string? Message { get; }
}
/// <summary>
/// Actor associated with an audit entry.
/// </summary>
public sealed record AuditActor
{
public AuditActor(string actorId, string displayName, string kind)
{
ActorId = Validation.EnsureSimpleIdentifier(actorId, nameof(actorId));
DisplayName = Validation.EnsureName(displayName, nameof(displayName));
Kind = Validation.EnsureSimpleIdentifier(kind, nameof(kind));
}
public string ActorId { get; }
public string DisplayName { get; }
public string Kind { get; }
}
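
Creating an entry through the validating constructor, with hypothetical identifiers (the exact formats accepted by Validation.EnsureId and EnsureSimpleIdentifier are defined elsewhere in this module):

var record = new AuditRecord(
    id: "audit0001",
    tenantId: "tenant-a",
    category: "scheduler",
    action: "pause",
    occurredAt: DateTimeOffset.UtcNow,
    actor: new AuditActor("scheduler", "Scheduler Service", "service"),
    scheduleId: "sch-0001",
    metadata: new[] { KeyValuePair.Create("reason", "maintenance") });
// Metadata is normalized into a sorted dictionary with ordinal keys; blank
// optional fields are trimmed to null and skipped during serialization.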

View File

@@ -0,0 +1,470 @@
using System.Text.Encodings.Web;
using System.Text.Json;
using System.Text.Json.Serialization;
using System.Text.Json.Serialization.Metadata;
namespace StellaOps.Scheduler.Models;
/// <summary>
/// Deterministic serializer for scheduler DTOs.
/// </summary>
public static class CanonicalJsonSerializer
{
private static readonly JsonSerializerOptions CompactOptions = CreateOptions(writeIndented: false);
private static readonly JsonSerializerOptions PrettyOptions = CreateOptions(writeIndented: true);
private static readonly IReadOnlyDictionary<Type, string[]> PropertyOrder = new Dictionary<Type, string[]>
{
[typeof(Schedule)] = new[]
{
"schemaVersion",
"id",
"tenantId",
"name",
"enabled",
"cronExpression",
"timezone",
"mode",
"selection",
"onlyIf",
"notify",
"limits",
"subscribers",
"createdAt",
"createdBy",
"updatedAt",
"updatedBy",
},
[typeof(Selector)] = new[]
{
"scope",
"tenantId",
"namespaces",
"repositories",
"digests",
"includeTags",
"labels",
"resolvesTags",
},
[typeof(LabelSelector)] = new[]
{
"key",
"values",
},
[typeof(ScheduleOnlyIf)] = new[]
{
"lastReportOlderThanDays",
"policyRevision",
},
[typeof(ScheduleNotify)] = new[]
{
"onNewFindings",
"minSeverity",
"includeKev",
"includeQuietFindings",
},
[typeof(ScheduleLimits)] = new[]
{
"maxJobs",
"ratePerSecond",
"parallelism",
"burst",
},
[typeof(Run)] = new[]
{
"schemaVersion",
"id",
"tenantId",
"scheduleId",
"trigger",
"state",
"stats",
"reason",
"createdAt",
"startedAt",
"finishedAt",
"error",
"deltas",
},
[typeof(RunStats)] = new[]
{
"candidates",
"deduped",
"queued",
"completed",
"deltas",
"newCriticals",
"newHigh",
"newMedium",
"newLow",
},
[typeof(RunReason)] = new[]
{
"manualReason",
"feedserExportId",
"vexerExportId",
"cursor",
"impactWindowFrom",
"impactWindowTo",
},
[typeof(DeltaSummary)] = new[]
{
"imageDigest",
"newFindings",
"newCriticals",
"newHigh",
"newMedium",
"newLow",
"kevHits",
"topFindings",
"reportUrl",
"attestation",
"detectedAt",
},
[typeof(DeltaFinding)] = new[]
{
"purl",
"vulnerabilityId",
"severity",
"link",
},
[typeof(ImpactSet)] = new[]
{
"schemaVersion",
"selector",
"images",
"usageOnly",
"generatedAt",
"total",
"snapshotId",
},
[typeof(ImpactImage)] = new[]
{
"imageDigest",
"registry",
"repository",
"namespaces",
"tags",
"usedByEntrypoint",
"labels",
},
[typeof(AuditRecord)] = new[]
{
"id",
"tenantId",
"category",
"action",
"occurredAt",
"actor",
"entityId",
"scheduleId",
"runId",
"correlationId",
"metadata",
"message",
},
[typeof(AuditActor)] = new[]
{
"actorId",
"displayName",
"kind",
},
[typeof(GraphBuildJob)] = new[]
{
"schemaVersion",
"id",
"tenantId",
"sbomId",
"sbomVersionId",
"sbomDigest",
"graphSnapshotId",
"status",
"trigger",
"attempts",
"cartographerJobId",
"correlationId",
"createdAt",
"startedAt",
"completedAt",
"error",
"metadata",
},
[typeof(GraphOverlayJob)] = new[]
{
"schemaVersion",
"id",
"tenantId",
"graphSnapshotId",
"buildJobId",
"overlayKind",
"overlayKey",
"subjects",
"status",
"trigger",
"attempts",
"correlationId",
"createdAt",
"startedAt",
"completedAt",
"error",
"metadata",
},
[typeof(PolicyRunRequest)] = new[]
{
"schemaVersion",
"tenantId",
"policyId",
"policyVersion",
"mode",
"priority",
"runId",
"queuedAt",
"requestedBy",
"correlationId",
"metadata",
"inputs",
},
[typeof(PolicyRunInputs)] = new[]
{
"sbomSet",
"advisoryCursor",
"vexCursor",
"environment",
"captureExplain",
},
[typeof(PolicyRunStatus)] = new[]
{
"schemaVersion",
"runId",
"tenantId",
"policyId",
"policyVersion",
"mode",
"status",
"priority",
"queuedAt",
"startedAt",
"finishedAt",
"determinismHash",
"errorCode",
"error",
"attempts",
"traceId",
"explainUri",
"metadata",
"stats",
"inputs",
},
[typeof(PolicyRunJob)] = new[]
{
"schemaVersion",
"id",
"tenantId",
"policyId",
"policyVersion",
"mode",
"priority",
"priorityRank",
"runId",
"queuedAt",
"requestedBy",
"correlationId",
"metadata",
"inputs",
"status",
"attemptCount",
"lastAttemptAt",
"lastError",
"createdAt",
"updatedAt",
"availableAt",
"submittedAt",
"completedAt",
"leaseOwner",
"leaseExpiresAt",
"cancellationRequested",
"cancellationRequestedAt",
"cancellationReason",
"cancelledAt",
},
[typeof(PolicyRunStats)] = new[]
{
"components",
"rulesFired",
"findingsWritten",
"vexOverrides",
"quieted",
"suppressed",
"durationSeconds",
},
[typeof(PolicyDiffSummary)] = new[]
{
"schemaVersion",
"added",
"removed",
"unchanged",
"bySeverity",
"ruleHits",
},
[typeof(PolicyDiffSeverityDelta)] = new[]
{
"up",
"down",
},
[typeof(PolicyDiffRuleDelta)] = new[]
{
"ruleId",
"ruleName",
"up",
"down",
},
[typeof(PolicyExplainTrace)] = new[]
{
"schemaVersion",
"findingId",
"policyId",
"policyVersion",
"tenantId",
"runId",
"evaluatedAt",
"verdict",
"ruleChain",
"evidence",
"vexImpacts",
"history",
"metadata",
},
[typeof(PolicyExplainVerdict)] = new[]
{
"status",
"severity",
"quiet",
"score",
"rationale",
},
[typeof(PolicyExplainRule)] = new[]
{
"ruleId",
"ruleName",
"action",
"decision",
"score",
"condition",
},
[typeof(PolicyExplainEvidence)] = new[]
{
"type",
"reference",
"source",
"status",
"weight",
"justification",
"metadata",
},
[typeof(PolicyExplainVexImpact)] = new[]
{
"statementId",
"provider",
"status",
"accepted",
"justification",
"confidence",
},
[typeof(PolicyExplainHistoryEvent)] = new[]
{
"status",
"occurredAt",
"actor",
"note",
},
};
public static string Serialize<T>(T value)
=> JsonSerializer.Serialize(value, CompactOptions);
public static string SerializeIndented<T>(T value)
=> JsonSerializer.Serialize(value, PrettyOptions);
public static T Deserialize<T>(string json)
=> JsonSerializer.Deserialize<T>(json, PrettyOptions)
?? throw new InvalidOperationException($"Unable to deserialize {typeof(T).Name}.");
private static JsonSerializerOptions CreateOptions(bool writeIndented)
{
var options = new JsonSerializerOptions
{
PropertyNamingPolicy = JsonNamingPolicy.CamelCase,
DictionaryKeyPolicy = JsonNamingPolicy.CamelCase,
WriteIndented = writeIndented,
DefaultIgnoreCondition = JsonIgnoreCondition.Never,
Encoder = JavaScriptEncoder.UnsafeRelaxedJsonEscaping,
};
var resolver = options.TypeInfoResolver ?? new DefaultJsonTypeInfoResolver();
options.TypeInfoResolver = new DeterministicResolver(resolver);
options.Converters.Add(new ScheduleModeConverter());
options.Converters.Add(new SelectorScopeConverter());
options.Converters.Add(new RunTriggerConverter());
options.Converters.Add(new RunStateConverter());
options.Converters.Add(new SeverityRankConverter());
options.Converters.Add(new GraphJobStatusConverter());
options.Converters.Add(new GraphBuildJobTriggerConverter());
options.Converters.Add(new GraphOverlayJobTriggerConverter());
options.Converters.Add(new GraphOverlayKindConverter());
options.Converters.Add(new PolicyRunModeConverter());
options.Converters.Add(new PolicyRunPriorityConverter());
options.Converters.Add(new PolicyRunExecutionStatusConverter());
options.Converters.Add(new PolicyVerdictStatusConverter());
options.Converters.Add(new PolicyRunJobStatusConverter());
return options;
}
private sealed class DeterministicResolver : IJsonTypeInfoResolver
{
private readonly IJsonTypeInfoResolver _inner;
public DeterministicResolver(IJsonTypeInfoResolver inner)
{
_inner = inner ?? throw new ArgumentNullException(nameof(inner));
}
public JsonTypeInfo GetTypeInfo(Type type, JsonSerializerOptions options)
{
var info = _inner.GetTypeInfo(type, options);
if (info is null)
{
throw new InvalidOperationException($"Unable to resolve JsonTypeInfo for '{type}'.");
}
if (info.Kind is JsonTypeInfoKind.Object && info.Properties.Count > 1)
{
var ordered = info.Properties
.OrderBy(property => ResolveOrder(type, property.Name))
.ThenBy(property => property.Name, StringComparer.Ordinal)
.ToArray();
info.Properties.Clear();
foreach (var property in ordered)
{
info.Properties.Add(property);
}
}
return info;
}
private static int ResolveOrder(Type type, string propertyName)
{
if (PropertyOrder.TryGetValue(type, out var order))
{
var index = Array.IndexOf(order, propertyName);
if (index >= 0)
{
return index;
}
}
return int.MaxValue;
}
}
}
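
The resolver's effect is easiest to see on a small DTO: property order follows the PropertyOrder table rather than declaration or alphabetical order, and names are camel-cased.

var json = CanonicalJsonSerializer.Serialize(new AuditActor("scheduler", "Scheduler Service", "service"));
// {"actorId":"scheduler","displayName":"Scheduler Service","kind":"service"}

Properties missing from the table sort to the end by ordinal name, so newly added DTO members still serialize deterministically.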

View File

@@ -0,0 +1,201 @@
using System.Text.Json;
using System.Text.Json.Serialization;
namespace StellaOps.Scheduler.Models;
internal sealed class ScheduleModeConverter : HyphenatedEnumConverter<ScheduleMode>
{
protected override IReadOnlyDictionary<ScheduleMode, string> Map { get; } = new Dictionary<ScheduleMode, string>
{
[ScheduleMode.AnalysisOnly] = "analysis-only",
[ScheduleMode.ContentRefresh] = "content-refresh",
};
}
internal sealed class SelectorScopeConverter : HyphenatedEnumConverter<SelectorScope>
{
protected override IReadOnlyDictionary<SelectorScope, string> Map { get; } = new Dictionary<SelectorScope, string>
{
[SelectorScope.AllImages] = "all-images",
[SelectorScope.ByNamespace] = "by-namespace",
[SelectorScope.ByRepository] = "by-repo",
[SelectorScope.ByDigest] = "by-digest",
[SelectorScope.ByLabels] = "by-labels",
};
}
internal sealed class RunTriggerConverter : LowerCaseEnumConverter<RunTrigger>
{
}
internal sealed class RunStateConverter : LowerCaseEnumConverter<RunState>
{
}
internal sealed class SeverityRankConverter : LowerCaseEnumConverter<SeverityRank>
{
protected override string ConvertToString(SeverityRank value)
=> value switch
{
SeverityRank.None => "none",
SeverityRank.Info => "info",
SeverityRank.Low => "low",
SeverityRank.Medium => "medium",
SeverityRank.High => "high",
SeverityRank.Critical => "critical",
SeverityRank.Unknown => "unknown",
_ => throw new ArgumentOutOfRangeException(nameof(value), value, null),
};
}
internal sealed class GraphJobStatusConverter : LowerCaseEnumConverter<GraphJobStatus>
{
}
internal sealed class GraphBuildJobTriggerConverter : HyphenatedEnumConverter<GraphBuildJobTrigger>
{
protected override IReadOnlyDictionary<GraphBuildJobTrigger, string> Map { get; } = new Dictionary<GraphBuildJobTrigger, string>
{
[GraphBuildJobTrigger.SbomVersion] = "sbom-version",
[GraphBuildJobTrigger.Backfill] = "backfill",
[GraphBuildJobTrigger.Manual] = "manual",
};
}
internal sealed class GraphOverlayJobTriggerConverter : HyphenatedEnumConverter<GraphOverlayJobTrigger>
{
protected override IReadOnlyDictionary<GraphOverlayJobTrigger, string> Map { get; } = new Dictionary<GraphOverlayJobTrigger, string>
{
[GraphOverlayJobTrigger.Policy] = "policy",
[GraphOverlayJobTrigger.Advisory] = "advisory",
[GraphOverlayJobTrigger.Vex] = "vex",
[GraphOverlayJobTrigger.SbomVersion] = "sbom-version",
[GraphOverlayJobTrigger.Manual] = "manual",
};
}
internal sealed class GraphOverlayKindConverter : LowerCaseEnumConverter<GraphOverlayKind>
{
}
internal sealed class PolicyRunModeConverter : LowerCaseEnumConverter<PolicyRunMode>
{
}
internal sealed class PolicyRunPriorityConverter : LowerCaseEnumConverter<PolicyRunPriority>
{
}
internal sealed class PolicyRunExecutionStatusConverter : JsonConverter<PolicyRunExecutionStatus>
{
private static readonly IReadOnlyDictionary<string, PolicyRunExecutionStatus> Reverse = new Dictionary<string, PolicyRunExecutionStatus>(StringComparer.OrdinalIgnoreCase)
{
["queued"] = PolicyRunExecutionStatus.Queued,
["running"] = PolicyRunExecutionStatus.Running,
["succeeded"] = PolicyRunExecutionStatus.Succeeded,
["failed"] = PolicyRunExecutionStatus.Failed,
["canceled"] = PolicyRunExecutionStatus.Cancelled,
["cancelled"] = PolicyRunExecutionStatus.Cancelled,
["replay_pending"] = PolicyRunExecutionStatus.ReplayPending,
["replay-pending"] = PolicyRunExecutionStatus.ReplayPending,
};
private static readonly IReadOnlyDictionary<PolicyRunExecutionStatus, string> Forward = new Dictionary<PolicyRunExecutionStatus, string>
{
[PolicyRunExecutionStatus.Queued] = "queued",
[PolicyRunExecutionStatus.Running] = "running",
[PolicyRunExecutionStatus.Succeeded] = "succeeded",
[PolicyRunExecutionStatus.Failed] = "failed",
[PolicyRunExecutionStatus.Cancelled] = "canceled",
[PolicyRunExecutionStatus.ReplayPending] = "replay_pending",
};
public override PolicyRunExecutionStatus Read(ref Utf8JsonReader reader, Type typeToConvert, JsonSerializerOptions options)
{
var value = reader.GetString();
if (value is not null && Reverse.TryGetValue(value, out var status))
{
return status;
}
throw new JsonException($"Value '{value}' is not a valid {nameof(PolicyRunExecutionStatus)}.");
}
public override void Write(Utf8JsonWriter writer, PolicyRunExecutionStatus value, JsonSerializerOptions options)
{
if (!Forward.TryGetValue(value, out var text))
{
throw new JsonException($"Unable to serialize {nameof(PolicyRunExecutionStatus)} value '{value}'.");
}
writer.WriteStringValue(text);
}
}
internal sealed class PolicyVerdictStatusConverter : LowerCaseEnumConverter<PolicyVerdictStatus>
{
}
internal sealed class PolicyRunJobStatusConverter : LowerCaseEnumConverter<PolicyRunJobStatus>
{
}
internal abstract class HyphenatedEnumConverter<TEnum> : JsonConverter<TEnum>
where TEnum : struct, Enum
{
private readonly Dictionary<string, TEnum> _reverse;
protected HyphenatedEnumConverter()
{
_reverse = Map.ToDictionary(static pair => pair.Value, static pair => pair.Key, StringComparer.OrdinalIgnoreCase);
}
protected abstract IReadOnlyDictionary<TEnum, string> Map { get; }
public override TEnum Read(ref Utf8JsonReader reader, Type typeToConvert, JsonSerializerOptions options)
{
var value = reader.GetString();
if (value is not null && _reverse.TryGetValue(value, out var parsed))
{
return parsed;
}
throw new JsonException($"Value '{value}' is not a valid {typeof(TEnum).Name}.");
}
public override void Write(Utf8JsonWriter writer, TEnum value, JsonSerializerOptions options)
{
if (Map.TryGetValue(value, out var text))
{
writer.WriteStringValue(text);
return;
}
throw new JsonException($"Unable to serialize {typeof(TEnum).Name} value '{value}'.");
}
}
internal class LowerCaseEnumConverter<TEnum> : JsonConverter<TEnum>
where TEnum : struct, Enum
{
private static readonly Dictionary<string, TEnum> Reverse = Enum
.GetValues<TEnum>()
.ToDictionary(static value => value.ToString().ToLowerInvariant(), static value => value, StringComparer.OrdinalIgnoreCase);
public override TEnum Read(ref Utf8JsonReader reader, Type typeToConvert, JsonSerializerOptions options)
{
var value = reader.GetString();
if (value is not null && Reverse.TryGetValue(value, out var parsed))
{
return parsed;
}
throw new JsonException($"Value '{value}' is not a valid {typeof(TEnum).Name}.");
}
public override void Write(Utf8JsonWriter writer, TEnum value, JsonSerializerOptions options)
=> writer.WriteStringValue(ConvertToString(value));
protected virtual string ConvertToString(TEnum value)
=> value.ToString().ToLowerInvariant();
}
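// Usage sketch (illustrative only): because the enums register these converters via
// [JsonConverter] attributes, plain JsonSerializer calls pick up the wire spellings
// without any custom JsonSerializerOptions. The class and local names below are
// placeholders for this example.
internal static class EnumConverterUsageExample
{
    public static void RoundTrip()
    {
        // PolicyRunExecutionStatus.ReplayPending is written as "replay_pending".
        var json = JsonSerializer.Serialize(PolicyRunExecutionStatus.ReplayPending);
        // Both accepted spellings parse back to Cancelled via the reverse map.
        var canceled = JsonSerializer.Deserialize<PolicyRunExecutionStatus>("\"canceled\"");
        var cancelled = JsonSerializer.Deserialize<PolicyRunExecutionStatus>("\"cancelled\"");
    }
}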

View File

@@ -0,0 +1,179 @@
using System.Text.Json.Serialization;
namespace StellaOps.Scheduler.Models;
/// <summary>
/// Execution mode for a schedule.
/// </summary>
[JsonConverter(typeof(ScheduleModeConverter))]
public enum ScheduleMode
{
AnalysisOnly,
ContentRefresh,
}
/// <summary>
/// Selector scope determining which filters are applied.
/// </summary>
[JsonConverter(typeof(SelectorScopeConverter))]
public enum SelectorScope
{
AllImages,
ByNamespace,
ByRepository,
ByDigest,
ByLabels,
}
/// <summary>
/// Source that triggered a run.
/// </summary>
[JsonConverter(typeof(RunTriggerConverter))]
public enum RunTrigger
{
Cron,
Feedser,
Vexer,
Manual,
}
/// <summary>
/// Lifecycle state of a scheduler run.
/// </summary>
[JsonConverter(typeof(RunStateConverter))]
public enum RunState
{
Planning,
Queued,
Running,
Completed,
Error,
Cancelled,
}
/// <summary>
/// Severity rankings used in scheduler payloads.
/// </summary>
[JsonConverter(typeof(SeverityRankConverter))]
public enum SeverityRank
{
None = 0,
Info = 1,
Low = 2,
Medium = 3,
High = 4,
Critical = 5,
Unknown = 6,
}
/// <summary>
/// Status lifecycle shared by graph build and overlay jobs.
/// </summary>
[JsonConverter(typeof(GraphJobStatusConverter))]
public enum GraphJobStatus
{
Pending,
Queued,
Running,
Completed,
Failed,
Cancelled,
}
/// <summary>
/// Trigger indicating why a graph build job was enqueued.
/// </summary>
[JsonConverter(typeof(GraphBuildJobTriggerConverter))]
public enum GraphBuildJobTrigger
{
SbomVersion,
Backfill,
Manual,
}
/// <summary>
/// Trigger indicating why a graph overlay job was enqueued.
/// </summary>
[JsonConverter(typeof(GraphOverlayJobTriggerConverter))]
public enum GraphOverlayJobTrigger
{
Policy,
Advisory,
Vex,
SbomVersion,
Manual,
}
/// <summary>
/// Overlay category applied to a graph snapshot.
/// </summary>
[JsonConverter(typeof(GraphOverlayKindConverter))]
public enum GraphOverlayKind
{
Policy,
Advisory,
Vex,
}
/// <summary>
/// Mode for policy runs executed by the Policy Engine.
/// </summary>
[JsonConverter(typeof(PolicyRunModeConverter))]
public enum PolicyRunMode
{
Full,
Incremental,
Simulate,
}
/// <summary>
/// Priority assigned to a policy run request.
/// </summary>
[JsonConverter(typeof(PolicyRunPriorityConverter))]
public enum PolicyRunPriority
{
Normal,
High,
Emergency,
}
/// <summary>
/// Execution status for policy runs tracked in the policy_runs collection.
/// </summary>
[JsonConverter(typeof(PolicyRunExecutionStatusConverter))]
public enum PolicyRunExecutionStatus
{
Queued,
Running,
Succeeded,
Failed,
Cancelled,
ReplayPending,
}
/// <summary>
/// Resulting verdict for a policy evaluation.
/// </summary>
[JsonConverter(typeof(PolicyVerdictStatusConverter))]
public enum PolicyVerdictStatus
{
Passed,
Warned,
Blocked,
Quieted,
Ignored,
}
/// <summary>
/// Lifecycle status for scheduler policy run jobs.
/// </summary>
[JsonConverter(typeof(PolicyRunJobStatusConverter))]
public enum PolicyRunJobStatus
{
Pending,
Dispatching,
Submitted,
Completed,
Failed,
Cancelled,
}
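// Comparison sketch (illustrative): SeverityRank carries explicit numeric values, so
// relational operators express thresholds directly. Note that Unknown (6) ranks above
// Critical (5), so threshold checks should exclude it explicitly, as this hypothetical
// helper does.
internal static class SeverityRankExample
{
    public static bool MeetsThreshold(SeverityRank rank, SeverityRank threshold = SeverityRank.High)
        => rank != SeverityRank.Unknown && rank >= threshold;
}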

View File

@@ -0,0 +1,132 @@
using System.Collections.Immutable;
using System.Text.Json.Serialization;
namespace StellaOps.Scheduler.Models;
/// <summary>
/// Job instructing Cartographer to materialize a graph snapshot for an SBOM version.
/// </summary>
public sealed record GraphBuildJob
{
public GraphBuildJob(
string id,
string tenantId,
string sbomId,
string sbomVersionId,
string sbomDigest,
GraphJobStatus status,
GraphBuildJobTrigger trigger,
DateTimeOffset createdAt,
string? graphSnapshotId = null,
int attempts = 0,
string? cartographerJobId = null,
string? correlationId = null,
DateTimeOffset? startedAt = null,
DateTimeOffset? completedAt = null,
string? error = null,
IEnumerable<KeyValuePair<string, string>>? metadata = null,
string? schemaVersion = null)
: this(
id,
tenantId,
sbomId,
sbomVersionId,
sbomDigest,
Validation.TrimToNull(graphSnapshotId),
status,
trigger,
Validation.EnsureNonNegative(attempts, nameof(attempts)),
Validation.TrimToNull(cartographerJobId),
Validation.TrimToNull(correlationId),
Validation.NormalizeTimestamp(createdAt),
Validation.NormalizeTimestamp(startedAt),
Validation.NormalizeTimestamp(completedAt),
Validation.TrimToNull(error),
Validation.NormalizeMetadata(metadata),
schemaVersion)
{
}
[JsonConstructor]
public GraphBuildJob(
string id,
string tenantId,
string sbomId,
string sbomVersionId,
string sbomDigest,
string? graphSnapshotId,
GraphJobStatus status,
GraphBuildJobTrigger trigger,
int attempts,
string? cartographerJobId,
string? correlationId,
DateTimeOffset createdAt,
DateTimeOffset? startedAt,
DateTimeOffset? completedAt,
string? error,
ImmutableSortedDictionary<string, string> metadata,
string? schemaVersion = null)
{
Id = Validation.EnsureId(id, nameof(id));
TenantId = Validation.EnsureTenantId(tenantId, nameof(tenantId));
SbomId = Validation.EnsureId(sbomId, nameof(sbomId));
SbomVersionId = Validation.EnsureId(sbomVersionId, nameof(sbomVersionId));
SbomDigest = Validation.EnsureDigestFormat(sbomDigest, nameof(sbomDigest));
GraphSnapshotId = Validation.TrimToNull(graphSnapshotId);
Status = status;
Trigger = trigger;
Attempts = Validation.EnsureNonNegative(attempts, nameof(attempts));
CartographerJobId = Validation.TrimToNull(cartographerJobId);
CorrelationId = Validation.TrimToNull(correlationId);
CreatedAt = Validation.NormalizeTimestamp(createdAt);
StartedAt = Validation.NormalizeTimestamp(startedAt);
CompletedAt = Validation.NormalizeTimestamp(completedAt);
Error = Validation.TrimToNull(error);
var materializedMetadata = metadata ?? ImmutableSortedDictionary<string, string>.Empty;
Metadata = materializedMetadata.Count > 0
? materializedMetadata.WithComparers(StringComparer.Ordinal)
: ImmutableSortedDictionary<string, string>.Empty;
SchemaVersion = SchedulerSchemaVersions.EnsureGraphBuildJob(schemaVersion);
}
public string SchemaVersion { get; }
public string Id { get; }
public string TenantId { get; }
public string SbomId { get; }
public string SbomVersionId { get; }
public string SbomDigest { get; }
[JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
public string? GraphSnapshotId { get; init; }
public GraphJobStatus Status { get; init; }
public GraphBuildJobTrigger Trigger { get; }
public int Attempts { get; init; }
[JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
public string? CartographerJobId { get; init; }
[JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
public string? CorrelationId { get; init; }
public DateTimeOffset CreatedAt { get; }
[JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
public DateTimeOffset? StartedAt { get; init; }
[JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
public DateTimeOffset? CompletedAt { get; init; }
[JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
public string? Error { get; init; }
[JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingDefault)]
public ImmutableSortedDictionary<string, string> Metadata { get; } = ImmutableSortedDictionary<string, string>.Empty;
}
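// Construction sketch (illustrative; identifier and digest values are placeholders
// assumed to satisfy the Validation helpers): the convenience constructor normalizes
// timestamps, trims optional identifiers, and sorts metadata keys ordinally.
internal static class GraphBuildJobExample
{
    public static GraphBuildJob NewPendingJob(TimeProvider time)
        => new(
            id: "gbj_0001",
            tenantId: "tenant-a",
            sbomId: "sbom_0001",
            sbomVersionId: "sbomv_0001",
            sbomDigest: "sha256:aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",
            status: GraphJobStatus.Pending,
            trigger: GraphBuildJobTrigger.SbomVersion,
            createdAt: time.GetUtcNow(),
            metadata: new[] { new KeyValuePair<string, string>("source", "example") });
}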

View File

@@ -0,0 +1,241 @@
using System.Collections.Generic;
using System.Linq;
namespace StellaOps.Scheduler.Models;
/// <summary>
/// Encapsulates allowed status transitions and invariants for graph jobs.
/// </summary>
public static class GraphJobStateMachine
{
private static readonly IReadOnlyDictionary<GraphJobStatus, GraphJobStatus[]> Adjacency = new Dictionary<GraphJobStatus, GraphJobStatus[]>
{
[GraphJobStatus.Pending] = new[] { GraphJobStatus.Pending, GraphJobStatus.Queued, GraphJobStatus.Running, GraphJobStatus.Failed, GraphJobStatus.Cancelled },
[GraphJobStatus.Queued] = new[] { GraphJobStatus.Queued, GraphJobStatus.Running, GraphJobStatus.Failed, GraphJobStatus.Cancelled },
[GraphJobStatus.Running] = new[] { GraphJobStatus.Running, GraphJobStatus.Completed, GraphJobStatus.Failed, GraphJobStatus.Cancelled },
[GraphJobStatus.Completed] = new[] { GraphJobStatus.Completed },
[GraphJobStatus.Failed] = new[] { GraphJobStatus.Failed },
[GraphJobStatus.Cancelled] = new[] { GraphJobStatus.Cancelled },
};
public static bool CanTransition(GraphJobStatus from, GraphJobStatus to)
{
if (!Adjacency.TryGetValue(from, out var allowed))
{
return false;
}
return allowed.Contains(to);
}
public static bool IsTerminal(GraphJobStatus status)
=> status is GraphJobStatus.Completed or GraphJobStatus.Failed or GraphJobStatus.Cancelled;
public static GraphBuildJob EnsureTransition(
GraphBuildJob job,
GraphJobStatus next,
DateTimeOffset timestamp,
int? attempts = null,
string? errorMessage = null)
{
ArgumentNullException.ThrowIfNull(job);
var normalizedTimestamp = Validation.NormalizeTimestamp(timestamp);
var current = job.Status;
if (!CanTransition(current, next))
{
throw new InvalidOperationException($"Graph build job transition from '{current}' to '{next}' is not allowed.");
}
var nextAttempts = attempts ?? job.Attempts;
if (nextAttempts < job.Attempts)
{
throw new InvalidOperationException("Graph job attempts cannot decrease.");
}
var startedAt = job.StartedAt;
var completedAt = job.CompletedAt;
if (current != GraphJobStatus.Running && next == GraphJobStatus.Running && startedAt is null)
{
startedAt = normalizedTimestamp;
}
if (IsTerminal(next))
{
completedAt ??= normalizedTimestamp;
}
string? nextError = null;
if (next == GraphJobStatus.Failed)
{
var effectiveError = string.IsNullOrWhiteSpace(errorMessage) ? job.Error : errorMessage.Trim();
if (string.IsNullOrWhiteSpace(effectiveError))
{
throw new InvalidOperationException("Transitioning to Failed requires a non-empty error message.");
}
nextError = effectiveError;
}
else if (!string.IsNullOrWhiteSpace(errorMessage))
{
throw new InvalidOperationException("Error message can only be provided when transitioning to Failed state.");
}
var updated = job with
{
Status = next,
Attempts = nextAttempts,
StartedAt = startedAt,
CompletedAt = completedAt,
Error = nextError,
};
Validate(updated);
return updated;
}
public static GraphOverlayJob EnsureTransition(
GraphOverlayJob job,
GraphJobStatus next,
DateTimeOffset timestamp,
int? attempts = null,
string? errorMessage = null)
{
ArgumentNullException.ThrowIfNull(job);
var normalizedTimestamp = Validation.NormalizeTimestamp(timestamp);
var current = job.Status;
if (!CanTransition(current, next))
{
throw new InvalidOperationException($"Graph overlay job transition from '{current}' to '{next}' is not allowed.");
}
var nextAttempts = attempts ?? job.Attempts;
if (nextAttempts < job.Attempts)
{
throw new InvalidOperationException("Graph job attempts cannot decrease.");
}
var startedAt = job.StartedAt;
var completedAt = job.CompletedAt;
if (current != GraphJobStatus.Running && next == GraphJobStatus.Running && startedAt is null)
{
startedAt = normalizedTimestamp;
}
if (IsTerminal(next))
{
completedAt ??= normalizedTimestamp;
}
string? nextError = null;
if (next == GraphJobStatus.Failed)
{
var effectiveError = string.IsNullOrWhiteSpace(errorMessage) ? job.Error : errorMessage.Trim();
if (string.IsNullOrWhiteSpace(effectiveError))
{
throw new InvalidOperationException("Transitioning to Failed requires a non-empty error message.");
}
nextError = effectiveError;
}
else if (!string.IsNullOrWhiteSpace(errorMessage))
{
throw new InvalidOperationException("Error message can only be provided when transitioning to Failed state.");
}
var updated = job with
{
Status = next,
Attempts = nextAttempts,
StartedAt = startedAt,
CompletedAt = completedAt,
Error = nextError,
};
Validate(updated);
return updated;
}
public static void Validate(GraphBuildJob job)
{
ArgumentNullException.ThrowIfNull(job);
if (job.StartedAt is { } started && started < job.CreatedAt)
{
throw new InvalidOperationException("GraphBuildJob.StartedAt cannot be earlier than CreatedAt.");
}
if (job.CompletedAt is { } completed)
{
if (job.StartedAt is { } start && completed < start)
{
throw new InvalidOperationException("GraphBuildJob.CompletedAt cannot be earlier than StartedAt.");
}
if (!IsTerminal(job.Status))
{
throw new InvalidOperationException("GraphBuildJob.CompletedAt set while status is not terminal.");
}
}
else if (IsTerminal(job.Status))
{
throw new InvalidOperationException("Terminal graph build job states must include CompletedAt.");
}
if (job.Status == GraphJobStatus.Failed)
{
if (string.IsNullOrWhiteSpace(job.Error))
{
throw new InvalidOperationException("GraphBuildJob.Error must be populated when status is Failed.");
}
}
else if (!string.IsNullOrWhiteSpace(job.Error))
{
throw new InvalidOperationException("GraphBuildJob.Error must be null for non-failed states.");
}
}
public static void Validate(GraphOverlayJob job)
{
ArgumentNullException.ThrowIfNull(job);
if (job.StartedAt is { } started && started < job.CreatedAt)
{
throw new InvalidOperationException("GraphOverlayJob.StartedAt cannot be earlier than CreatedAt.");
}
if (job.CompletedAt is { } completed)
{
if (job.StartedAt is { } start && completed < start)
{
throw new InvalidOperationException("GraphOverlayJob.CompletedAt cannot be earlier than StartedAt.");
}
if (!IsTerminal(job.Status))
{
throw new InvalidOperationException("GraphOverlayJob.CompletedAt set while status is not terminal.");
}
}
else if (IsTerminal(job.Status))
{
throw new InvalidOperationException("Terminal graph overlay job states must include CompletedAt.");
}
if (job.Status == GraphJobStatus.Failed)
{
if (string.IsNullOrWhiteSpace(job.Error))
{
throw new InvalidOperationException("GraphOverlayJob.Error must be populated when status is Failed.");
}
}
else if (!string.IsNullOrWhiteSpace(job.Error))
{
throw new InvalidOperationException("GraphOverlayJob.Error must be null for non-failed states.");
}
}
}
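// Lifecycle sketch (illustrative): walks a build job through the allowed
// Pending -> Queued -> Running -> Completed path, letting EnsureTransition stamp
// StartedAt/CompletedAt and enforce the invariants checked by Validate. Assumes the
// supplied clock is at or after the job's CreatedAt.
internal static class GraphJobStateMachineExample
{
    public static GraphBuildJob Complete(GraphBuildJob pending, TimeProvider time)
    {
        var queued = GraphJobStateMachine.EnsureTransition(pending, GraphJobStatus.Queued, time.GetUtcNow());
        var running = GraphJobStateMachine.EnsureTransition(queued, GraphJobStatus.Running, time.GetUtcNow(), attempts: queued.Attempts + 1);
        return GraphJobStateMachine.EnsureTransition(running, GraphJobStatus.Completed, time.GetUtcNow());
    }
}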

View File

@@ -0,0 +1,132 @@
using System.Collections.Immutable;
using System.Text.Json.Serialization;
namespace StellaOps.Scheduler.Models;
/// <summary>
/// Job that materializes or refreshes an overlay on top of an existing graph snapshot.
/// </summary>
public sealed record GraphOverlayJob
{
public GraphOverlayJob(
string id,
string tenantId,
string graphSnapshotId,
GraphOverlayKind overlayKind,
string overlayKey,
GraphJobStatus status,
GraphOverlayJobTrigger trigger,
DateTimeOffset createdAt,
IEnumerable<string>? subjects = null,
int attempts = 0,
string? buildJobId = null,
string? correlationId = null,
DateTimeOffset? startedAt = null,
DateTimeOffset? completedAt = null,
string? error = null,
IEnumerable<KeyValuePair<string, string>>? metadata = null,
string? schemaVersion = null)
: this(
id,
tenantId,
graphSnapshotId,
Validation.TrimToNull(buildJobId),
overlayKind,
Validation.EnsureNotNullOrWhiteSpace(overlayKey, nameof(overlayKey)),
Validation.NormalizeStringSet(subjects, nameof(subjects)),
status,
trigger,
Validation.EnsureNonNegative(attempts, nameof(attempts)),
Validation.TrimToNull(correlationId),
Validation.NormalizeTimestamp(createdAt),
Validation.NormalizeTimestamp(startedAt),
Validation.NormalizeTimestamp(completedAt),
Validation.TrimToNull(error),
Validation.NormalizeMetadata(metadata),
schemaVersion)
{
}
[JsonConstructor]
public GraphOverlayJob(
string id,
string tenantId,
string graphSnapshotId,
string? buildJobId,
GraphOverlayKind overlayKind,
string overlayKey,
ImmutableArray<string> subjects,
GraphJobStatus status,
GraphOverlayJobTrigger trigger,
int attempts,
string? correlationId,
DateTimeOffset createdAt,
DateTimeOffset? startedAt,
DateTimeOffset? completedAt,
string? error,
ImmutableSortedDictionary<string, string> metadata,
string? schemaVersion = null)
{
Id = Validation.EnsureId(id, nameof(id));
TenantId = Validation.EnsureTenantId(tenantId, nameof(tenantId));
GraphSnapshotId = Validation.EnsureId(graphSnapshotId, nameof(graphSnapshotId));
BuildJobId = Validation.TrimToNull(buildJobId);
OverlayKind = overlayKind;
OverlayKey = Validation.EnsureNotNullOrWhiteSpace(overlayKey, nameof(overlayKey));
Subjects = subjects.IsDefault ? ImmutableArray<string>.Empty : subjects;
Status = status;
Trigger = trigger;
Attempts = Validation.EnsureNonNegative(attempts, nameof(attempts));
CorrelationId = Validation.TrimToNull(correlationId);
CreatedAt = Validation.NormalizeTimestamp(createdAt);
StartedAt = Validation.NormalizeTimestamp(startedAt);
CompletedAt = Validation.NormalizeTimestamp(completedAt);
Error = Validation.TrimToNull(error);
var materializedMetadata = metadata ?? ImmutableSortedDictionary<string, string>.Empty;
Metadata = materializedMetadata.Count > 0
? materializedMetadata.WithComparers(StringComparer.Ordinal)
: ImmutableSortedDictionary<string, string>.Empty;
SchemaVersion = SchedulerSchemaVersions.EnsureGraphOverlayJob(schemaVersion);
}
public string SchemaVersion { get; }
public string Id { get; }
public string TenantId { get; }
public string GraphSnapshotId { get; }
[JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
public string? BuildJobId { get; init; }
public GraphOverlayKind OverlayKind { get; }
public string OverlayKey { get; }
[JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingDefault)]
public ImmutableArray<string> Subjects { get; } = ImmutableArray<string>.Empty;
public GraphJobStatus Status { get; init; }
public GraphOverlayJobTrigger Trigger { get; }
public int Attempts { get; init; }
[JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
public string? CorrelationId { get; init; }
public DateTimeOffset CreatedAt { get; }
[JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
public DateTimeOffset? StartedAt { get; init; }
[JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
public DateTimeOffset? CompletedAt { get; init; }
[JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
public string? Error { get; init; }
[JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingDefault)]
public ImmutableSortedDictionary<string, string> Metadata { get; } = ImmutableSortedDictionary<string, string>.Empty;
}
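// Construction sketch (illustrative; identifiers and the purl are placeholders assumed
// to satisfy the Validation helpers): overlay jobs pin an overlay kind/key pair to an
// existing snapshot and may scope refresh work to specific subjects.
internal static class GraphOverlayJobExample
{
    public static GraphOverlayJob NewPolicyOverlay(string snapshotId, TimeProvider time)
        => new(
            id: "goj_0001",
            tenantId: "tenant-a",
            graphSnapshotId: snapshotId,
            overlayKind: GraphOverlayKind.Policy,
            overlayKey: "policy@42",
            status: GraphJobStatus.Pending,
            trigger: GraphOverlayJobTrigger.Policy,
            createdAt: time.GetUtcNow(),
            subjects: new[] { "pkg:npm/lodash@4.17.21" });
}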

View File

@@ -0,0 +1,138 @@
using System.Collections.Immutable;
using System.Text.Json.Serialization;
namespace StellaOps.Scheduler.Models;
/// <summary>
/// Result from resolving impacted images for a selector.
/// </summary>
public sealed record ImpactSet
{
public ImpactSet(
Selector selector,
IEnumerable<ImpactImage> images,
bool usageOnly,
DateTimeOffset generatedAt,
int? total = null,
string? snapshotId = null,
string? schemaVersion = null)
: this(
selector,
NormalizeImages(images),
usageOnly,
Validation.NormalizeTimestamp(generatedAt),
total ?? images.Count(),
Validation.TrimToNull(snapshotId),
schemaVersion)
{
}
[JsonConstructor]
public ImpactSet(
Selector selector,
ImmutableArray<ImpactImage> images,
bool usageOnly,
DateTimeOffset generatedAt,
int total,
string? snapshotId,
string? schemaVersion = null)
{
Selector = selector ?? throw new ArgumentNullException(nameof(selector));
Images = images.IsDefault ? ImmutableArray<ImpactImage>.Empty : images;
UsageOnly = usageOnly;
GeneratedAt = Validation.NormalizeTimestamp(generatedAt);
Total = Validation.EnsureNonNegative(total, nameof(total));
SnapshotId = Validation.TrimToNull(snapshotId);
SchemaVersion = SchedulerSchemaVersions.EnsureImpactSet(schemaVersion);
}
public string SchemaVersion { get; }
public Selector Selector { get; }
public ImmutableArray<ImpactImage> Images { get; } = ImmutableArray<ImpactImage>.Empty;
public bool UsageOnly { get; }
public DateTimeOffset GeneratedAt { get; }
public int Total { get; }
[JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
public string? SnapshotId { get; }
private static ImmutableArray<ImpactImage> NormalizeImages(IEnumerable<ImpactImage> images)
{
ArgumentNullException.ThrowIfNull(images);
return images
.Where(static image => image is not null)
.Select(static image => image!)
.OrderBy(static image => image.ImageDigest, StringComparer.Ordinal)
.ToImmutableArray();
}
}
/// <summary>
/// Impacted image descriptor returned from the impact index.
/// </summary>
public sealed record ImpactImage
{
public ImpactImage(
string imageDigest,
string registry,
string repository,
IEnumerable<string>? namespaces = null,
IEnumerable<string>? tags = null,
bool usedByEntrypoint = false,
IEnumerable<KeyValuePair<string, string>>? labels = null)
: this(
Validation.EnsureDigestFormat(imageDigest, nameof(imageDigest)),
Validation.EnsureSimpleIdentifier(registry, nameof(registry)),
Validation.EnsureSimpleIdentifier(repository, nameof(repository)),
Validation.NormalizeStringSet(namespaces, nameof(namespaces)),
Validation.NormalizeTagPatterns(tags),
usedByEntrypoint,
Validation.NormalizeMetadata(labels))
{
}
[JsonConstructor]
public ImpactImage(
string imageDigest,
string registry,
string repository,
ImmutableArray<string> namespaces,
ImmutableArray<string> tags,
bool usedByEntrypoint,
ImmutableSortedDictionary<string, string> labels)
{
ImageDigest = Validation.EnsureDigestFormat(imageDigest, nameof(imageDigest));
Registry = Validation.EnsureSimpleIdentifier(registry, nameof(registry));
Repository = Validation.EnsureSimpleIdentifier(repository, nameof(repository));
Namespaces = namespaces.IsDefault ? ImmutableArray<string>.Empty : namespaces;
Tags = tags.IsDefault ? ImmutableArray<string>.Empty : tags;
UsedByEntrypoint = usedByEntrypoint;
var materializedLabels = labels ?? ImmutableSortedDictionary<string, string>.Empty;
Labels = materializedLabels.Count > 0
? materializedLabels.WithComparers(StringComparer.Ordinal)
: ImmutableSortedDictionary<string, string>.Empty;
}
public string ImageDigest { get; }
public string Registry { get; }
public string Repository { get; }
[JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingDefault)]
public ImmutableArray<string> Namespaces { get; } = ImmutableArray<string>.Empty;
[JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingDefault)]
public ImmutableArray<string> Tags { get; } = ImmutableArray<string>.Empty;
public bool UsedByEntrypoint { get; }
[JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingDefault)]
public ImmutableSortedDictionary<string, string> Labels { get; } = ImmutableSortedDictionary<string, string>.Empty;
}
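// Assembly sketch (illustrative; digests, registry, and repository names are
// placeholders assumed to pass validation): the convenience constructor orders images
// by digest and derives Total, keeping serialized output deterministic.
internal static class ImpactSetExample
{
    public static ImpactSet Build(Selector selector, DateTimeOffset generatedAt)
    {
        var images = new[]
        {
            new ImpactImage(
                imageDigest: "sha256:bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb",
                registry: "registry",
                repository: "app"),
            new ImpactImage(
                imageDigest: "sha256:aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",
                registry: "registry",
                repository: "api",
                usedByEntrypoint: true),
        };
        // Images come back sorted by digest regardless of input order.
        return new ImpactSet(selector, images, usageOnly: false, generatedAt);
    }
}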

View File

@@ -0,0 +1,185 @@
using System;
using System.Collections.Immutable;
using System.Text.Json.Serialization;
namespace StellaOps.Scheduler.Models;
public sealed record PolicyRunJob(
string SchemaVersion,
string Id,
string TenantId,
string PolicyId,
int? PolicyVersion,
PolicyRunMode Mode,
PolicyRunPriority Priority,
int PriorityRank,
string? RunId,
string? RequestedBy,
string? CorrelationId,
ImmutableSortedDictionary<string, string>? Metadata,
PolicyRunInputs Inputs,
DateTimeOffset? QueuedAt,
PolicyRunJobStatus Status,
int AttemptCount,
DateTimeOffset? LastAttemptAt,
string? LastError,
DateTimeOffset CreatedAt,
DateTimeOffset UpdatedAt,
DateTimeOffset AvailableAt,
DateTimeOffset? SubmittedAt,
DateTimeOffset? CompletedAt,
string? LeaseOwner,
DateTimeOffset? LeaseExpiresAt,
bool CancellationRequested,
DateTimeOffset? CancellationRequestedAt,
string? CancellationReason,
DateTimeOffset? CancelledAt)
{
public string SchemaVersion { get; init; } = SchedulerSchemaVersions.EnsurePolicyRunJob(SchemaVersion);
public string Id { get; init; } = Validation.EnsureId(Id, nameof(Id));
public string TenantId { get; init; } = Validation.EnsureTenantId(TenantId, nameof(TenantId));
public string PolicyId { get; init; } = Validation.EnsureSimpleIdentifier(PolicyId, nameof(PolicyId));
[JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
public int? PolicyVersion { get; init; } = EnsurePolicyVersion(PolicyVersion);
public PolicyRunMode Mode { get; init; } = Mode;
public PolicyRunPriority Priority { get; init; } = Priority;
public int PriorityRank { get; init; } = PriorityRank >= 0 ? PriorityRank : GetPriorityRank(Priority);
[JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
public string? RunId { get; init; } = NormalizeRunId(RunId);
[JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
public string? RequestedBy { get; init; } = Validation.TrimToNull(RequestedBy);
[JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
public string? CorrelationId { get; init; } = Validation.TrimToNull(CorrelationId);
[JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
public ImmutableSortedDictionary<string, string>? Metadata { get; init; } = NormalizeMetadata(Metadata);
public PolicyRunInputs Inputs { get; init; } = Inputs ?? throw new ArgumentNullException(nameof(Inputs));
[JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
public DateTimeOffset? QueuedAt { get; init; } = Validation.NormalizeTimestamp(QueuedAt);
public PolicyRunJobStatus Status { get; init; } = Status;
public int AttemptCount { get; init; } = Validation.EnsureNonNegative(AttemptCount, nameof(AttemptCount));
[JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
public DateTimeOffset? LastAttemptAt { get; init; } = Validation.NormalizeTimestamp(LastAttemptAt);
[JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
public string? LastError { get; init; } = Validation.TrimToNull(LastError);
public DateTimeOffset CreatedAt { get; init; } = NormalizeTimestamp(CreatedAt, nameof(CreatedAt));
public DateTimeOffset UpdatedAt { get; init; } = NormalizeTimestamp(UpdatedAt, nameof(UpdatedAt));
public DateTimeOffset AvailableAt { get; init; } = NormalizeTimestamp(AvailableAt, nameof(AvailableAt));
[JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
public DateTimeOffset? SubmittedAt { get; init; } = Validation.NormalizeTimestamp(SubmittedAt);
[JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
public DateTimeOffset? CompletedAt { get; init; } = Validation.NormalizeTimestamp(CompletedAt);
[JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
public string? LeaseOwner { get; init; } = Validation.TrimToNull(LeaseOwner);
[JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
public DateTimeOffset? LeaseExpiresAt { get; init; } = Validation.NormalizeTimestamp(LeaseExpiresAt);
public bool CancellationRequested { get; init; } = CancellationRequested;
[JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
public DateTimeOffset? CancellationRequestedAt { get; init; } = Validation.NormalizeTimestamp(CancellationRequestedAt);
[JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
public string? CancellationReason { get; init; } = Validation.TrimToNull(CancellationReason);
[JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
public DateTimeOffset? CancelledAt { get; init; } = Validation.NormalizeTimestamp(CancelledAt);
public PolicyRunRequest ToPolicyRunRequest(DateTimeOffset fallbackQueuedAt)
{
var queuedAt = QueuedAt ?? fallbackQueuedAt;
return new PolicyRunRequest(
TenantId,
PolicyId,
Mode,
Inputs,
Priority,
RunId,
PolicyVersion,
RequestedBy,
queuedAt,
CorrelationId,
Metadata);
}
private static int? EnsurePolicyVersion(int? value)
{
if (value is not null && value <= 0)
{
throw new ArgumentOutOfRangeException(nameof(PolicyVersion), value, "Policy version must be positive.");
}
return value;
}
private static string? NormalizeRunId(string? runId)
{
var trimmed = Validation.TrimToNull(runId);
return trimmed is null ? null : Validation.EnsureId(trimmed, nameof(runId));
}
private static ImmutableSortedDictionary<string, string>? NormalizeMetadata(ImmutableSortedDictionary<string, string>? metadata)
{
if (metadata is null || metadata.Count == 0)
{
return null;
}
var builder = ImmutableSortedDictionary.CreateBuilder<string, string>(StringComparer.Ordinal);
foreach (var (key, value) in metadata)
{
var normalizedKey = Validation.TrimToNull(key);
var normalizedValue = Validation.TrimToNull(value);
if (normalizedKey is null || normalizedValue is null)
{
continue;
}
builder[normalizedKey.ToLowerInvariant()] = normalizedValue;
}
return builder.Count == 0 ? null : builder.ToImmutable();
}
private static int GetPriorityRank(PolicyRunPriority priority)
=> priority switch
{
PolicyRunPriority.Emergency => 2,
PolicyRunPriority.High => 1,
_ => 0
};
private static DateTimeOffset NormalizeTimestamp(DateTimeOffset value, string propertyName)
{
var normalized = Validation.NormalizeTimestamp(value);
if (normalized == default)
{
throw new ArgumentException($"{propertyName} must be a valid timestamp.", propertyName);
}
return normalized;
}
}
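// Dispatch sketch (illustrative): a queued job is projected into the wire-level
// PolicyRunRequest, falling back to the supplied timestamp when QueuedAt was never set.
internal static class PolicyRunJobExample
{
    public static PolicyRunRequest ToRequest(PolicyRunJob job, TimeProvider time)
        => job.ToPolicyRunRequest(fallbackQueuedAt: time.GetUtcNow());
}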

View File

@@ -0,0 +1,930 @@
using System.Collections.Immutable;
using System.Linq;
using System.Text.Json;
using System.Text.Json.Serialization;
namespace StellaOps.Scheduler.Models;
/// <summary>
/// Request payload enqueued by the policy orchestrator or its clients.
/// </summary>
public sealed record PolicyRunRequest
{
public PolicyRunRequest(
string tenantId,
string policyId,
PolicyRunMode mode,
PolicyRunInputs? inputs = null,
PolicyRunPriority priority = PolicyRunPriority.Normal,
string? runId = null,
int? policyVersion = null,
string? requestedBy = null,
DateTimeOffset? queuedAt = null,
string? correlationId = null,
ImmutableSortedDictionary<string, string>? metadata = null,
string? schemaVersion = null)
: this(
tenantId,
policyId,
policyVersion,
mode,
priority,
runId,
Validation.NormalizeTimestamp(queuedAt),
Validation.TrimToNull(requestedBy),
Validation.TrimToNull(correlationId),
metadata ?? ImmutableSortedDictionary<string, string>.Empty,
inputs ?? PolicyRunInputs.Empty,
schemaVersion)
{
}
[JsonConstructor]
public PolicyRunRequest(
string tenantId,
string policyId,
int? policyVersion,
PolicyRunMode mode,
PolicyRunPriority priority,
string? runId,
DateTimeOffset? queuedAt,
string? requestedBy,
string? correlationId,
ImmutableSortedDictionary<string, string> metadata,
PolicyRunInputs inputs,
string? schemaVersion = null)
{
SchemaVersion = SchedulerSchemaVersions.EnsurePolicyRunRequest(schemaVersion);
TenantId = Validation.EnsureTenantId(tenantId, nameof(tenantId));
PolicyId = Validation.EnsureSimpleIdentifier(policyId, nameof(policyId));
if (policyVersion is not null && policyVersion <= 0)
{
throw new ArgumentOutOfRangeException(nameof(policyVersion), policyVersion, "Policy version must be positive.");
}
PolicyVersion = policyVersion;
Mode = mode;
Priority = priority;
RunId = Validation.TrimToNull(runId) is { Length: > 0 } normalizedRunId
? Validation.EnsureId(normalizedRunId, nameof(runId))
: null;
QueuedAt = Validation.NormalizeTimestamp(queuedAt);
RequestedBy = Validation.TrimToNull(requestedBy);
CorrelationId = Validation.TrimToNull(correlationId);
var normalizedMetadata = (metadata ?? ImmutableSortedDictionary<string, string>.Empty)
.Select(static pair => new KeyValuePair<string, string>(
Validation.TrimToNull(pair.Key)?.ToLowerInvariant() ?? string.Empty,
Validation.TrimToNull(pair.Value) ?? string.Empty))
.Where(static pair => !string.IsNullOrEmpty(pair.Key) && !string.IsNullOrEmpty(pair.Value))
.DistinctBy(static pair => pair.Key, StringComparer.Ordinal)
.OrderBy(static pair => pair.Key, StringComparer.Ordinal)
.ToImmutableSortedDictionary(static pair => pair.Key, static pair => pair.Value, StringComparer.Ordinal);
Metadata = normalizedMetadata.Count == 0 ? null : normalizedMetadata;
Inputs = inputs ?? PolicyRunInputs.Empty;
}
public string SchemaVersion { get; }
public string TenantId { get; }
public string PolicyId { get; }
[JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
public int? PolicyVersion { get; }
public PolicyRunMode Mode { get; }
public PolicyRunPriority Priority { get; }
[JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
public string? RunId { get; }
[JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
public DateTimeOffset? QueuedAt { get; }
[JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
public string? RequestedBy { get; }
[JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
public string? CorrelationId { get; }
[JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
public ImmutableSortedDictionary<string, string>? Metadata { get; }
public PolicyRunInputs Inputs { get; } = PolicyRunInputs.Empty;
}
/// <summary>
/// Scoped inputs for policy runs (SBOM set, cursors, environment).
/// </summary>
public sealed record PolicyRunInputs
{
public static PolicyRunInputs Empty { get; } = new();
public PolicyRunInputs(
IEnumerable<string>? sbomSet = null,
DateTimeOffset? advisoryCursor = null,
DateTimeOffset? vexCursor = null,
IEnumerable<KeyValuePair<string, object?>>? env = null,
bool captureExplain = false)
{
_sbomSet = NormalizeSbomSet(sbomSet);
_advisoryCursor = Validation.NormalizeTimestamp(advisoryCursor);
_vexCursor = Validation.NormalizeTimestamp(vexCursor);
_environment = NormalizeEnvironment(env);
CaptureExplain = captureExplain;
}
public PolicyRunInputs()
{
}
[JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingDefault)]
public ImmutableArray<string> SbomSet
{
get => _sbomSet;
init => _sbomSet = NormalizeSbomSet(value);
}
[JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
public DateTimeOffset? AdvisoryCursor
{
get => _advisoryCursor;
init => _advisoryCursor = Validation.NormalizeTimestamp(value);
}
[JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
public DateTimeOffset? VexCursor
{
get => _vexCursor;
init => _vexCursor = Validation.NormalizeTimestamp(value);
}
[JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingDefault)]
public IReadOnlyDictionary<string, JsonElement> Environment
{
get => _environment;
init => _environment = NormalizeEnvironment(value);
}
[JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingDefault)]
public bool CaptureExplain { get; init; }
private ImmutableArray<string> _sbomSet = ImmutableArray<string>.Empty;
private DateTimeOffset? _advisoryCursor;
private DateTimeOffset? _vexCursor;
private IReadOnlyDictionary<string, JsonElement> _environment = ImmutableSortedDictionary<string, JsonElement>.Empty;
private static ImmutableArray<string> NormalizeSbomSet(IEnumerable<string>? values)
=> Validation.NormalizeStringSet(values, nameof(SbomSet));
private static ImmutableArray<string> NormalizeSbomSet(ImmutableArray<string> values)
=> values.IsDefaultOrEmpty ? ImmutableArray<string>.Empty : NormalizeSbomSet(values.AsEnumerable());
private static IReadOnlyDictionary<string, JsonElement> NormalizeEnvironment(IEnumerable<KeyValuePair<string, object?>>? entries)
{
if (entries is null)
{
return ImmutableSortedDictionary<string, JsonElement>.Empty;
}
var builder = ImmutableSortedDictionary.CreateBuilder<string, JsonElement>(StringComparer.Ordinal);
foreach (var entry in entries)
{
var key = Validation.TrimToNull(entry.Key);
if (key is null)
{
continue;
}
var normalizedKey = key.ToLowerInvariant();
var element = entry.Value switch
{
JsonElement jsonElement => jsonElement.Clone(),
JsonDocument jsonDocument => jsonDocument.RootElement.Clone(),
string text => JsonSerializer.SerializeToElement(text).Clone(),
bool boolean => JsonSerializer.SerializeToElement(boolean).Clone(),
int integer => JsonSerializer.SerializeToElement(integer).Clone(),
long longValue => JsonSerializer.SerializeToElement(longValue).Clone(),
double doubleValue => JsonSerializer.SerializeToElement(doubleValue).Clone(),
decimal decimalValue => JsonSerializer.SerializeToElement(decimalValue).Clone(),
null => JsonSerializer.SerializeToElement<object?>(null).Clone(),
_ => JsonSerializer.SerializeToElement(entry.Value, entry.Value.GetType()).Clone(),
};
builder[normalizedKey] = element;
}
return builder.ToImmutable();
}
private static IReadOnlyDictionary<string, JsonElement> NormalizeEnvironment(IReadOnlyDictionary<string, JsonElement>? environment)
{
if (environment is null || environment.Count == 0)
{
return ImmutableSortedDictionary<string, JsonElement>.Empty;
}
var builder = ImmutableSortedDictionary.CreateBuilder<string, JsonElement>(StringComparer.Ordinal);
foreach (var entry in environment)
{
var key = Validation.TrimToNull(entry.Key);
if (key is null)
{
continue;
}
builder[key.ToLowerInvariant()] = entry.Value.Clone();
}
return builder.ToImmutable();
}
}
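// Inputs sketch (illustrative; the SBOM ids and environment entries are placeholders):
// environment keys are lower-cased and values captured as JsonElement clones, and the
// SBOM set is normalized by Validation.NormalizeStringSet, so heterogeneous settings
// serialize deterministically.
internal static class PolicyRunInputsExample
{
    public static PolicyRunInputs Scoped()
        => new(
            sbomSet: new[] { "sbom_0002", "sbom_0001" },
            env: new KeyValuePair<string, object?>[]
            {
                new("Exposure", "internet"),
                new("sealed", true),
            },
            captureExplain: true);
}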
/// <summary>
/// Stored status for a policy run (policy_runs collection).
/// </summary>
public sealed record PolicyRunStatus
{
public PolicyRunStatus(
string runId,
string tenantId,
string policyId,
int policyVersion,
PolicyRunMode mode,
PolicyRunExecutionStatus status,
PolicyRunPriority priority,
DateTimeOffset queuedAt,
PolicyRunStats? stats = null,
PolicyRunInputs? inputs = null,
DateTimeOffset? startedAt = null,
DateTimeOffset? finishedAt = null,
string? determinismHash = null,
string? errorCode = null,
string? error = null,
int attempts = 0,
string? traceId = null,
string? explainUri = null,
ImmutableSortedDictionary<string, string>? metadata = null,
string? schemaVersion = null)
: this(
runId,
tenantId,
policyId,
policyVersion,
mode,
status,
priority,
Validation.NormalizeTimestamp(queuedAt),
Validation.NormalizeTimestamp(startedAt),
Validation.NormalizeTimestamp(finishedAt),
stats ?? PolicyRunStats.Empty,
inputs ?? PolicyRunInputs.Empty,
determinismHash,
Validation.TrimToNull(errorCode),
Validation.TrimToNull(error),
attempts,
Validation.TrimToNull(traceId),
Validation.TrimToNull(explainUri),
metadata ?? ImmutableSortedDictionary<string, string>.Empty,
schemaVersion)
{
}
[JsonConstructor]
public PolicyRunStatus(
string runId,
string tenantId,
string policyId,
int policyVersion,
PolicyRunMode mode,
PolicyRunExecutionStatus status,
PolicyRunPriority priority,
DateTimeOffset queuedAt,
DateTimeOffset? startedAt,
DateTimeOffset? finishedAt,
PolicyRunStats stats,
PolicyRunInputs inputs,
string? determinismHash,
string? errorCode,
string? error,
int attempts,
string? traceId,
string? explainUri,
ImmutableSortedDictionary<string, string> metadata,
string? schemaVersion = null)
{
SchemaVersion = SchedulerSchemaVersions.EnsurePolicyRunStatus(schemaVersion);
RunId = Validation.EnsureId(runId, nameof(runId));
TenantId = Validation.EnsureTenantId(tenantId, nameof(tenantId));
PolicyId = Validation.EnsureSimpleIdentifier(policyId, nameof(policyId));
if (policyVersion <= 0)
{
throw new ArgumentOutOfRangeException(nameof(policyVersion), policyVersion, "Policy version must be positive.");
}
PolicyVersion = policyVersion;
Mode = mode;
Status = status;
Priority = priority;
QueuedAt = Validation.NormalizeTimestamp(queuedAt);
StartedAt = Validation.NormalizeTimestamp(startedAt);
FinishedAt = Validation.NormalizeTimestamp(finishedAt);
Stats = stats ?? PolicyRunStats.Empty;
Inputs = inputs ?? PolicyRunInputs.Empty;
DeterminismHash = Validation.TrimToNull(determinismHash);
ErrorCode = Validation.TrimToNull(errorCode);
Error = Validation.TrimToNull(error);
Attempts = attempts < 0
? throw new ArgumentOutOfRangeException(nameof(attempts), attempts, "Attempts must be non-negative.")
: attempts;
TraceId = Validation.TrimToNull(traceId);
ExplainUri = Validation.TrimToNull(explainUri);
Metadata = (metadata ?? ImmutableSortedDictionary<string, string>.Empty)
.Select(static pair => new KeyValuePair<string, string>(
Validation.TrimToNull(pair.Key)?.ToLowerInvariant() ?? string.Empty,
Validation.TrimToNull(pair.Value) ?? string.Empty))
.Where(static pair => !string.IsNullOrEmpty(pair.Key) && !string.IsNullOrEmpty(pair.Value))
.DistinctBy(static pair => pair.Key, StringComparer.Ordinal)
.OrderBy(static pair => pair.Key, StringComparer.Ordinal)
.ToImmutableSortedDictionary(static pair => pair.Key, static pair => pair.Value, StringComparer.Ordinal);
}
public string SchemaVersion { get; }
public string RunId { get; }
public string TenantId { get; }
public string PolicyId { get; }
public int PolicyVersion { get; }
public PolicyRunMode Mode { get; }
public PolicyRunExecutionStatus Status { get; init; }
public PolicyRunPriority Priority { get; init; }
public DateTimeOffset QueuedAt { get; }
[JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
public DateTimeOffset? StartedAt { get; init; }
[JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
public DateTimeOffset? FinishedAt { get; init; }
[JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
public string? DeterminismHash { get; init; }
[JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
public string? ErrorCode { get; init; }
[JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
public string? Error { get; init; }
[JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingDefault)]
public int Attempts { get; init; }
[JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
public string? TraceId { get; init; }
[JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
public string? ExplainUri { get; init; }
[JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingDefault)]
public ImmutableSortedDictionary<string, string> Metadata { get; init; } = ImmutableSortedDictionary<string, string>.Empty;
public PolicyRunStats Stats { get; init; } = PolicyRunStats.Empty;
public PolicyRunInputs Inputs { get; init; } = PolicyRunInputs.Empty;
}
/// <summary>
/// Aggregated metrics captured for a policy run.
/// </summary>
public sealed record PolicyRunStats
{
public static PolicyRunStats Empty { get; } = new();
public PolicyRunStats(
int components = 0,
int rulesFired = 0,
int findingsWritten = 0,
int vexOverrides = 0,
int quieted = 0,
int suppressed = 0,
double? durationSeconds = null)
{
Components = Validation.EnsureNonNegative(components, nameof(components));
RulesFired = Validation.EnsureNonNegative(rulesFired, nameof(rulesFired));
FindingsWritten = Validation.EnsureNonNegative(findingsWritten, nameof(findingsWritten));
VexOverrides = Validation.EnsureNonNegative(vexOverrides, nameof(vexOverrides));
Quieted = Validation.EnsureNonNegative(quieted, nameof(quieted));
Suppressed = Validation.EnsureNonNegative(suppressed, nameof(suppressed));
DurationSeconds = durationSeconds is { } seconds && seconds < 0
? throw new ArgumentOutOfRangeException(nameof(durationSeconds), durationSeconds, "Duration must be non-negative.")
: durationSeconds;
}
public int Components { get; } = 0;
public int RulesFired { get; } = 0;
public int FindingsWritten { get; } = 0;
public int VexOverrides { get; } = 0;
[JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingDefault)]
public int Quieted { get; } = 0;
[JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingDefault)]
public int Suppressed { get; } = 0;
[JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
public double? DurationSeconds { get; }
}
/// <summary>
/// Summary payload returned by simulations and run diffs.
/// </summary>
public sealed record PolicyDiffSummary
{
public PolicyDiffSummary(
int added,
int removed,
int unchanged,
IEnumerable<KeyValuePair<string, PolicyDiffSeverityDelta>>? bySeverity = null,
IEnumerable<PolicyDiffRuleDelta>? ruleHits = null,
string? schemaVersion = null)
: this(
Validation.EnsureNonNegative(added, nameof(added)),
Validation.EnsureNonNegative(removed, nameof(removed)),
Validation.EnsureNonNegative(unchanged, nameof(unchanged)),
NormalizeSeverity(bySeverity),
NormalizeRuleHits(ruleHits),
schemaVersion)
{
}
[JsonConstructor]
public PolicyDiffSummary(
int added,
int removed,
int unchanged,
ImmutableSortedDictionary<string, PolicyDiffSeverityDelta> bySeverity,
ImmutableArray<PolicyDiffRuleDelta> ruleHits,
string? schemaVersion = null)
{
Added = Validation.EnsureNonNegative(added, nameof(added));
Removed = Validation.EnsureNonNegative(removed, nameof(removed));
Unchanged = Validation.EnsureNonNegative(unchanged, nameof(unchanged));
BySeverity = NormalizeSeverity(bySeverity);
RuleHits = ruleHits.IsDefault ? ImmutableArray<PolicyDiffRuleDelta>.Empty : ruleHits;
SchemaVersion = SchedulerSchemaVersions.EnsurePolicyDiffSummary(schemaVersion);
}
public string SchemaVersion { get; }
public int Added { get; }
public int Removed { get; }
public int Unchanged { get; }
[JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingDefault)]
public ImmutableSortedDictionary<string, PolicyDiffSeverityDelta> BySeverity { get; } = ImmutableSortedDictionary<string, PolicyDiffSeverityDelta>.Empty;
[JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingDefault)]
public ImmutableArray<PolicyDiffRuleDelta> RuleHits { get; } = ImmutableArray<PolicyDiffRuleDelta>.Empty;
private static ImmutableSortedDictionary<string, PolicyDiffSeverityDelta> NormalizeSeverity(IEnumerable<KeyValuePair<string, PolicyDiffSeverityDelta>>? buckets)
{
if (buckets is null)
{
return ImmutableSortedDictionary<string, PolicyDiffSeverityDelta>.Empty;
}
var builder = ImmutableSortedDictionary.CreateBuilder<string, PolicyDiffSeverityDelta>(StringComparer.OrdinalIgnoreCase);
foreach (var bucket in buckets)
{
var key = Validation.TrimToNull(bucket.Key);
if (key is null)
{
continue;
}
var normalizedKey = char.ToUpperInvariant(key[0]) + key[1..].ToLowerInvariant();
builder[normalizedKey] = bucket.Value ?? PolicyDiffSeverityDelta.Empty;
}
return builder.ToImmutable();
}
private static ImmutableArray<PolicyDiffRuleDelta> NormalizeRuleHits(IEnumerable<PolicyDiffRuleDelta>? ruleHits)
{
if (ruleHits is null)
{
return ImmutableArray<PolicyDiffRuleDelta>.Empty;
}
return ruleHits
.Where(static hit => hit is not null)
.Select(static hit => hit!)
.OrderBy(static hit => hit.RuleId, StringComparer.Ordinal)
.ThenBy(static hit => hit.RuleName, StringComparer.Ordinal)
.ToImmutableArray();
}
}
/// <summary>
/// Delta counts for a single severity bucket.
/// </summary>
public sealed record PolicyDiffSeverityDelta
{
public static PolicyDiffSeverityDelta Empty { get; } = new();
public PolicyDiffSeverityDelta(int up = 0, int down = 0)
{
Up = Validation.EnsureNonNegative(up, nameof(up));
Down = Validation.EnsureNonNegative(down, nameof(down));
}
public int Up { get; } = 0;
[JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingDefault)]
public int Down { get; } = 0;
}
/// <summary>
/// Delta counts per rule for simulation reporting.
/// </summary>
public sealed record PolicyDiffRuleDelta
{
public PolicyDiffRuleDelta(string ruleId, string ruleName, int up = 0, int down = 0)
{
RuleId = Validation.EnsureSimpleIdentifier(ruleId, nameof(ruleId));
RuleName = Validation.EnsureName(ruleName, nameof(ruleName));
Up = Validation.EnsureNonNegative(up, nameof(up));
Down = Validation.EnsureNonNegative(down, nameof(down));
}
public string RuleId { get; }
public string RuleName { get; }
[JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingDefault)]
public int Up { get; } = 0;
[JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingDefault)]
public int Down { get; } = 0;
}
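// Diff sketch (illustrative; the rule id and counts are placeholders assumed to satisfy
// the Validation helpers): severity bucket names are canonicalized (e.g. "high" ->
// "High") and rule deltas are ordered by rule id, so summaries compare stably.
internal static class PolicyDiffSummaryExample
{
    public static PolicyDiffSummary Build()
        => new(
            added: 3,
            removed: 1,
            unchanged: 40,
            bySeverity: new[]
            {
                new KeyValuePair<string, PolicyDiffSeverityDelta>("high", new PolicyDiffSeverityDelta(up: 2)),
            },
            ruleHits: new[]
            {
                new PolicyDiffRuleDelta("rule-001", "Block critical CVEs", up: 2, down: 0),
            });
}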
/// <summary>
/// Canonical explain trace for a policy finding.
/// </summary>
public sealed record PolicyExplainTrace
{
public PolicyExplainTrace(
string findingId,
string policyId,
int policyVersion,
string tenantId,
string runId,
PolicyExplainVerdict verdict,
DateTimeOffset evaluatedAt,
IEnumerable<PolicyExplainRule>? ruleChain = null,
IEnumerable<PolicyExplainEvidence>? evidence = null,
IEnumerable<PolicyExplainVexImpact>? vexImpacts = null,
IEnumerable<PolicyExplainHistoryEvent>? history = null,
ImmutableSortedDictionary<string, string>? metadata = null,
string? schemaVersion = null)
: this(
findingId,
policyId,
policyVersion,
tenantId,
runId,
Validation.NormalizeTimestamp(evaluatedAt),
verdict,
NormalizeRuleChain(ruleChain),
NormalizeEvidence(evidence),
NormalizeVexImpacts(vexImpacts),
NormalizeHistory(history),
metadata ?? ImmutableSortedDictionary<string, string>.Empty,
schemaVersion)
{
}
[JsonConstructor]
public PolicyExplainTrace(
string findingId,
string policyId,
int policyVersion,
string tenantId,
string runId,
DateTimeOffset evaluatedAt,
PolicyExplainVerdict verdict,
ImmutableArray<PolicyExplainRule> ruleChain,
ImmutableArray<PolicyExplainEvidence> evidence,
ImmutableArray<PolicyExplainVexImpact> vexImpacts,
ImmutableArray<PolicyExplainHistoryEvent> history,
ImmutableSortedDictionary<string, string> metadata,
string? schemaVersion = null)
{
SchemaVersion = SchedulerSchemaVersions.EnsurePolicyExplainTrace(schemaVersion);
FindingId = Validation.EnsureSimpleIdentifier(findingId, nameof(findingId));
PolicyId = Validation.EnsureSimpleIdentifier(policyId, nameof(policyId));
if (policyVersion <= 0)
{
throw new ArgumentOutOfRangeException(nameof(policyVersion), policyVersion, "Policy version must be positive.");
}
PolicyVersion = policyVersion;
TenantId = Validation.EnsureTenantId(tenantId, nameof(tenantId));
RunId = Validation.EnsureId(runId, nameof(runId));
EvaluatedAt = Validation.NormalizeTimestamp(evaluatedAt);
Verdict = verdict ?? throw new ArgumentNullException(nameof(verdict));
RuleChain = ruleChain.IsDefault ? ImmutableArray<PolicyExplainRule>.Empty : ruleChain;
Evidence = evidence.IsDefault ? ImmutableArray<PolicyExplainEvidence>.Empty : evidence;
VexImpacts = vexImpacts.IsDefault ? ImmutableArray<PolicyExplainVexImpact>.Empty : vexImpacts;
History = history.IsDefault ? ImmutableArray<PolicyExplainHistoryEvent>.Empty : history;
Metadata = (metadata ?? ImmutableSortedDictionary<string, string>.Empty)
.Select(static pair => new KeyValuePair<string, string>(
Validation.TrimToNull(pair.Key)?.ToLowerInvariant() ?? string.Empty,
Validation.TrimToNull(pair.Value) ?? string.Empty))
.Where(static pair => !string.IsNullOrEmpty(pair.Key) && !string.IsNullOrEmpty(pair.Value))
.DistinctBy(static pair => pair.Key, StringComparer.Ordinal)
.OrderBy(static pair => pair.Key, StringComparer.Ordinal)
.ToImmutableSortedDictionary(static pair => pair.Key, static pair => pair.Value, StringComparer.Ordinal);
}
public string SchemaVersion { get; }
public string FindingId { get; }
public string PolicyId { get; }
public int PolicyVersion { get; }
public string TenantId { get; }
public string RunId { get; }
public DateTimeOffset EvaluatedAt { get; }
public PolicyExplainVerdict Verdict { get; }
[JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingDefault)]
public ImmutableArray<PolicyExplainRule> RuleChain { get; } = ImmutableArray<PolicyExplainRule>.Empty;
[JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingDefault)]
public ImmutableArray<PolicyExplainEvidence> Evidence { get; } = ImmutableArray<PolicyExplainEvidence>.Empty;
[JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingDefault)]
public ImmutableArray<PolicyExplainVexImpact> VexImpacts { get; } = ImmutableArray<PolicyExplainVexImpact>.Empty;
[JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingDefault)]
public ImmutableArray<PolicyExplainHistoryEvent> History { get; } = ImmutableArray<PolicyExplainHistoryEvent>.Empty;
[JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingDefault)]
public ImmutableSortedDictionary<string, string> Metadata { get; } = ImmutableSortedDictionary<string, string>.Empty;
private static ImmutableArray<PolicyExplainRule> NormalizeRuleChain(IEnumerable<PolicyExplainRule>? rules)
{
if (rules is null)
{
return ImmutableArray<PolicyExplainRule>.Empty;
}
return rules
.Where(static rule => rule is not null)
.Select(static rule => rule!)
.ToImmutableArray();
}
private static ImmutableArray<PolicyExplainEvidence> NormalizeEvidence(IEnumerable<PolicyExplainEvidence>? evidence)
{
if (evidence is null)
{
return ImmutableArray<PolicyExplainEvidence>.Empty;
}
return evidence
.Where(static item => item is not null)
.Select(static item => item!)
.OrderBy(static item => item.Type, StringComparer.Ordinal)
.ThenBy(static item => item.Reference, StringComparer.Ordinal)
.ToImmutableArray();
}
private static ImmutableArray<PolicyExplainVexImpact> NormalizeVexImpacts(IEnumerable<PolicyExplainVexImpact>? impacts)
{
if (impacts is null)
{
return ImmutableArray<PolicyExplainVexImpact>.Empty;
}
return impacts
.Where(static impact => impact is not null)
.Select(static impact => impact!)
.OrderBy(static impact => impact.StatementId, StringComparer.Ordinal)
.ToImmutableArray();
}
private static ImmutableArray<PolicyExplainHistoryEvent> NormalizeHistory(IEnumerable<PolicyExplainHistoryEvent>? history)
{
if (history is null)
{
return ImmutableArray<PolicyExplainHistoryEvent>.Empty;
}
return history
.Where(static entry => entry is not null)
.Select(static entry => entry!)
.OrderBy(static entry => entry.OccurredAt)
.ToImmutableArray();
}
}
/// <summary>
/// Verdict metadata for explain traces.
/// </summary>
public sealed record PolicyExplainVerdict
{
public PolicyExplainVerdict(
PolicyVerdictStatus status,
SeverityRank? severity = null,
bool quiet = false,
double? score = null,
string? rationale = null)
{
Status = status;
Severity = severity;
Quiet = quiet;
Score = score;
Rationale = Validation.TrimToNull(rationale);
}
public PolicyVerdictStatus Status { get; }
[JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
public SeverityRank? Severity { get; }
[JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingDefault)]
public bool Quiet { get; }
[JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
public double? Score { get; }
[JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
public string? Rationale { get; }
}
/// <summary>
/// Rule evaluation entry captured in explain traces.
/// </summary>
public sealed record PolicyExplainRule
{
public PolicyExplainRule(
string ruleId,
string ruleName,
string action,
string decision,
double score,
string? condition = null)
{
RuleId = Validation.EnsureSimpleIdentifier(ruleId, nameof(ruleId));
RuleName = Validation.EnsureName(ruleName, nameof(ruleName));
Action = Validation.TrimToNull(action) ?? throw new ArgumentNullException(nameof(action));
Decision = Validation.TrimToNull(decision) ?? throw new ArgumentNullException(nameof(decision));
Score = score;
Condition = Validation.TrimToNull(condition);
}
public string RuleId { get; }
public string RuleName { get; }
public string Action { get; }
public string Decision { get; }
[JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingDefault)]
public double Score { get; }
[JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
public string? Condition { get; }
}
/// <summary>
/// Evidence entry considered during policy evaluation.
/// </summary>
public sealed record PolicyExplainEvidence
{
public PolicyExplainEvidence(
string type,
string reference,
string source,
string status,
double weight = 0,
string? justification = null,
ImmutableSortedDictionary<string, string>? metadata = null)
{
Type = Validation.TrimToNull(type) ?? throw new ArgumentNullException(nameof(type));
Reference = Validation.TrimToNull(reference) ?? throw new ArgumentNullException(nameof(reference));
Source = Validation.TrimToNull(source) ?? throw new ArgumentNullException(nameof(source));
Status = Validation.TrimToNull(status) ?? throw new ArgumentNullException(nameof(status));
Weight = weight;
Justification = Validation.TrimToNull(justification);
Metadata = (metadata ?? ImmutableSortedDictionary<string, string>.Empty)
.Select(static pair => new KeyValuePair<string, string>(
Validation.TrimToNull(pair.Key)?.ToLowerInvariant() ?? string.Empty,
Validation.TrimToNull(pair.Value) ?? string.Empty))
.Where(static pair => !string.IsNullOrEmpty(pair.Key) && !string.IsNullOrEmpty(pair.Value))
.DistinctBy(static pair => pair.Key, StringComparer.Ordinal)
.OrderBy(static pair => pair.Key, StringComparer.Ordinal)
.ToImmutableSortedDictionary(static pair => pair.Key, static pair => pair.Value, StringComparer.Ordinal);
}
public string Type { get; }
public string Reference { get; }
public string Source { get; }
public string Status { get; }
[JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingDefault)]
public double Weight { get; }
[JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
public string? Justification { get; }
[JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingDefault)]
public ImmutableSortedDictionary<string, string> Metadata { get; } = ImmutableSortedDictionary<string, string>.Empty;
}
/// <summary>
/// VEX statement impact summary captured in explain traces.
/// </summary>
public sealed record PolicyExplainVexImpact
{
public PolicyExplainVexImpact(
string statementId,
string provider,
string status,
bool accepted,
string? justification = null,
string? confidence = null)
{
StatementId = Validation.TrimToNull(statementId) ?? throw new ArgumentNullException(nameof(statementId));
Provider = Validation.TrimToNull(provider) ?? throw new ArgumentNullException(nameof(provider));
Status = Validation.TrimToNull(status) ?? throw new ArgumentNullException(nameof(status));
Accepted = accepted;
Justification = Validation.TrimToNull(justification);
Confidence = Validation.TrimToNull(confidence);
}
public string StatementId { get; }
public string Provider { get; }
public string Status { get; }
public bool Accepted { get; }
[JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
public string? Justification { get; }
[JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
public string? Confidence { get; }
}
/// <summary>
/// History entry for a finding's policy lifecycle.
/// </summary>
public sealed record PolicyExplainHistoryEvent
{
public PolicyExplainHistoryEvent(
string status,
DateTimeOffset occurredAt,
string? actor = null,
string? note = null)
{
Status = Validation.TrimToNull(status) ?? throw new ArgumentNullException(nameof(status));
OccurredAt = Validation.NormalizeTimestamp(occurredAt);
Actor = Validation.TrimToNull(actor);
Note = Validation.TrimToNull(note);
}
public string Status { get; }
public DateTimeOffset OccurredAt { get; }
[JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
public string? Actor { get; }
[JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
public string? Note { get; }
}

View File

@@ -0,0 +1,378 @@
using System.Collections.Immutable;
using System.Text.Json.Serialization;
namespace StellaOps.Scheduler.Models;
/// <summary>
/// Execution record for a scheduler run.
/// </summary>
public sealed record Run
{
public Run(
string id,
string tenantId,
RunTrigger trigger,
RunState state,
RunStats stats,
DateTimeOffset createdAt,
RunReason? reason = null,
string? scheduleId = null,
DateTimeOffset? startedAt = null,
DateTimeOffset? finishedAt = null,
string? error = null,
IEnumerable<DeltaSummary>? deltas = null,
string? schemaVersion = null)
: this(
id,
tenantId,
trigger,
state,
stats,
reason ?? RunReason.Empty,
scheduleId,
Validation.NormalizeTimestamp(createdAt),
Validation.NormalizeTimestamp(startedAt),
Validation.NormalizeTimestamp(finishedAt),
Validation.TrimToNull(error),
NormalizeDeltas(deltas),
schemaVersion)
{
}
[JsonConstructor]
public Run(
string id,
string tenantId,
RunTrigger trigger,
RunState state,
RunStats stats,
RunReason reason,
string? scheduleId,
DateTimeOffset createdAt,
DateTimeOffset? startedAt,
DateTimeOffset? finishedAt,
string? error,
ImmutableArray<DeltaSummary> deltas,
string? schemaVersion = null)
{
Id = Validation.EnsureId(id, nameof(id));
TenantId = Validation.EnsureTenantId(tenantId, nameof(tenantId));
Trigger = trigger;
State = state;
Stats = stats ?? throw new ArgumentNullException(nameof(stats));
Reason = reason ?? RunReason.Empty;
ScheduleId = Validation.TrimToNull(scheduleId);
CreatedAt = Validation.NormalizeTimestamp(createdAt);
StartedAt = Validation.NormalizeTimestamp(startedAt);
FinishedAt = Validation.NormalizeTimestamp(finishedAt);
Error = Validation.TrimToNull(error);
Deltas = deltas.IsDefault
? ImmutableArray<DeltaSummary>.Empty
: deltas.OrderBy(static delta => delta.ImageDigest, StringComparer.Ordinal).ToImmutableArray();
SchemaVersion = SchedulerSchemaVersions.EnsureRun(schemaVersion);
}
public string SchemaVersion { get; }
public string Id { get; }
public string TenantId { get; }
[JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
public string? ScheduleId { get; }
public RunTrigger Trigger { get; }
public RunState State { get; init; }
public RunStats Stats { get; init; }
public RunReason Reason { get; }
public DateTimeOffset CreatedAt { get; }
[JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
public DateTimeOffset? StartedAt { get; init; }
[JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
public DateTimeOffset? FinishedAt { get; init; }
[JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
public string? Error { get; init; }
[JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingDefault)]
public ImmutableArray<DeltaSummary> Deltas { get; } = ImmutableArray<DeltaSummary>.Empty;
private static ImmutableArray<DeltaSummary> NormalizeDeltas(IEnumerable<DeltaSummary>? deltas)
{
if (deltas is null)
{
return ImmutableArray<DeltaSummary>.Empty;
}
return deltas
.Where(static delta => delta is not null)
.Select(static delta => delta!)
.OrderBy(static delta => delta.ImageDigest, StringComparer.Ordinal)
.ToImmutableArray();
}
}
/// <summary>
/// Context describing why a run executed.
/// </summary>
public sealed record RunReason
{
public static RunReason Empty { get; } = new();
public RunReason(
string? manualReason = null,
string? feedserExportId = null,
string? vexerExportId = null,
string? cursor = null)
{
ManualReason = Validation.TrimToNull(manualReason);
FeedserExportId = Validation.TrimToNull(feedserExportId);
VexerExportId = Validation.TrimToNull(vexerExportId);
Cursor = Validation.TrimToNull(cursor);
}
[JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
public string? ManualReason { get; } = null;
[JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
public string? FeedserExportId { get; } = null;
[JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
public string? VexerExportId { get; } = null;
[JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
public string? Cursor { get; } = null;
[JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
public string? ImpactWindowFrom { get; init; }
[JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
public string? ImpactWindowTo { get; init; }
}
/// <summary>
/// Aggregated counters for a scheduler run.
/// </summary>
public sealed record RunStats
{
public static RunStats Empty { get; } = new();
public RunStats(
int candidates = 0,
int deduped = 0,
int queued = 0,
int completed = 0,
int deltas = 0,
int newCriticals = 0,
int newHigh = 0,
int newMedium = 0,
int newLow = 0)
{
Candidates = Validation.EnsureNonNegative(candidates, nameof(candidates));
Deduped = Validation.EnsureNonNegative(deduped, nameof(deduped));
Queued = Validation.EnsureNonNegative(queued, nameof(queued));
Completed = Validation.EnsureNonNegative(completed, nameof(completed));
Deltas = Validation.EnsureNonNegative(deltas, nameof(deltas));
NewCriticals = Validation.EnsureNonNegative(newCriticals, nameof(newCriticals));
NewHigh = Validation.EnsureNonNegative(newHigh, nameof(newHigh));
NewMedium = Validation.EnsureNonNegative(newMedium, nameof(newMedium));
NewLow = Validation.EnsureNonNegative(newLow, nameof(newLow));
}
public int Candidates { get; } = 0;
public int Deduped { get; } = 0;
public int Queued { get; } = 0;
public int Completed { get; } = 0;
public int Deltas { get; } = 0;
public int NewCriticals { get; } = 0;
public int NewHigh { get; } = 0;
public int NewMedium { get; } = 0;
public int NewLow { get; } = 0;
}
/// <summary>
/// Snapshot of delta impact for an image processed in a run.
/// </summary>
public sealed record DeltaSummary
{
public DeltaSummary(
string imageDigest,
int newFindings,
int newCriticals,
int newHigh,
int newMedium,
int newLow,
IEnumerable<string>? kevHits = null,
IEnumerable<DeltaFinding>? topFindings = null,
string? reportUrl = null,
DeltaAttestation? attestation = null,
DateTimeOffset? detectedAt = null)
: this(
imageDigest,
Validation.EnsureNonNegative(newFindings, nameof(newFindings)),
Validation.EnsureNonNegative(newCriticals, nameof(newCriticals)),
Validation.EnsureNonNegative(newHigh, nameof(newHigh)),
Validation.EnsureNonNegative(newMedium, nameof(newMedium)),
Validation.EnsureNonNegative(newLow, nameof(newLow)),
NormalizeKevHits(kevHits),
NormalizeFindings(topFindings),
Validation.TrimToNull(reportUrl),
attestation,
Validation.NormalizeTimestamp(detectedAt))
{
}
[JsonConstructor]
public DeltaSummary(
string imageDigest,
int newFindings,
int newCriticals,
int newHigh,
int newMedium,
int newLow,
ImmutableArray<string> kevHits,
ImmutableArray<DeltaFinding> topFindings,
string? reportUrl,
DeltaAttestation? attestation,
DateTimeOffset? detectedAt)
{
ImageDigest = Validation.EnsureDigestFormat(imageDigest, nameof(imageDigest));
NewFindings = Validation.EnsureNonNegative(newFindings, nameof(newFindings));
NewCriticals = Validation.EnsureNonNegative(newCriticals, nameof(newCriticals));
NewHigh = Validation.EnsureNonNegative(newHigh, nameof(newHigh));
NewMedium = Validation.EnsureNonNegative(newMedium, nameof(newMedium));
NewLow = Validation.EnsureNonNegative(newLow, nameof(newLow));
KevHits = kevHits.IsDefault ? ImmutableArray<string>.Empty : kevHits;
TopFindings = topFindings.IsDefault
? ImmutableArray<DeltaFinding>.Empty
: topFindings
.OrderBy(static finding => finding.Severity, SeverityRankComparer.Instance)
.ThenBy(static finding => finding.VulnerabilityId, StringComparer.Ordinal)
.ToImmutableArray();
ReportUrl = Validation.TrimToNull(reportUrl);
Attestation = attestation;
DetectedAt = Validation.NormalizeTimestamp(detectedAt);
}
public string ImageDigest { get; }
public int NewFindings { get; }
public int NewCriticals { get; }
public int NewHigh { get; }
public int NewMedium { get; }
public int NewLow { get; }
[JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingDefault)]
public ImmutableArray<string> KevHits { get; } = ImmutableArray<string>.Empty;
[JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingDefault)]
public ImmutableArray<DeltaFinding> TopFindings { get; } = ImmutableArray<DeltaFinding>.Empty;
[JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
public string? ReportUrl { get; }
[JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
public DeltaAttestation? Attestation { get; }
[JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
public DateTimeOffset? DetectedAt { get; }
private static ImmutableArray<string> NormalizeKevHits(IEnumerable<string>? kevHits)
=> Validation.NormalizeStringSet(kevHits, nameof(kevHits));
private static ImmutableArray<DeltaFinding> NormalizeFindings(IEnumerable<DeltaFinding>? findings)
{
if (findings is null)
{
return ImmutableArray<DeltaFinding>.Empty;
}
return findings
.Where(static finding => finding is not null)
.Select(static finding => finding!)
.OrderBy(static finding => finding.Severity, SeverityRankComparer.Instance)
.ThenBy(static finding => finding.VulnerabilityId, StringComparer.Ordinal)
.ToImmutableArray();
}
}
/// <summary>
/// Top finding entry included in delta summaries.
/// </summary>
public sealed record DeltaFinding
{
public DeltaFinding(string purl, string vulnerabilityId, SeverityRank severity, string? link = null)
{
Purl = Validation.EnsureSimpleIdentifier(purl, nameof(purl));
VulnerabilityId = Validation.EnsureSimpleIdentifier(vulnerabilityId, nameof(vulnerabilityId));
Severity = severity;
Link = Validation.TrimToNull(link);
}
public string Purl { get; }
public string VulnerabilityId { get; }
public SeverityRank Severity { get; }
[JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
public string? Link { get; }
}
/// <summary>
/// Rekor/attestation information surfaced with a delta summary.
/// </summary>
public sealed record DeltaAttestation
{
public DeltaAttestation(string? uuid, bool? verified = null)
{
Uuid = Validation.TrimToNull(uuid);
Verified = verified;
}
[JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
public string? Uuid { get; }
[JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
public bool? Verified { get; }
}
internal sealed class SeverityRankComparer : IComparer<SeverityRank>
{
public static SeverityRankComparer Instance { get; } = new();
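// Unknown sorts between High and Medium so unscored findings surface near the top of delta summaries rather than sinking below scored ones.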
private static readonly Dictionary<SeverityRank, int> Order = new()
{
[SeverityRank.Critical] = 0,
[SeverityRank.High] = 1,
[SeverityRank.Unknown] = 2,
[SeverityRank.Medium] = 3,
[SeverityRank.Low] = 4,
[SeverityRank.Info] = 5,
[SeverityRank.None] = 6,
};
public int Compare(SeverityRank x, SeverityRank y)
=> GetOrder(x).CompareTo(GetOrder(y));
private static int GetOrder(SeverityRank severity)
=> Order.TryGetValue(severity, out var value) ? value : int.MaxValue;
}

View File

@@ -0,0 +1,33 @@
using System;
using System.Globalization;
namespace StellaOps.Scheduler.Models;
/// <summary>
/// Convenience helpers for <see cref="RunReason"/> mutations.
/// </summary>
public static class RunReasonExtensions
{
/// <summary>
/// Returns a copy of <paramref name="reason"/> with impact window timestamps normalized to ISO-8601.
/// </summary>
public static RunReason WithImpactWindow(
this RunReason reason,
DateTimeOffset? from,
DateTimeOffset? to)
{
var normalizedFrom = Validation.NormalizeTimestamp(from);
var normalizedTo = Validation.NormalizeTimestamp(to);
if (normalizedFrom.HasValue && normalizedTo.HasValue && normalizedFrom > normalizedTo)
{
throw new ArgumentException("Impact window start must be earlier than or equal to end.");
}
return reason with
{
ImpactWindowFrom = normalizedFrom?.ToString("O", CultureInfo.InvariantCulture),
ImpactWindowTo = normalizedTo?.ToString("O", CultureInfo.InvariantCulture),
};
}
}

View File

@@ -0,0 +1,157 @@
using System;
using System.Collections.Generic;
using System.Linq;
namespace StellaOps.Scheduler.Models;
/// <summary>
/// Encapsulates allowed <see cref="RunState"/> transitions and invariants.
/// </summary>
public static class RunStateMachine
{
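// Every state also maps to itself, so re-asserting the current state stays legal (idempotent writes).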
private static readonly IReadOnlyDictionary<RunState, RunState[]> Adjacency = new Dictionary<RunState, RunState[]>
{
[RunState.Planning] = new[] { RunState.Planning, RunState.Queued, RunState.Cancelled },
[RunState.Queued] = new[] { RunState.Queued, RunState.Running, RunState.Cancelled },
[RunState.Running] = new[] { RunState.Running, RunState.Completed, RunState.Error, RunState.Cancelled },
[RunState.Completed] = new[] { RunState.Completed },
[RunState.Error] = new[] { RunState.Error },
[RunState.Cancelled] = new[] { RunState.Cancelled },
};
public static bool CanTransition(RunState from, RunState to)
{
if (!Adjacency.TryGetValue(from, out var allowed))
{
return false;
}
return allowed.Contains(to);
}
public static bool IsTerminal(RunState state)
=> state is RunState.Completed or RunState.Error or RunState.Cancelled;
/// <summary>
/// Applies a state transition ensuring timestamps, stats, and error contracts stay consistent.
/// </summary>
public static Run EnsureTransition(
Run run,
RunState next,
DateTimeOffset timestamp,
Action<RunStatsBuilder>? mutateStats = null,
string? errorMessage = null)
{
ArgumentNullException.ThrowIfNull(run);
var normalizedTimestamp = Validation.NormalizeTimestamp(timestamp);
var current = run.State;
if (!CanTransition(current, next))
{
throw new InvalidOperationException($"Run state transition from '{current}' to '{next}' is not allowed.");
}
var statsBuilder = new RunStatsBuilder(run.Stats);
mutateStats?.Invoke(statsBuilder);
var newStats = statsBuilder.Build();
var startedAt = run.StartedAt;
var finishedAt = run.FinishedAt;
if (current != RunState.Running && next == RunState.Running && startedAt is null)
{
startedAt = normalizedTimestamp;
}
if (IsTerminal(next))
{
finishedAt ??= normalizedTimestamp;
}
if (startedAt is { } start && start < run.CreatedAt)
{
throw new InvalidOperationException("Run started time cannot be earlier than created time.");
}
if (finishedAt is { } finish)
{
if (startedAt is { } startTime && finish < startTime)
{
throw new InvalidOperationException("Run finished time cannot be earlier than start time.");
}
if (!IsTerminal(next))
{
throw new InvalidOperationException("Finished time present but next state is not terminal.");
}
}
string? nextError = null;
if (next == RunState.Error)
{
var effectiveError = string.IsNullOrWhiteSpace(errorMessage) ? run.Error : errorMessage.Trim();
if (string.IsNullOrWhiteSpace(effectiveError))
{
throw new InvalidOperationException("Transitioning to Error requires a non-empty error message.");
}
nextError = effectiveError;
}
else if (!string.IsNullOrWhiteSpace(errorMessage))
{
throw new InvalidOperationException("Error message can only be provided when transitioning to Error state.");
}
var updated = run with
{
State = next,
Stats = newStats,
StartedAt = startedAt,
FinishedAt = finishedAt,
Error = nextError,
};
Validate(updated);
return updated;
}
public static void Validate(Run run)
{
ArgumentNullException.ThrowIfNull(run);
if (run.StartedAt is { } started && started < run.CreatedAt)
{
throw new InvalidOperationException("Run.StartedAt cannot be earlier than CreatedAt.");
}
if (run.FinishedAt is { } finished)
{
if (run.StartedAt is { } startedAt && finished < startedAt)
{
throw new InvalidOperationException("Run.FinishedAt cannot be earlier than StartedAt.");
}
if (!IsTerminal(run.State))
{
throw new InvalidOperationException("Run.FinishedAt set while state is not terminal.");
}
}
else if (IsTerminal(run.State))
{
throw new InvalidOperationException("Terminal run states must include FinishedAt.");
}
if (run.State == RunState.Error)
{
if (string.IsNullOrWhiteSpace(run.Error))
{
throw new InvalidOperationException("Run.Error must be populated when state is Error.");
}
}
else if (!string.IsNullOrWhiteSpace(run.Error))
{
throw new InvalidOperationException("Run.Error must be null for non-error states.");
}
}
}

View File

@@ -0,0 +1,92 @@
using System;
namespace StellaOps.Scheduler.Models;
/// <summary>
/// Helper that enforces monotonic <see cref="RunStats"/> updates.
/// </summary>
public sealed class RunStatsBuilder
{
private int _candidates;
private int _deduped;
private int _queued;
private int _completed;
private int _deltas;
private int _newCriticals;
private int _newHigh;
private int _newMedium;
private int _newLow;
public RunStatsBuilder(RunStats? baseline = null)
{
baseline ??= RunStats.Empty;
_candidates = baseline.Candidates;
_deduped = baseline.Deduped;
_queued = baseline.Queued;
_completed = baseline.Completed;
_deltas = baseline.Deltas;
_newCriticals = baseline.NewCriticals;
_newHigh = baseline.NewHigh;
_newMedium = baseline.NewMedium;
_newLow = baseline.NewLow;
}
public void SetCandidates(int value) => _candidates = EnsureMonotonic(value, _candidates, nameof(RunStats.Candidates));
public void IncrementCandidates(int value = 1) => SetCandidates(_candidates + value);
public void SetDeduped(int value) => _deduped = EnsureMonotonic(value, _deduped, nameof(RunStats.Deduped));
public void IncrementDeduped(int value = 1) => SetDeduped(_deduped + value);
public void SetQueued(int value) => _queued = EnsureMonotonic(value, _queued, nameof(RunStats.Queued));
public void IncrementQueued(int value = 1) => SetQueued(_queued + value);
public void SetCompleted(int value) => _completed = EnsureMonotonic(value, _completed, nameof(RunStats.Completed));
public void IncrementCompleted(int value = 1) => SetCompleted(_completed + value);
public void SetDeltas(int value) => _deltas = EnsureMonotonic(value, _deltas, nameof(RunStats.Deltas));
public void IncrementDeltas(int value = 1) => SetDeltas(_deltas + value);
public void SetNewCriticals(int value) => _newCriticals = EnsureMonotonic(value, _newCriticals, nameof(RunStats.NewCriticals));
public void IncrementNewCriticals(int value = 1) => SetNewCriticals(_newCriticals + value);
public void SetNewHigh(int value) => _newHigh = EnsureMonotonic(value, _newHigh, nameof(RunStats.NewHigh));
public void IncrementNewHigh(int value = 1) => SetNewHigh(_newHigh + value);
public void SetNewMedium(int value) => _newMedium = EnsureMonotonic(value, _newMedium, nameof(RunStats.NewMedium));
public void IncrementNewMedium(int value = 1) => SetNewMedium(_newMedium + value);
public void SetNewLow(int value) => _newLow = EnsureMonotonic(value, _newLow, nameof(RunStats.NewLow));
public void IncrementNewLow(int value = 1) => SetNewLow(_newLow + value);
public RunStats Build()
=> new(
candidates: _candidates,
deduped: _deduped,
queued: _queued,
completed: _completed,
deltas: _deltas,
newCriticals: _newCriticals,
newHigh: _newHigh,
newMedium: _newMedium,
newLow: _newLow);
private static int EnsureMonotonic(int value, int current, string fieldName)
{
Validation.EnsureNonNegative(value, fieldName);
if (value < current)
{
throw new InvalidOperationException($"RunStats.{fieldName} cannot decrease (current: {current}, attempted: {value}).");
}
return value;
}
}

View File

@@ -0,0 +1,227 @@
using System.Collections.Immutable;
using System.Text.Json.Serialization;
namespace StellaOps.Scheduler.Models;
/// <summary>
/// Scheduler configuration entity persisted in Mongo.
/// </summary>
public sealed record Schedule
{
public Schedule(
string id,
string tenantId,
string name,
bool enabled,
string cronExpression,
string timezone,
ScheduleMode mode,
Selector selection,
ScheduleOnlyIf? onlyIf,
ScheduleNotify? notify,
ScheduleLimits? limits,
DateTimeOffset createdAt,
string createdBy,
DateTimeOffset updatedAt,
string updatedBy,
ImmutableArray<string>? subscribers = null,
string? schemaVersion = null)
: this(
id,
tenantId,
name,
enabled,
cronExpression,
timezone,
mode,
selection,
onlyIf ?? ScheduleOnlyIf.Default,
notify ?? ScheduleNotify.Default,
limits ?? ScheduleLimits.Default,
subscribers ?? ImmutableArray<string>.Empty,
createdAt,
createdBy,
updatedAt,
updatedBy,
schemaVersion)
{
}
[JsonConstructor]
public Schedule(
string id,
string tenantId,
string name,
bool enabled,
string cronExpression,
string timezone,
ScheduleMode mode,
Selector selection,
ScheduleOnlyIf onlyIf,
ScheduleNotify notify,
ScheduleLimits limits,
ImmutableArray<string> subscribers,
DateTimeOffset createdAt,
string createdBy,
DateTimeOffset updatedAt,
string updatedBy,
string? schemaVersion = null)
{
Id = Validation.EnsureId(id, nameof(id));
TenantId = Validation.EnsureTenantId(tenantId, nameof(tenantId));
Name = Validation.EnsureName(name, nameof(name));
Enabled = enabled;
CronExpression = Validation.EnsureCronExpression(cronExpression, nameof(cronExpression));
Timezone = Validation.EnsureTimezone(timezone, nameof(timezone));
Mode = mode;
Selection = selection ?? throw new ArgumentNullException(nameof(selection));
OnlyIf = onlyIf ?? ScheduleOnlyIf.Default;
Notify = notify ?? ScheduleNotify.Default;
Limits = limits ?? ScheduleLimits.Default;
Subscribers = (subscribers.IsDefault ? ImmutableArray<string>.Empty : subscribers)
.Select(static value => Validation.EnsureSimpleIdentifier(value, nameof(subscribers)))
.Distinct(StringComparer.Ordinal)
.OrderBy(static value => value, StringComparer.Ordinal)
.ToImmutableArray();
CreatedAt = Validation.NormalizeTimestamp(createdAt);
CreatedBy = Validation.EnsureSimpleIdentifier(createdBy, nameof(createdBy));
UpdatedAt = Validation.NormalizeTimestamp(updatedAt);
UpdatedBy = Validation.EnsureSimpleIdentifier(updatedBy, nameof(updatedBy));
SchemaVersion = SchedulerSchemaVersions.EnsureSchedule(schemaVersion);
if (Selection.TenantId is not null && !string.Equals(Selection.TenantId, TenantId, StringComparison.Ordinal))
{
throw new ArgumentException("Selection tenant must match schedule tenant.", nameof(selection));
}
}
public string SchemaVersion { get; }
public string Id { get; }
public string TenantId { get; }
public string Name { get; }
public bool Enabled { get; }
public string CronExpression { get; }
public string Timezone { get; }
public ScheduleMode Mode { get; }
public Selector Selection { get; }
public ScheduleOnlyIf OnlyIf { get; }
public ScheduleNotify Notify { get; }
public ScheduleLimits Limits { get; }
[JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingDefault)]
public ImmutableArray<string> Subscribers { get; } = ImmutableArray<string>.Empty;
public DateTimeOffset CreatedAt { get; }
public string CreatedBy { get; }
public DateTimeOffset UpdatedAt { get; }
public string UpdatedBy { get; }
}
/// <summary>
/// Conditions that must hold before a schedule enqueues work.
/// </summary>
public sealed record ScheduleOnlyIf
{
public static ScheduleOnlyIf Default { get; } = new();
[JsonConstructor]
public ScheduleOnlyIf(int? lastReportOlderThanDays = null, string? policyRevision = null)
{
LastReportOlderThanDays = Validation.EnsurePositiveOrNull(lastReportOlderThanDays, nameof(lastReportOlderThanDays));
PolicyRevision = Validation.TrimToNull(policyRevision);
}
[JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
public int? LastReportOlderThanDays { get; } = null;
[JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
public string? PolicyRevision { get; } = null;
}
/// <summary>
/// Notification preferences for schedule outcomes.
/// </summary>
public sealed record ScheduleNotify
{
public static ScheduleNotify Default { get; } = new(onNewFindings: true, null, includeKev: true);
public ScheduleNotify(bool onNewFindings, SeverityRank? minSeverity, bool includeKev)
{
OnNewFindings = onNewFindings;
// SeverityRank.None is not a usable notification floor; coerce it to Low.
// Unknown and all scored ranks pass through unchanged.
MinSeverity = minSeverity == SeverityRank.None ? SeverityRank.Low : minSeverity;
IncludeKev = includeKev;
}
[JsonConstructor]
public ScheduleNotify(bool onNewFindings, SeverityRank? minSeverity, bool includeKev, bool includeQuietFindings = false)
: this(onNewFindings, minSeverity, includeKev)
{
IncludeQuietFindings = includeQuietFindings;
}
public bool OnNewFindings { get; }
[JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
public SeverityRank? MinSeverity { get; }
public bool IncludeKev { get; }
[JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingDefault)]
public bool IncludeQuietFindings { get; }
}
/// <summary>
/// Execution limits that bound scheduler throughput.
/// </summary>
public sealed record ScheduleLimits
{
public static ScheduleLimits Default { get; } = new();
public ScheduleLimits(int? maxJobs = null, int? ratePerSecond = null, int? parallelism = null)
{
MaxJobs = Validation.EnsurePositiveOrNull(maxJobs, nameof(maxJobs));
RatePerSecond = Validation.EnsurePositiveOrNull(ratePerSecond, nameof(ratePerSecond));
Parallelism = Validation.EnsurePositiveOrNull(parallelism, nameof(parallelism));
}
[JsonConstructor]
public ScheduleLimits(int? maxJobs, int? ratePerSecond, int? parallelism, int? burst = null)
: this(maxJobs, ratePerSecond, parallelism)
{
Burst = Validation.EnsurePositiveOrNull(burst, nameof(burst));
}
[JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
public int? MaxJobs { get; }
[JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
public int? RatePerSecond { get; }
[JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
public int? Parallelism { get; }
[JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
public int? Burst { get; }
}

View File

@@ -0,0 +1,454 @@
using System.Collections.Immutable;
using System.Globalization;
using System.Linq;
using System.Text.Json;
using System.Text.Json.Nodes;
namespace StellaOps.Scheduler.Models;
/// <summary>
/// Upgrades scheduler documents emitted by earlier schema revisions to the latest DTOs.
/// </summary>
public static class SchedulerSchemaMigration
{
private static readonly ImmutableHashSet<string> ScheduleProperties = ImmutableHashSet.Create(
StringComparer.Ordinal,
"schemaVersion",
"id",
"tenantId",
"name",
"enabled",
"cronExpression",
"timezone",
"mode",
"selection",
"onlyIf",
"notify",
"limits",
"subscribers",
"createdAt",
"createdBy",
"updatedAt",
"updatedBy");
private static readonly ImmutableHashSet<string> RunProperties = ImmutableHashSet.Create(
StringComparer.Ordinal,
"schemaVersion",
"id",
"tenantId",
"scheduleId",
"trigger",
"state",
"stats",
"reason",
"createdAt",
"startedAt",
"finishedAt",
"error",
"deltas");
private static readonly ImmutableHashSet<string> ImpactSetProperties = ImmutableHashSet.Create(
StringComparer.Ordinal,
"schemaVersion",
"selector",
"images",
"usageOnly",
"generatedAt",
"total",
"snapshotId");
public static SchedulerSchemaMigrationResult<Schedule> UpgradeSchedule(JsonNode document, bool strict = false)
=> Upgrade(
document,
SchedulerSchemaVersions.Schedule,
SchedulerSchemaVersions.EnsureSchedule,
ScheduleProperties,
static json => CanonicalJsonSerializer.Deserialize<Schedule>(json),
ApplyScheduleLegacyFixups,
strict);
public static SchedulerSchemaMigrationResult<Run> UpgradeRun(JsonNode document, bool strict = false)
=> Upgrade(
document,
SchedulerSchemaVersions.Run,
SchedulerSchemaVersions.EnsureRun,
RunProperties,
static json => CanonicalJsonSerializer.Deserialize<Run>(json),
ApplyRunLegacyFixups,
strict);
public static SchedulerSchemaMigrationResult<ImpactSet> UpgradeImpactSet(JsonNode document, bool strict = false)
=> Upgrade(
document,
SchedulerSchemaVersions.ImpactSet,
SchedulerSchemaVersions.EnsureImpactSet,
ImpactSetProperties,
static json => CanonicalJsonSerializer.Deserialize<ImpactSet>(json),
ApplyImpactSetLegacyFixups,
strict);
private static SchedulerSchemaMigrationResult<T> Upgrade<T>(
JsonNode document,
string latestVersion,
Func<string?, string> ensureVersion,
ImmutableHashSet<string> knownProperties,
Func<string, T> deserialize,
Func<JsonObject, string, ImmutableArray<string>.Builder, bool> applyLegacyFixups,
bool strict)
{
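// Pipeline: normalize schemaVersion on a clone, apply version-specific legacy fixups,
// optionally strip unknown members (strict), then round-trip through the canonical serializer.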
ArgumentNullException.ThrowIfNull(document);
var (normalized, fromVersion) = Normalize(document, ensureVersion);
var warnings = ImmutableArray.CreateBuilder<string>();
if (!string.Equals(fromVersion, latestVersion, StringComparison.Ordinal))
{
var upgraded = applyLegacyFixups(normalized, fromVersion, warnings);
if (!upgraded)
{
throw new NotSupportedException($"Unsupported scheduler schema version '{fromVersion}', expected '{latestVersion}'.");
}
normalized["schemaVersion"] = latestVersion;
}
if (strict)
{
RemoveUnknownMembers(normalized, knownProperties, warnings, fromVersion);
}
var canonicalJson = normalized.ToJsonString(new JsonSerializerOptions
{
WriteIndented = false,
});
var value = deserialize(canonicalJson);
return new SchedulerSchemaMigrationResult<T>(
value,
fromVersion,
latestVersion,
warnings.ToImmutable());
}
private static (JsonObject Clone, string SchemaVersion) Normalize(JsonNode node, Func<string?, string> ensureVersion)
{
if (node is not JsonObject obj)
{
throw new ArgumentException("Document must be a JSON object.", nameof(node));
}
if (obj.DeepClone() is not JsonObject clone)
{
throw new InvalidOperationException("Unable to clone scheduler document.");
}
string schemaVersion;
if (clone.TryGetPropertyValue("schemaVersion", out var value) &&
value is JsonValue jsonValue &&
jsonValue.TryGetValue(out string? rawVersion))
{
schemaVersion = ensureVersion(rawVersion);
}
else
{
schemaVersion = ensureVersion(null);
}
// Write back unconditionally so the clone always carries the normalized version.
clone["schemaVersion"] = schemaVersion;
return (clone, schemaVersion);
}
private static void RemoveUnknownMembers(
JsonObject json,
ImmutableHashSet<string> knownProperties,
ImmutableArray<string>.Builder warnings,
string schemaVersion)
{
var unknownKeys = json
.Where(static pair => pair.Key is not null)
.Select(pair => pair.Key!)
.Where(key => !knownProperties.Contains(key))
.ToArray();
foreach (var key in unknownKeys)
{
json.Remove(key);
warnings.Add($"Removed unknown property '{key}' from scheduler document (schemaVersion={schemaVersion}).");
}
}
private static bool ApplyScheduleLegacyFixups(
JsonObject json,
string fromVersion,
ImmutableArray<string>.Builder warnings)
{
switch (fromVersion)
{
case SchedulerSchemaVersions.ScheduleLegacy0:
var limits = EnsureObject(json, "limits", () => new JsonObject(), warnings, "schedule", fromVersion);
NormalizePositiveInt(limits, "maxJobs", warnings, "schedule.limits", fromVersion);
NormalizePositiveInt(limits, "ratePerSecond", warnings, "schedule.limits", fromVersion);
NormalizePositiveInt(limits, "parallelism", warnings, "schedule.limits", fromVersion);
NormalizePositiveInt(limits, "burst", warnings, "schedule.limits", fromVersion);
var notify = EnsureObject(json, "notify", () => new JsonObject(), warnings, "schedule", fromVersion);
NormalizeBoolean(notify, "onNewFindings", defaultValue: true, warnings, "schedule.notify", fromVersion);
NormalizeSeverity(notify, "minSeverity", warnings, "schedule.notify", fromVersion);
NormalizeBoolean(notify, "includeKev", defaultValue: true, warnings, "schedule.notify", fromVersion);
NormalizeBoolean(notify, "includeQuietFindings", defaultValue: false, warnings, "schedule.notify", fromVersion);
var onlyIf = EnsureObject(json, "onlyIf", () => new JsonObject(), warnings, "schedule", fromVersion);
NormalizePositiveInt(onlyIf, "lastReportOlderThanDays", warnings, "schedule.onlyIf", fromVersion, allowZero: false);
EnsureArray(json, "subscribers", warnings, "schedule", fromVersion);
return true;
default:
return false;
}
}
private static bool ApplyRunLegacyFixups(
JsonObject json,
string fromVersion,
ImmutableArray<string>.Builder warnings)
{
switch (fromVersion)
{
case SchedulerSchemaVersions.RunLegacy0:
var stats = EnsureObject(json, "stats", () => new JsonObject(), warnings, "run", fromVersion);
NormalizeNonNegativeInt(stats, "candidates", warnings, "run.stats", fromVersion);
NormalizeNonNegativeInt(stats, "deduped", warnings, "run.stats", fromVersion);
NormalizeNonNegativeInt(stats, "queued", warnings, "run.stats", fromVersion);
NormalizeNonNegativeInt(stats, "completed", warnings, "run.stats", fromVersion);
NormalizeNonNegativeInt(stats, "deltas", warnings, "run.stats", fromVersion);
NormalizeNonNegativeInt(stats, "newCriticals", warnings, "run.stats", fromVersion);
NormalizeNonNegativeInt(stats, "newHigh", warnings, "run.stats", fromVersion);
NormalizeNonNegativeInt(stats, "newMedium", warnings, "run.stats", fromVersion);
NormalizeNonNegativeInt(stats, "newLow", warnings, "run.stats", fromVersion);
EnsureObject(json, "reason", () => new JsonObject(), warnings, "run", fromVersion);
EnsureArray(json, "deltas", warnings, "run", fromVersion);
return true;
default:
return false;
}
}
private static bool ApplyImpactSetLegacyFixups(
JsonObject json,
string fromVersion,
ImmutableArray<string>.Builder warnings)
{
switch (fromVersion)
{
case SchedulerSchemaVersions.ImpactSetLegacy0:
var images = EnsureArray(json, "images", warnings, "impact-set", fromVersion);
NormalizeBoolean(json, "usageOnly", defaultValue: false, warnings, "impact-set", fromVersion);
if (!json.TryGetPropertyValue("total", out var totalNode) || !TryReadNonNegative(totalNode, out var total))
{
var computed = images.Count;
json["total"] = computed;
warnings.Add($"Backfilled impact set total with image count ({computed}) while upgrading from {fromVersion}.");
}
else
{
var computed = images.Count;
if (total != computed)
{
json["total"] = computed;
warnings.Add($"Normalized impact set total to image count ({computed}) while upgrading from {fromVersion}.");
}
}
return true;
default:
return false;
}
}
private static JsonObject EnsureObject(
JsonObject parent,
string propertyName,
Func<JsonObject> factory,
ImmutableArray<string>.Builder warnings,
string context,
string fromVersion)
{
if (parent.TryGetPropertyValue(propertyName, out var node) && node is JsonObject obj)
{
return obj;
}
var created = factory();
parent[propertyName] = created;
warnings.Add($"Inserted default '{context}.{propertyName}' object while upgrading from {fromVersion}.");
return created;
}
private static JsonArray EnsureArray(
JsonObject parent,
string propertyName,
ImmutableArray<string>.Builder warnings,
string context,
string fromVersion)
{
if (parent.TryGetPropertyValue(propertyName, out var node) && node is JsonArray array)
{
return array;
}
var created = new JsonArray();
parent[propertyName] = created;
warnings.Add($"Inserted empty '{context}.{propertyName}' array while upgrading from {fromVersion}.");
return created;
}
private static void NormalizePositiveInt(
JsonObject obj,
string propertyName,
ImmutableArray<string>.Builder warnings,
string context,
string fromVersion,
bool allowZero = false)
{
if (!obj.TryGetPropertyValue(propertyName, out var node))
{
return;
}
if (!TryReadInt(node, out var value))
{
obj.Remove(propertyName);
warnings.Add($"Removed invalid '{context}.{propertyName}' while upgrading from {fromVersion}.");
return;
}
if ((!allowZero && value <= 0) || (allowZero && value < 0))
{
obj.Remove(propertyName);
warnings.Add($"Removed non-positive '{context}.{propertyName}' value while upgrading from {fromVersion}.");
return;
}
obj[propertyName] = value;
}
private static void NormalizeNonNegativeInt(
JsonObject obj,
string propertyName,
ImmutableArray<string>.Builder warnings,
string context,
string fromVersion)
{
if (!obj.TryGetPropertyValue(propertyName, out var node) || !TryReadNonNegative(node, out var value))
{
obj[propertyName] = 0;
warnings.Add($"Defaulted '{context}.{propertyName}' to 0 while upgrading from {fromVersion}.");
return;
}
obj[propertyName] = value;
}
private static void NormalizeBoolean(
JsonObject obj,
string propertyName,
bool defaultValue,
ImmutableArray<string>.Builder warnings,
string context,
string fromVersion)
{
if (!obj.TryGetPropertyValue(propertyName, out var node))
{
obj[propertyName] = defaultValue;
warnings.Add($"Defaulted '{context}.{propertyName}' to {defaultValue.ToString().ToLowerInvariant()} while upgrading from {fromVersion}.");
return;
}
if (node is JsonValue value && value.TryGetValue(out bool parsed))
{
obj[propertyName] = parsed;
return;
}
if (node is JsonValue strValue && strValue.TryGetValue(out string? text) &&
bool.TryParse(text, out var parsedFromString))
{
obj[propertyName] = parsedFromString;
return;
}
obj[propertyName] = defaultValue;
warnings.Add($"Normalized '{context}.{propertyName}' to {defaultValue.ToString().ToLowerInvariant()} while upgrading from {fromVersion}.");
}
private static void NormalizeSeverity(
JsonObject obj,
string propertyName,
ImmutableArray<string>.Builder warnings,
string context,
string fromVersion)
{
if (!obj.TryGetPropertyValue(propertyName, out var node))
{
return;
}
if (node is JsonValue value)
{
if (value.TryGetValue(out string? text))
{
if (Enum.TryParse<SeverityRank>(text, ignoreCase: true, out var parsed))
{
obj[propertyName] = parsed.ToString().ToLowerInvariant();
return;
}
}
if (value.TryGetValue(out int numeric) && Enum.IsDefined(typeof(SeverityRank), numeric))
{
var enumValue = (SeverityRank)numeric;
obj[propertyName] = enumValue.ToString().ToLowerInvariant();
return;
}
}
obj.Remove(propertyName);
warnings.Add($"Removed invalid '{context}.{propertyName}' while upgrading from {fromVersion}.");
}
private static bool TryReadNonNegative(JsonNode? node, out int value)
=> TryReadInt(node, out value) && value >= 0;
private static bool TryReadInt(JsonNode? node, out int value)
{
if (node is JsonValue valueNode)
{
if (valueNode.TryGetValue(out int intValue))
{
value = intValue;
return true;
}
if (valueNode.TryGetValue(out long longValue) && longValue is >= int.MinValue and <= int.MaxValue)
{
value = (int)longValue;
return true;
}
if (valueNode.TryGetValue(out string? text) &&
int.TryParse(text, NumberStyles.Integer, CultureInfo.InvariantCulture, out var parsed))
{
value = parsed;
return true;
}
}
value = 0;
return false;
}
}

View File

@@ -0,0 +1,13 @@
using System.Collections.Immutable;
namespace StellaOps.Scheduler.Models;
/// <summary>
/// Result from upgrading a scheduler document to the latest schema version.
/// </summary>
/// <typeparam name="T">Target DTO type.</typeparam>
public sealed record SchedulerSchemaMigrationResult<T>(
T Value,
string FromVersion,
string ToVersion,
ImmutableArray<string> Warnings);

View File

@@ -0,0 +1,54 @@
namespace StellaOps.Scheduler.Models;
/// <summary>
/// Canonical schema version identifiers for scheduler documents.
/// </summary>
public static class SchedulerSchemaVersions
{
public const string Schedule = "scheduler.schedule@1";
public const string Run = "scheduler.run@1";
public const string ImpactSet = "scheduler.impact-set@1";
public const string GraphBuildJob = "scheduler.graph-build-job@1";
public const string GraphOverlayJob = "scheduler.graph-overlay-job@1";
public const string PolicyRunRequest = "scheduler.policy-run-request@1";
public const string PolicyRunStatus = "scheduler.policy-run-status@1";
public const string PolicyDiffSummary = "scheduler.policy-diff-summary@1";
public const string PolicyExplainTrace = "scheduler.policy-explain-trace@1";
public const string PolicyRunJob = "scheduler.policy-run-job@1";
public const string ScheduleLegacy0 = "scheduler.schedule@0";
public const string RunLegacy0 = "scheduler.run@0";
public const string ImpactSetLegacy0 = "scheduler.impact-set@0";
public static string EnsureSchedule(string? value)
=> Normalize(value, Schedule);
public static string EnsureRun(string? value)
=> Normalize(value, Run);
public static string EnsureImpactSet(string? value)
=> Normalize(value, ImpactSet);
public static string EnsureGraphBuildJob(string? value)
=> Normalize(value, GraphBuildJob);
public static string EnsureGraphOverlayJob(string? value)
=> Normalize(value, GraphOverlayJob);
public static string EnsurePolicyRunRequest(string? value)
=> Normalize(value, PolicyRunRequest);
public static string EnsurePolicyRunStatus(string? value)
=> Normalize(value, PolicyRunStatus);
public static string EnsurePolicyDiffSummary(string? value)
=> Normalize(value, PolicyDiffSummary);
public static string EnsurePolicyExplainTrace(string? value)
=> Normalize(value, PolicyExplainTrace);
public static string EnsurePolicyRunJob(string? value)
=> Normalize(value, PolicyRunJob);
private static string Normalize(string? value, string fallback)
=> string.IsNullOrWhiteSpace(value) ? fallback : value.Trim();
}

View File

@@ -0,0 +1,134 @@
using System.Collections.Immutable;
using System.Text.Json.Serialization;
namespace StellaOps.Scheduler.Models;
/// <summary>
/// Selector filters used to resolve impacted assets.
/// </summary>
public sealed record Selector
{
public Selector(
SelectorScope scope,
string? tenantId = null,
IEnumerable<string>? namespaces = null,
IEnumerable<string>? repositories = null,
IEnumerable<string>? digests = null,
IEnumerable<string>? includeTags = null,
IEnumerable<LabelSelector>? labels = null,
bool resolvesTags = false)
: this(
scope,
tenantId,
Validation.NormalizeStringSet(namespaces, nameof(namespaces)),
Validation.NormalizeStringSet(repositories, nameof(repositories)),
Validation.NormalizeDigests(digests, nameof(digests)),
Validation.NormalizeTagPatterns(includeTags),
NormalizeLabels(labels),
resolvesTags)
{
}
[JsonConstructor]
public Selector(
SelectorScope scope,
string? tenantId,
ImmutableArray<string> namespaces,
ImmutableArray<string> repositories,
ImmutableArray<string> digests,
ImmutableArray<string> includeTags,
ImmutableArray<LabelSelector> labels,
bool resolvesTags)
{
Scope = scope;
TenantId = tenantId is null ? null : Validation.EnsureTenantId(tenantId, nameof(tenantId));
Namespaces = namespaces.IsDefault ? ImmutableArray<string>.Empty : namespaces;
Repositories = repositories.IsDefault ? ImmutableArray<string>.Empty : repositories;
Digests = digests.IsDefault ? ImmutableArray<string>.Empty : digests;
IncludeTags = includeTags.IsDefault ? ImmutableArray<string>.Empty : includeTags;
Labels = labels.IsDefault ? ImmutableArray<LabelSelector>.Empty : labels;
ResolvesTags = resolvesTags;
if (Scope is SelectorScope.ByDigest && Digests.Length == 0)
{
throw new ArgumentException("At least one digest is required when scope is by-digest.", nameof(digests));
}
if (Scope is SelectorScope.ByNamespace && Namespaces.Length == 0)
{
throw new ArgumentException("Namespaces are required when scope is by-namespace.", nameof(namespaces));
}
if (Scope is SelectorScope.ByRepository && Repositories.Length == 0)
{
throw new ArgumentException("Repositories are required when scope is by-repo.", nameof(repositories));
}
if (Scope is SelectorScope.ByLabels && Labels.Length == 0)
{
throw new ArgumentException("Labels are required when scope is by-labels.", nameof(labels));
}
}
public SelectorScope Scope { get; }
[JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
public string? TenantId { get; }
[JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingDefault)]
public ImmutableArray<string> Namespaces { get; } = ImmutableArray<string>.Empty;
[JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingDefault)]
public ImmutableArray<string> Repositories { get; } = ImmutableArray<string>.Empty;
[JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingDefault)]
public ImmutableArray<string> Digests { get; } = ImmutableArray<string>.Empty;
[JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingDefault)]
public ImmutableArray<string> IncludeTags { get; } = ImmutableArray<string>.Empty;
[JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingDefault)]
public ImmutableArray<LabelSelector> Labels { get; } = ImmutableArray<LabelSelector>.Empty;
public bool ResolvesTags { get; }
private static ImmutableArray<LabelSelector> NormalizeLabels(IEnumerable<LabelSelector>? labels)
{
if (labels is null)
{
return ImmutableArray<LabelSelector>.Empty;
}
return labels
.Where(static label => label is not null)
.Select(static label => label!)
.OrderBy(static label => label.Key, StringComparer.Ordinal)
.ToImmutableArray();
}
}
/// <summary>
/// Describes a label match (key and optional accepted values).
/// </summary>
public sealed record LabelSelector
{
public LabelSelector(string key, IEnumerable<string>? values = null)
: this(key, NormalizeValues(values))
{
}
[JsonConstructor]
public LabelSelector(string key, ImmutableArray<string> values)
{
Key = Validation.EnsureSimpleIdentifier(key, nameof(key));
Values = values.IsDefault ? ImmutableArray<string>.Empty : values;
}
public string Key { get; }
[JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingDefault)]
public ImmutableArray<string> Values { get; } = ImmutableArray<string>.Empty;
private static ImmutableArray<string> NormalizeValues(IEnumerable<string>? values)
=> Validation.NormalizeStringSet(values, nameof(values));
}

View File

@@ -0,0 +1,9 @@
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<TargetFramework>net10.0</TargetFramework>
<LangVersion>preview</LangVersion>
<ImplicitUsings>enable</ImplicitUsings>
<Nullable>enable</Nullable>
<TreatWarningsAsErrors>true</TreatWarningsAsErrors>
</PropertyGroup>
</Project>

View File

@@ -0,0 +1,22 @@
# Scheduler Models Task Board (Sprint 16)
| ID | Status | Owner(s) | Depends on | Description | Exit Criteria |
|----|--------|----------|------------|-------------|---------------|
| SCHED-MODELS-16-101 | DONE (2025-10-19) | Scheduler Models Guild | — | Define DTOs (Schedule, Run, ImpactSet, Selector, DeltaSummary, AuditRecord) with validation + canonical JSON. | DTOs merged with tests; documentation snippet added; serialization deterministic. |
| SCHED-MODELS-16-102 | DONE (2025-10-19) | Scheduler Models Guild | SCHED-MODELS-16-101 | Publish schema docs & sample payloads for UI/Notify integration. | Samples committed; docs referenced; contract tests pass. |
| SCHED-MODELS-16-103 | DONE (2025-10-20) | Scheduler Models Guild | SCHED-MODELS-16-101 | Versioning/migration helpers (schedule evolution, run state transitions). | Migration helpers implemented; tests cover upgrade/downgrade; guidelines documented. |
## Policy Engine v2 (Sprint 20)
| ID | Status | Owner(s) | Depends on | Description | Exit Criteria |
|----|--------|----------|------------|-------------|---------------|
| SCHED-MODELS-20-001 | DONE (2025-10-26) | Scheduler Models Guild, Policy Guild | POLICY-ENGINE-20-000 | Define DTOs/schemas for policy runs, diffs, and explain traces (`PolicyRunRequest`, `PolicyRunStatus`, `PolicyDiffSummary`). | DTOs serialize deterministically; schema samples committed; validation helpers added. |
| SCHED-MODELS-20-002 | DONE (2025-10-29) | Scheduler Models Guild | SCHED-MODELS-20-001 | Extend scheduler schema docs to include policy run lifecycle, environment metadata, and diff payloads. | Docs updated with compliance checklist; samples validated against JSON schema; consumers notified. |
> 2025-10-29: Added lifecycle table, environment metadata section, and diff payload breakdown to `SCHED-MODELS-20-001-POLICY-RUNS.md`; compliance checklist extended to cover new documentation.
## Graph Explorer v1 (Sprint 21)
| ID | Status | Owner(s) | Depends on | Description | Exit Criteria |
|----|--------|----------|------------|-------------|---------------|
| SCHED-MODELS-21-001 | DONE (2025-10-26) | Scheduler Models Guild, Cartographer Guild | CARTO-GRAPH-21-007 | Define job DTOs for graph builds/overlay refresh (`GraphBuildJob`, `GraphOverlayJob`) with deterministic serialization and status enums. | DTOs serialized deterministically; schema snippets documented in `src/Scheduler/__Libraries/StellaOps.Scheduler.Models/docs/SCHED-MODELS-21-001-GRAPH-JOBS.md`; tests cover transitions. |
| SCHED-MODELS-21-002 | DONE (2025-10-26) | Scheduler Models Guild | SCHED-MODELS-21-001 | Publish schema docs/sample payloads for graph jobs and overlay events for downstream workers/UI. | Docs updated with compliance checklist; samples validated; notifications sent to guilds. |

View File

@@ -0,0 +1,247 @@
using System.Collections.Immutable;
using System.Text.RegularExpressions;
namespace StellaOps.Scheduler.Models;
/// <summary>
/// Lightweight validation helpers for scheduler DTO constructors.
/// </summary>
internal static partial class Validation
{
private const int MaxIdentifierLength = 256;
private const int MaxNameLength = 200;
public static string EnsureId(string value, string paramName)
{
var normalized = EnsureNotNullOrWhiteSpace(value, paramName);
if (normalized.Length > MaxIdentifierLength)
{
throw new ArgumentException($"Value exceeds {MaxIdentifierLength} characters.", paramName);
}
return normalized;
}
public static string EnsureName(string value, string paramName)
{
var normalized = EnsureNotNullOrWhiteSpace(value, paramName);
if (normalized.Length > MaxNameLength)
{
throw new ArgumentException($"Value exceeds {MaxNameLength} characters.", paramName);
}
return normalized;
}
public static string EnsureTenantId(string value, string paramName)
{
var normalized = EnsureId(value, paramName);
if (!TenantRegex().IsMatch(normalized))
{
throw new ArgumentException("Tenant id must be alphanumeric with '-', '_' separators.", paramName);
}
return normalized;
}
public static string EnsureCronExpression(string value, string paramName)
{
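// Charset/length screening only; this does not parse or validate cron field structure.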
var normalized = EnsureNotNullOrWhiteSpace(value, paramName);
if (normalized.Length > 128 || normalized.Contains('\n', StringComparison.Ordinal) || normalized.Contains('\r', StringComparison.Ordinal))
{
throw new ArgumentException("Cron expression too long or contains invalid characters.", paramName);
}
if (!CronSegmentRegex().IsMatch(normalized))
{
throw new ArgumentException("Cron expression contains unsupported characters.", paramName);
}
return normalized;
}
public static string EnsureTimezone(string value, string paramName)
{
var normalized = EnsureNotNullOrWhiteSpace(value, paramName);
try
{
_ = TimeZoneInfo.FindSystemTimeZoneById(normalized);
}
catch (TimeZoneNotFoundException ex)
{
throw new ArgumentException($"Timezone '{normalized}' is not recognized on this host.", paramName, ex);
}
catch (InvalidTimeZoneException ex)
{
throw new ArgumentException($"Timezone '{normalized}' is invalid.", paramName, ex);
}
return normalized;
}
public static string? TrimToNull(string? value)
=> string.IsNullOrWhiteSpace(value)
? null
: value.Trim();
public static ImmutableArray<string> NormalizeStringSet(IEnumerable<string>? values, string paramName, bool allowWildcards = false)
{
if (values is null)
{
return ImmutableArray<string>.Empty;
}
var result = values
.Select(static value => TrimToNull(value))
.Where(static value => value is not null)
.Select(value => allowWildcards ? value! : EnsureSimpleIdentifier(value!, paramName))
.Distinct(StringComparer.Ordinal)
.OrderBy(static value => value, StringComparer.Ordinal)
.ToImmutableArray();
return result;
}
public static ImmutableArray<string> NormalizeTagPatterns(IEnumerable<string>? values)
{
if (values is null)
{
return ImmutableArray<string>.Empty;
}
var result = values
.Select(static value => TrimToNull(value))
.Where(static value => value is not null)
.Select(static value => value!)
.Distinct(StringComparer.OrdinalIgnoreCase)
.OrderBy(static value => value, StringComparer.OrdinalIgnoreCase)
.ToImmutableArray();
return result;
}
public static ImmutableArray<string> NormalizeDigests(IEnumerable<string>? values, string paramName)
{
if (values is null)
{
return ImmutableArray<string>.Empty;
}
var result = values
.Select(static value => TrimToNull(value))
.Where(static value => value is not null)
.Select(value => EnsureDigestFormat(value!, paramName))
.Distinct(StringComparer.OrdinalIgnoreCase)
.OrderBy(static value => value, StringComparer.OrdinalIgnoreCase)
.ToImmutableArray();
return result;
}
public static int? EnsurePositiveOrNull(int? value, string paramName)
{
if (value is null)
{
return null;
}
if (value <= 0)
{
throw new ArgumentOutOfRangeException(paramName, value, "Value must be greater than zero.");
}
return value;
}
public static int EnsureNonNegative(int value, string paramName)
{
if (value < 0)
{
throw new ArgumentOutOfRangeException(paramName, value, "Value must be zero or greater.");
}
return value;
}
public static ImmutableSortedDictionary<string, string> NormalizeMetadata(IEnumerable<KeyValuePair<string, string>>? metadata)
{
if (metadata is null)
{
return ImmutableSortedDictionary<string, string>.Empty;
}
var builder = ImmutableSortedDictionary.CreateBuilder<string, string>(StringComparer.Ordinal);
foreach (var pair in metadata)
{
var key = TrimToNull(pair.Key);
var value = TrimToNull(pair.Value);
if (key is null || value is null)
{
continue;
}
var normalizedKey = key.ToLowerInvariant();
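// First occurrence wins when distinct inputs collapse to the same lowercased key.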
if (!builder.ContainsKey(normalizedKey))
{
builder[normalizedKey] = value;
}
}
return builder.ToImmutable();
}
public static string EnsureSimpleIdentifier(string value, string paramName)
{
var normalized = EnsureNotNullOrWhiteSpace(value, paramName);
if (!SimpleIdentifierRegex().IsMatch(normalized))
{
throw new ArgumentException("Value must contain letters, digits, '-', '_', '.', or '/'.", paramName);
}
return normalized;
}
public static string EnsureDigestFormat(string value, string paramName)
{
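// Validates the prefix and hex alphabet only; the 64-character payload length is not enforced.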
var normalized = EnsureNotNullOrWhiteSpace(value, paramName).ToLowerInvariant();
if (!normalized.StartsWith("sha256:", StringComparison.Ordinal) || normalized.Length <= 7)
{
throw new ArgumentException("Digest must start with 'sha256:' and contain a hex payload.", paramName);
}
if (!HexRegex().IsMatch(normalized.AsSpan(7)))
{
throw new ArgumentException("Digest must be hexadecimal.", paramName);
}
return normalized;
}
public static string EnsureNotNullOrWhiteSpace(string value, string paramName)
{
if (string.IsNullOrWhiteSpace(value))
{
throw new ArgumentException("Value cannot be null or whitespace.", paramName);
}
return value.Trim();
}
public static DateTimeOffset NormalizeTimestamp(DateTimeOffset value)
=> value.ToUniversalTime();
public static DateTimeOffset? NormalizeTimestamp(DateTimeOffset? value)
=> value?.ToUniversalTime();
[GeneratedRegex("^[A-Za-z0-9_-]+$")]
private static partial Regex TenantRegex();
[GeneratedRegex("^[A-Za-z0-9_./:@+\\-]+$")]
private static partial Regex SimpleIdentifierRegex();
[GeneratedRegex("^[A-Za-z0-9:*?/_.,\\- ]+$")]
private static partial Regex CronSegmentRegex();
[GeneratedRegex("^[a-f0-9]+$", RegexOptions.IgnoreCase)]
private static partial Regex HexRegex();
}

View File

@@ -0,0 +1,86 @@
# SCHED-MODELS-16-103 — Scheduler Schema Versioning & Run State Helpers
## Goals
- Track schema revisions for `Schedule` and `Run` documents so storage upgrades are deterministic across air-gapped installs.
- Provide reusable upgrade helpers that normalize Mongo snapshots (raw BSON → JSON) into the latest DTOs without mutating inputs.
- Formalize the allowed `RunState` graph and surface guard-rail helpers (timestamps, stats monotonicity) for planners/runners.
## Non-goals
- Implementing the helpers (covered by the main task).
- Downgrading documents to legacy schema revisions (can be added if Offline Kit requires it).
- Persisted data backfills or data migration jobs; we focus on in-process upgrades during read.
## Schema Version Strategy
- Introduce `SchedulerSchemaVersions` constants:
- `scheduler.schedule@1` (base record with subscribers, limits burst default).
- `scheduler.run@1` (run metadata + delta summaries).
- `scheduler.impact-set@1` (shared envelope used by planners).
- Expose `EnsureSchedule`, `EnsureRun`, `EnsureImpactSet` helpers mirroring the Notify model pattern to normalize missing/whitespace values (see the sketch after this list).
- Extend `Schedule`, `Run`, and `ImpactSet` records with an optional `schemaVersion` constructor parameter defaulting through the `Ensure*` helpers. The canonical JSON serializer will list `schemaVersion` first so documents round-trip deterministically.
- Persisted Mongo documents will now always include `schemaVersion`; exporters/backups can rely on this when bundling Offline Kit snapshots.
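A minimal sketch of the `Ensure*` contract (the expected values simply restate the constants and trimming rule above):

```csharp
// Missing or blank versions fall back to the canonical constant;
// explicit values are trimmed and passed through unchanged.
var fromMissing = SchedulerSchemaVersions.EnsureSchedule(null);                    // "scheduler.schedule@1"
var fromBlank = SchedulerSchemaVersions.EnsureSchedule("   ");                     // "scheduler.schedule@1"
var fromLegacy = SchedulerSchemaVersions.EnsureSchedule(" scheduler.schedule@0 "); // "scheduler.schedule@0"
```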
## Migration Helper Shape
- Add `SchedulerSchemaMigration` static class with:
- `Schedule UpgradeSchedule(JsonNode document)`
- `Run UpgradeRun(JsonNode document)`
- `ImpactSet UpgradeImpactSet(JsonNode document)`
- Each method clones the incoming node, normalizes `schemaVersion` (injecting default if missing), then applies an upgrade pipeline:
1. `Normalize` — ensure object, strip unknown members when `strict` flag is set, coerce enums via converters.
2. `ApplyLegacyFixups` — version-specific patches, e.g., backfill `subscribers`, migrate `limits.burst`, convert legacy trigger strings.
3. `Deserialize` — use `CanonicalJsonSerializer.Deserialize<T>` so property order/enum parsing stays centralized.
- Expose `SchedulerSchemaMigrationResult<T>` record returning `(T Value, string FromVersion, string ToVersion, ImmutableArray<string> Warnings)` to surface non-blocking issues to callers (web service, worker, storage).
- Helpers remain dependency-free so storage/web modules can reference them without circular dependencies.
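A minimal sketch of the upgrade flow under the shape proposed above. `Schedule`, `CanonicalJsonSerializer`, and the result record are assumed from `StellaOps.Scheduler.Models`, and the legacy fixup shown is purely illustrative:
```csharp
using System.Collections.Immutable;
using System.Text.Json.Nodes;

public static class SchedulerSchemaMigration
{
    public static SchedulerSchemaMigrationResult<Schedule> UpgradeSchedule(JsonNode document)
    {
        // Clone first so the caller's node is never mutated.
        var node = document.DeepClone().AsObject();
        var warnings = ImmutableArray.CreateBuilder<string>();

        var fromVersion = node["schemaVersion"]?.GetValue<string>();
        if (string.IsNullOrWhiteSpace(fromVersion))
        {
            fromVersion = "scheduler.schedule@0"; // treat a missing version as legacy
            warnings.Add("schemaVersion missing; assumed scheduler.schedule@0.");
        }

        if (fromVersion == "scheduler.schedule@0")
        {
            // Illustrative legacy fixup: backfill the subscribers array added in @1.
            if (node["subscribers"] is null)
            {
                node["subscribers"] = new JsonArray();
                warnings.Add("Backfilled missing subscribers array.");
            }
        }

        node["schemaVersion"] = SchedulerSchemaVersions.Schedule;
        var value = CanonicalJsonSerializer.Deserialize<Schedule>(node.ToJsonString());
        return new SchedulerSchemaMigrationResult<Schedule>(
            value, fromVersion, SchedulerSchemaVersions.Schedule, warnings.ToImmutable());
    }
}
```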
## Schedule Evolution Considerations
- **@1** fields: `mode`, `selection`, `onlyIf`, `notify`, `limits` (incl. `burst` default 0), `subscribers` (sorted unique), audit metadata.
- Future **@2** candidate changes to plan for in helpers:
- `limits`: splitting `parallelism` into planner/runner concurrency.
- `selection`: adding `impactWindow` semantics.
- `notify`: optional per-channel overrides.
- Upgrade pipeline will carry forward unknown fields in a `JsonNode` bag so future versions can opt in to strict dropping while maintaining backwards compatibility for the current release.
## Run State Transition Helper
- Introduce `RunStateMachine` (static) encapsulating allowed transitions and invariants; a sketch follows this list.
- Define adjacency map:
- `Planning → {Queued, Cancelled}`
- `Queued → {Running, Cancelled}`
- `Running → {Completed, Error, Cancelled}`
- `Completed`, `Error`, `Cancelled` are terminal.
- Provide `bool CanTransition(RunState from, RunState to)` and `Run EnsureTransition(Run run, RunState next, DateTimeOffset now, Action<RunStatsBuilder>? mutateStats = null)`.
- `EnsureTransition` performs:
- Timestamp enforcement: `StartedAt` auto-populated on first entry into `Running`; `FinishedAt` set when entering any terminal state; ensures monotonic ordering (`CreatedAt ≤ StartedAt ≤ FinishedAt`).
- Stats guardrails: cumulative counters must not decrease; `RunStatsBuilder` wrapper ensures atomic updates.
- Error context: require `error` message when transitioning to `Error`; clear error for non-error entries.
- Provide `Validate(Run run)` to check invariants for documents loaded from storage before use (e.g., stale snapshots).
- Expose a small helper to tag `RunReason.ImpactWindowFrom/To` automatically when set by planners (using normalized ISO-8601).
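A compact sketch of the adjacency map and the `CanTransition` guard; the `RunState` enum values are the ones named above:
```csharp
using System;
using System.Collections.Generic;

public static class RunStateMachine
{
    // Terminal states map to empty arrays, so every transition out of them fails.
    private static readonly IReadOnlyDictionary<RunState, RunState[]> Allowed =
        new Dictionary<RunState, RunState[]>
        {
            [RunState.Planning] = new[] { RunState.Queued, RunState.Cancelled },
            [RunState.Queued] = new[] { RunState.Running, RunState.Cancelled },
            [RunState.Running] = new[] { RunState.Completed, RunState.Error, RunState.Cancelled },
            [RunState.Completed] = Array.Empty<RunState>(),
            [RunState.Error] = Array.Empty<RunState>(),
            [RunState.Cancelled] = Array.Empty<RunState>(),
        };

    public static bool CanTransition(RunState from, RunState to)
        => Allowed.TryGetValue(from, out var next) && Array.IndexOf(next, to) >= 0;
}
```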
## Interaction Points
- **WebService**: call `SchedulerSchemaMigration.UpgradeSchedule` when returning schedules from Mongo, so clients always see the newest DTO regardless of stored version.
- **Storage.Mongo**: wrap DTO round-trips; the migration helper acts during read, and the state machine ensures updates respect transition rules before writing.
- **Queue/Worker**: use `RunStateMachine.EnsureTransition` to guard planner/runner state updates (replacing ad-hoc `with`-expression clones).
- **Offline Kit**: embed `schemaVersion` in exported JSON/Trivy artifacts; migrations ensure air-gapped upgrades flow without manual scripts.
## Implementation Steps (for follow-up task)
1. Add `SchedulerSchemaVersions` + update DTO constructors/properties.
2. Implement `SchedulerSchemaMigration` helpers and shared `MigrationResult` envelope.
3. Introduce `RunStateMachine` with invariants + supporting `RunStatsBuilder`.
4. Update modules (Storage, WebService, Worker) to use new helpers; add logging around migrations/transitions.
## Test Strategy
- **Migration happy-path**: load sample Mongo fixtures for `schedule@1` and `run@1`, assert `schemaVersion` normalization, deduplicated subscribers, limits defaults. Include snapshots without the version field to exercise defaulting logic.
- **Legacy upgrade cases**: craft synthetic `schedule@0` / `run@0` JSON fragments (missing new fields, using old enum names) and verify version-specific fixups produce the latest DTO while populating `MigrationResult.Warnings`.
- **Strict mode behavior**: attempt to upgrade documents with unexpected properties and ensure warnings/throws align with configuration.
- **Run state transitions**: unit-test `RunStateMachine` for every allowed edge, invalid transitions, and timestamp/error invariants (e.g., `FinishedAt` only set on terminal states). Provide parameterized tests to confirm stats monotonicity enforcement.
- **Serialization determinism**: round-trip upgraded DTOs via `CanonicalJsonSerializer` to confirm property order includes `schemaVersion` first and produces stable hashes.
- **Documentation snippets**: extend module README or API docs with example migrations/run-state usage; verify via doc samples test (if available) or include as part of CI doc linting.
## Open Questions
- Do we need downgrade (`ToVersion`) helpers for Offline Kit exports? (Assumed no for now. Add backlog item if required.)
- Should `ImpactSet` migrations live here or in the ImpactIndex module? (Lean towards here because the DTO is defined in Models; coordinate with the ImpactIndex guild if they need specialized upgrades.)
- How do we surface migration warnings to telemetry? Proposal: the caller logs a `warning` with `MigrationResult.Warnings` immediately after calling the helper.
## Status — 2025-10-20
- `SchedulerSchemaMigration` now upgrades legacy `@0` schedule/run/impact-set documents to the `@1` schema, defaulting missing counters/arrays and normalizing booleans & severities. Each backfill emits a warning so storage/web callers can log the mutation.
- `RunStateMachine.EnsureTransition` guards timestamp ordering and stats monotonicity; builders and extension helpers are wired into the scheduler worker/web service plans.
- Tests exercising legacy upgrades live in `StellaOps.Scheduler.Models.Tests/SchedulerSchemaMigrationTests.cs`; add new fixtures there when introducing additional schema versions.

View File

@@ -0,0 +1,148 @@
# SCHED-MODELS-20-001 — Policy Engine Run DTOs
> Status: 2025-10-26 — **Complete**
Defines the scheduler contracts that Policy Engine (Epic 2) relies on for orchestration, simulation, and explainability. DTOs serialize with `CanonicalJsonSerializer` to guarantee deterministic ordering, enabling replay and signed artefacts.
## PolicyRunRequest — `scheduler.policy-run-request@1`
Posted by CLI/UI or the orchestrator to enqueue a run. Canonical sample lives at `samples/api/scheduler/policy-run-request.json`.
```jsonc
{
"schemaVersion": "scheduler.policy-run-request@1",
"tenantId": "default",
"policyId": "P-7",
"policyVersion": 4,
"mode": "incremental", // full | incremental | simulate
"priority": "normal", // normal | high | emergency
"runId": "run:P-7:2025-10-26:auto", // optional idempotency key
"queuedAt": "2025-10-26T14:05:00+00:00",
"requestedBy": "user:cli",
"correlationId": "req-...",
"metadata": {"source": "stella policy run", "trigger": "cli"},
"inputs": {
"sbomSet": ["sbom:S-318", "sbom:S-42"], // sorted uniques
"advisoryCursor": "2025-10-26T13:59:00+00:00",
"vexCursor": "2025-10-26T13:58:30+00:00",
"environment": {"exposure": "internet", "sealed": false},
"captureExplain": true
}
}
```
* Environment values accept any JSON primitive/object; keys normalise to lowercase for deterministic hashing.
* `metadata` is optional contextual breadcrumbs (lowercased keys). Use it for orchestrator provenance or offline bundle identifiers.
## PolicyRunStatus — `scheduler.policy-run-status@1`
Captured in `policy_runs` collection and returned by run status APIs. Sample: `samples/api/scheduler/policy-run-status.json`.
```jsonc
{
"schemaVersion": "scheduler.policy-run-status@1",
"runId": "run:P-7:2025-10-26:auto",
"tenantId": "default",
"policyId": "P-7",
"policyVersion": 4,
"mode": "incremental",
"status": "succeeded", // queued|running|succeeded|failed|canceled|replay_pending
"priority": "normal",
"queuedAt": "2025-10-26T14:05:00+00:00",
"startedAt": "2025-10-26T14:05:11+00:00",
"finishedAt": "2025-10-26T14:06:01+00:00",
"determinismHash": "sha256:...", // optional until run completes
"traceId": "01HE0BJX5S4T9YCN6ZT0",
"metadata": {"orchestrator": "scheduler", "sbombatchhash": "sha256:..."},
"stats": {
"components": 1742,
"rulesFired": 68023,
"findingsWritten": 4321,
"vexOverrides": 210,
"quieted": 12,
"durationSeconds": 50.8
},
"inputs": { ... } // same schema as request
}
```
* `determinismHash` must be a `sha256:` digest combining ordered input digests + policy digest (illustrated below).
* `attempts` (not shown) increases per retry.
* Error responses populate `errorCode` (`ERR_POL_00x`) and `error` message; omitted when successful.
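One plausible construction, shown only to make the hashing contract concrete; the authoritative ordering and concatenation rule is owned by the Policy Engine:
```csharp
using System;
using System.Collections.Generic;
using System.Security.Cryptography;
using System.Text;

internal static class DeterminismHash
{
    public static string Compute(IReadOnlyList<string> orderedInputDigests, string policyDigest)
    {
        // Inputs are assumed to be pre-sorted; the policy digest is appended last.
        var payload = string.Join('\n', orderedInputDigests) + "\n" + policyDigest;
        var hash = SHA256.HashData(Encoding.UTF8.GetBytes(payload));
        return "sha256:" + Convert.ToHexString(hash).ToLowerInvariant();
    }
}
```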
## PolicyDiffSummary — `scheduler.policy-diff-summary@1`
Returned by simulation APIs; referenced by CLI/UI diff visualisations. Sample: `samples/api/scheduler/policy-diff-summary.json`.
```jsonc
{
"schemaVersion": "scheduler.policy-diff-summary@1",
"added": 12,
"removed": 8,
"unchanged": 657,
"bySeverity": {
"critical": {"up": 1},
"high": {"up": 3, "down": 4},
"medium": {"up": 2, "down": 1}
},
"ruleHits": [
{"ruleId": "rule-block-critical", "ruleName": "Block Critical Findings", "up": 1},
{"ruleId": "rule-quiet-low", "ruleName": "Quiet Low Risk", "down": 2}
]
}
```
* Severity bucket keys normalise to camelCase for JSON determinism across CLI/UI consumers.
* Zero-valued counts (`down`/`up`) are omitted to keep payloads compact.
* `ruleHits` sorts by `ruleId` to keep diff heatmaps deterministic.
## PolicyExplainTrace — `scheduler.policy-explain-trace@1`
Canonical explain tree embedded in findings explainers and exported bundles. Sample: `samples/api/scheduler/policy-explain-trace.json`.
```jsonc
{
"schemaVersion": "scheduler.policy-explain-trace@1",
"findingId": "finding:sbom:S-42/pkg:npm/lodash@4.17.21",
"policyId": "P-7",
"policyVersion": 4,
"tenantId": "default",
"runId": "run:P-7:2025-10-26:auto",
"evaluatedAt": "2025-10-26T14:06:01+00:00",
"verdict": {"status": "blocked", "severity": "critical", "score": 19.5},
"ruleChain": [
{"ruleId": "rule-allow-known", "action": "allow", "decision": "skipped"},
{"ruleId": "rule-block-critical", "action": "block", "decision": "matched", "score": 19.5}
],
"evidence": [
{"type": "advisory", "reference": "CVE-2025-12345", "source": "nvd", "status": "affected", "weight": 1, "metadata": {}},
{"type": "vex", "reference": "vex:ghsa-2025-0001", "source": "vendor", "status": "not_affected", "weight": 0.5}
],
"vexImpacts": [
{"statementId": "vex:ghsa-2025-0001", "provider": "vendor", "status": "not_affected", "accepted": true}
],
"history": [
{"status": "blocked", "occurredAt": "2025-10-26T14:06:01+00:00", "actor": "policy-engine"}
],
"metadata": {"componentpurl": "pkg:npm/lodash@4.17.21", "sbomid": "sbom:S-42", "traceid": "01HE0BJX5S4T9YCN6ZT0"}
}
```
* Rule chain preserves execution order; evidence & VEX arrays sort for deterministic outputs.
* Evidence metadata is always emitted (empty object when no attributes) so clients can merge annotations deterministically.
* Metadata keys lower-case for consistent lookups (`componentpurl`, `traceid`, etc.).
* `verdict.status` uses `passed|warned|blocked|quieted|ignored` reflecting final policy decision.
## Compliance Checklist
| Item | Owner | Status | Notes |
| --- | --- | --- | --- |
| Canonical samples committed (`policy-run-request\|status\|diff-summary\|explain-trace`) | Scheduler Models Guild | ☑ 2025-10-26 | Round-trip tests enforce schema stability. |
| DTOs documented here and linked from `/docs/policy/runs.md` checklist | Scheduler Models Guild | ☑ 2025-10-26 | Added Run DTO schema section. |
| Serializer ensures deterministic ordering for new types | Scheduler Models Guild | ☑ 2025-10-26 | `CanonicalJsonSerializer` updated with property order + converters. |
| Tests cover DTO validation and sample fixtures | Scheduler Models Guild | ☑ 2025-10-26 | `PolicyRunModelsTests` + extended `SamplePayloadTests`. |
| Scheduler guilds notified (Models, Worker, WebService) | Scheduler Models Guild | ☑ 2025-10-26 | Posted in `#scheduler-guild` with sample links. |
---
*Last updated: 2025-10-26.*

View File

@@ -0,0 +1,107 @@
# SCHED-MODELS-21-001 — Graph Job DTOs
> Status: 2025-10-26 — **Complete**
Defines the scheduler-facing contracts for Cartographer orchestration. Both DTOs serialize with `CanonicalJsonSerializer` and share the `GraphJobStatus` lifecycle guarded by `GraphJobStateMachine`.
## GraphBuildJob — `scheduler.graph-build-job@1`
```jsonc
{
"schemaVersion": "scheduler.graph-build-job@1",
"id": "gbj_...",
"tenantId": "tenant-id",
"sbomId": "sbom-id",
"sbomVersionId": "sbom-version-id",
"sbomDigest": "sha256:<64-hex>",
"graphSnapshotId": "graph-snapshot-id?", // optional until Cartographer returns id
"status": "pending|queued|running|completed|failed|cancelled",
"trigger": "sbom-version|backfill|manual",
"attempts": 0,
"cartographerJobId": "external-id?", // optional identifier returned by Cartographer
"correlationId": "evt-...", // optional event correlation key
"createdAt": "2025-10-26T12:00:00+00:00",
"startedAt": "2025-10-26T12:00:05+00:00?",
"completedAt": "2025-10-26T12:00:35+00:00?",
"error": "cartographer timeout?", // populated only for failed state
"metadata": { // extra provenance (sorted, case-insensitive keys)
"sbomEventId": "sbom_evt_123"
}
}
```
* `sbomDigest` must be a lowercase `sha256:<hex>` string.
* `attempts` is monotonic across retries; `GraphJobStateMachine.EnsureTransition` enforces non-decreasing values and timestamps.
* Terminal states (`completed|failed|cancelled`) require `completedAt` to be set; failures require `error`.
## GraphOverlayJob — `scheduler.graph-overlay-job@1`
```jsonc
{
"schemaVersion": "scheduler.graph-overlay-job@1",
"id": "goj_...",
"tenantId": "tenant-id",
"graphSnapshotId": "graph-snapshot-id",
"buildJobId": "gbj_...?",
"overlayKind": "policy|advisory|vex",
"overlayKey": "policy@2025-10-01",
"subjects": [
"artifact/service-api",
"artifact/service-worker"
],
"status": "pending|queued|running|completed|failed|cancelled",
"trigger": "policy|advisory|vex|sbom-version|manual",
"attempts": 0,
"correlationId": "policy_run_321?",
"createdAt": "2025-10-26T12:05:00+00:00",
"startedAt": "2025-10-26T12:05:05+00:00?",
"completedAt": "2025-10-26T12:05:15+00:00?",
"error": "overlay build failed?",
"metadata": {
"policyRunId": "policy_run_321"
}
}
```
* `overlayKey` is free-form but trimmed; `subjects` are deduplicated and lexicographically ordered (see the sketch below).
* `GraphOverlayJobTrigger` strings (`policy`, `advisory`, `vex`, `sbom-version`, `manual`) align with upstream events (Policy Engine, Conseiller, Excititor, SBOM Service, or manual enqueue).
* State invariants mirror build jobs: timestamps advance monotonically, terminal states require `completedAt`, failures require `error`.
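For reference, the subject normalization can be expressed as a short LINQ pipeline; `rawSubjects` is any `IEnumerable<string>`, and the snippet assumes `System.Linq` plus `System.Collections.Immutable` are in scope:
```csharp
var subjects = rawSubjects
    .Select(static s => s.Trim())
    .Where(static s => s.Length > 0)
    .Distinct(StringComparer.Ordinal)
    .OrderBy(static s => s, StringComparer.Ordinal)
    .ToImmutableArray();
```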
## Status & trigger matrix
| Enum | JSON values |
| --- | --- |
| `GraphJobStatus` | `pending`, `queued`, `running`, `completed`, `failed`, `cancelled` |
| `GraphBuildJobTrigger` | `sbom-version`, `backfill`, `manual` |
| `GraphOverlayJobTrigger` | `policy`, `advisory`, `vex`, `sbom-version`, `manual` |
| `GraphOverlayKind` | `policy`, `advisory`, `vex` |
`GraphJobStateMachine` exposes `CanTransition` and `EnsureTransition(...)` helpers to keep scheduler workers deterministic and to centralize validation logic. Callers must provide an error message when moving to `failed`; other states clear the error automatically.
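A hypothetical worker-side guard illustrating the helpers; `job` and `timeProvider` are placeholders, and `EnsureTransition` is assumed to mirror the run-state helper's `(job, next, now)` shape:
```csharp
if (!GraphJobStateMachine.CanTransition(job.Status, GraphJobStatus.Running))
{
    throw new InvalidOperationException($"Illegal transition {job.Status} -> Running for {job.Id}.");
}

// Timestamp ordering and attempt monotonicity are validated inside the state machine.
job = GraphJobStateMachine.EnsureTransition(job, GraphJobStatus.Running, timeProvider.GetUtcNow());
```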
---
## Published samples
- `samples/api/scheduler/graph-build-job.json`: canonical Cartographer build request snapshot (status `running`, one retry).
- `samples/api/scheduler/graph-overlay-job.json`: queued policy overlay job with deduplicated `subjects`.
- `docs/events/samples/scheduler.graph.job.completed@1.sample.json`: legacy completion event embedding the canonical job payload for downstream caches/UI.
Tests in `StellaOps.Scheduler.Models.Tests/SamplePayloadTests.cs` validate the job fixtures against the canonical serializer.
---
## Events
Scheduler emits `scheduler.graph.job.completed@1` when a graph build or overlay job reaches `completed`, `failed`, or `cancelled`. Schema lives at `docs/events/scheduler.graph.job.completed@1.json` (legacy envelope) and the sample above illustrates the canonical payload. Downstream services should validate their consumers against the schema and budget for eventual migration to the orchestrator envelope once Cartographer hooks are promoted.
---
## Compliance checklist
| Item | Owner | Status | Notes |
| --- | --- | --- | --- |
| Canonical graph job samples committed under `samples/api/scheduler` | Scheduler Models Guild | ☑ 2025-10-26 | Round-trip tests cover both payloads. |
| Schema doc published with trigger/status matrix and sample references | Scheduler Models Guild | ☑ 2025-10-26 | This document. |
| Event schema + sample published under `docs/events/` | Scheduler Models Guild | ☑ 2025-10-26 | `scheduler.graph.job.completed@1` covers terminal job events. |
| Notify Scheduler WebService & Worker guilds about new DTO availability | Scheduler Models Guild | ☑ 2025-10-26 | Announcement posted (see `docs/updates/2025-10-26-scheduler-graph-jobs.md`). |
| Notify Cartographer Guild about expected job metadata (`graphSnapshotId`, `cartographerJobId`) | Scheduler Models Guild | ☑ 2025-10-26 | Included in Cartographer sync note (`docs/updates/2025-10-26-scheduler-graph-jobs.md`). |

View File

@@ -0,0 +1,4 @@
# StellaOps.Scheduler.Queue — Agent Charter
## Mission
Provide queue abstraction (Redis Streams / NATS JetStream) for planner inputs and runner segments per `docs/ARCHITECTURE_SCHEDULER.md`.

View File

@@ -0,0 +1,3 @@
using System.Runtime.CompilerServices;
[assembly: InternalsVisibleTo("StellaOps.Scheduler.Queue.Tests")]

View File

@@ -0,0 +1,9 @@
using System.Threading;
using System.Threading.Tasks;
namespace StellaOps.Scheduler.Queue;
internal interface ISchedulerQueueTransportDiagnostics
{
ValueTask PingAsync(CancellationToken cancellationToken);
}

View File

@@ -0,0 +1,26 @@
using System.Collections.Generic;
namespace StellaOps.Scheduler.Queue.Nats;
internal interface INatsSchedulerQueuePayload<TMessage>
{
string QueueName { get; }
string GetIdempotencyKey(TMessage message);
byte[] Serialize(TMessage message);
TMessage Deserialize(byte[] payload);
string GetRunId(TMessage message);
string GetTenantId(TMessage message);
string? GetScheduleId(TMessage message);
string? GetSegmentId(TMessage message);
string? GetCorrelationId(TMessage message);
IReadOnlyDictionary<string, string>? GetAttributes(TMessage message);
}

View File

@@ -0,0 +1,66 @@
using System;
using System.Collections.Generic;
using System.Text;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.Extensions.Logging;
using NATS.Client.Core;
using NATS.Client.JetStream;
using StellaOps.Scheduler.Models;
namespace StellaOps.Scheduler.Queue.Nats;
internal sealed class NatsSchedulerPlannerQueue
: NatsSchedulerQueueBase<PlannerQueueMessage>, ISchedulerPlannerQueue
{
public NatsSchedulerPlannerQueue(
SchedulerQueueOptions queueOptions,
SchedulerNatsQueueOptions natsOptions,
ILogger<NatsSchedulerPlannerQueue> logger,
TimeProvider timeProvider,
Func<NatsOpts, CancellationToken, ValueTask<NatsConnection>>? connectionFactory = null)
: base(
queueOptions,
natsOptions,
natsOptions.Planner,
PlannerPayload.Instance,
logger,
timeProvider,
connectionFactory)
{
}
private sealed class PlannerPayload : INatsSchedulerQueuePayload<PlannerQueueMessage>
{
public static PlannerPayload Instance { get; } = new();
public string QueueName => "planner";
public string GetIdempotencyKey(PlannerQueueMessage message)
=> message.IdempotencyKey;
public byte[] Serialize(PlannerQueueMessage message)
=> Encoding.UTF8.GetBytes(CanonicalJsonSerializer.Serialize(message));
public PlannerQueueMessage Deserialize(byte[] payload)
=> CanonicalJsonSerializer.Deserialize<PlannerQueueMessage>(Encoding.UTF8.GetString(payload));
public string GetRunId(PlannerQueueMessage message)
=> message.Run.Id;
public string GetTenantId(PlannerQueueMessage message)
=> message.Run.TenantId;
public string? GetScheduleId(PlannerQueueMessage message)
=> message.ScheduleId;
public string? GetSegmentId(PlannerQueueMessage message)
=> null;
public string? GetCorrelationId(PlannerQueueMessage message)
=> message.CorrelationId;
public IReadOnlyDictionary<string, string>? GetAttributes(PlannerQueueMessage message)
=> null;
}
}

View File

@@ -0,0 +1,692 @@
using System;
using System.Collections.Generic;
using System.Collections.ObjectModel;
using System.Linq;
using System.Text;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.Extensions.Logging;
using NATS.Client.Core;
using NATS.Client.JetStream;
using NATS.Client.JetStream.Models;
namespace StellaOps.Scheduler.Queue.Nats;
internal abstract class NatsSchedulerQueueBase<TMessage> : ISchedulerQueue<TMessage>, IAsyncDisposable, ISchedulerQueueTransportDiagnostics
{
private const string TransportName = "nats";
private static readonly INatsSerializer<byte[]> PayloadSerializer = NatsRawSerializer<byte[]>.Default;
private readonly SchedulerQueueOptions _queueOptions;
private readonly SchedulerNatsQueueOptions _natsOptions;
private readonly SchedulerNatsStreamOptions _streamOptions;
private readonly INatsSchedulerQueuePayload<TMessage> _payload;
private readonly ILogger _logger;
private readonly TimeProvider _timeProvider;
private readonly SemaphoreSlim _connectionGate = new(1, 1);
private readonly Func<NatsOpts, CancellationToken, ValueTask<NatsConnection>> _connectionFactory;
private NatsConnection? _connection;
private NatsJSContext? _jsContext;
private INatsJSConsumer? _consumer;
private bool _disposed;
private long _approximateDepth;
protected NatsSchedulerQueueBase(
SchedulerQueueOptions queueOptions,
SchedulerNatsQueueOptions natsOptions,
SchedulerNatsStreamOptions streamOptions,
INatsSchedulerQueuePayload<TMessage> payload,
ILogger logger,
TimeProvider timeProvider,
Func<NatsOpts, CancellationToken, ValueTask<NatsConnection>>? connectionFactory = null)
{
_queueOptions = queueOptions ?? throw new ArgumentNullException(nameof(queueOptions));
_natsOptions = natsOptions ?? throw new ArgumentNullException(nameof(natsOptions));
_streamOptions = streamOptions ?? throw new ArgumentNullException(nameof(streamOptions));
_payload = payload ?? throw new ArgumentNullException(nameof(payload));
_logger = logger ?? throw new ArgumentNullException(nameof(logger));
_timeProvider = timeProvider ?? TimeProvider.System;
_connectionFactory = connectionFactory ?? ((opts, cancellationToken) => new ValueTask<NatsConnection>(new NatsConnection(opts)));
if (string.IsNullOrWhiteSpace(_natsOptions.Url))
{
throw new InvalidOperationException("NATS connection URL must be configured for the scheduler queue.");
}
}
public async ValueTask<SchedulerQueueEnqueueResult> EnqueueAsync(
TMessage message,
CancellationToken cancellationToken = default)
{
ArgumentNullException.ThrowIfNull(message);
var js = await GetJetStreamAsync(cancellationToken).ConfigureAwait(false);
await EnsureStreamAndConsumerAsync(js, cancellationToken).ConfigureAwait(false);
var payloadBytes = _payload.Serialize(message);
var idempotencyKey = _payload.GetIdempotencyKey(message);
var headers = BuildHeaders(message, idempotencyKey);
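// JetStream deduplicates publishes by MsgId within the stream's duplicate window,
// so reusing the idempotency key here makes producer retries safe.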
var publishOptions = new NatsJSPubOpts
{
MsgId = idempotencyKey,
RetryAttempts = 0
};
var ack = await js.PublishAsync(
_streamOptions.Subject,
payloadBytes,
PayloadSerializer,
publishOptions,
headers,
cancellationToken)
.ConfigureAwait(false);
if (ack.Duplicate)
{
SchedulerQueueMetrics.RecordDeduplicated(TransportName, _payload.QueueName);
_logger.LogDebug(
"Duplicate enqueue detected for scheduler {Queue} message idempotency key {Key}; sequence {Sequence} reused.",
_payload.QueueName,
idempotencyKey,
ack.Seq);
PublishDepth();
return new SchedulerQueueEnqueueResult(ack.Seq.ToString(), true);
}
SchedulerQueueMetrics.RecordEnqueued(TransportName, _payload.QueueName);
_logger.LogDebug(
"Enqueued scheduler {Queue} message into stream {Stream} with sequence {Sequence}.",
_payload.QueueName,
ack.Stream,
ack.Seq);
IncrementDepth();
return new SchedulerQueueEnqueueResult(ack.Seq.ToString(), false);
}
public async ValueTask<IReadOnlyList<ISchedulerQueueLease<TMessage>>> LeaseAsync(
SchedulerQueueLeaseRequest request,
CancellationToken cancellationToken = default)
{
ArgumentNullException.ThrowIfNull(request);
var js = await GetJetStreamAsync(cancellationToken).ConfigureAwait(false);
var consumer = await EnsureStreamAndConsumerAsync(js, cancellationToken).ConfigureAwait(false);
var fetchOpts = new NatsJSFetchOpts
{
MaxMsgs = request.BatchSize,
Expires = request.LeaseDuration,
IdleHeartbeat = _natsOptions.IdleHeartbeat
};
var now = _timeProvider.GetUtcNow();
var leases = new List<ISchedulerQueueLease<TMessage>>(request.BatchSize);
await foreach (var message in consumer.FetchAsync(PayloadSerializer, fetchOpts, cancellationToken).ConfigureAwait(false))
{
var lease = CreateLease(message, request.Consumer, now, request.LeaseDuration);
if (lease is not null)
{
leases.Add(lease);
}
}
PublishDepth();
return leases;
}
public async ValueTask<IReadOnlyList<ISchedulerQueueLease<TMessage>>> ClaimExpiredAsync(
SchedulerQueueClaimOptions options,
CancellationToken cancellationToken = default)
{
ArgumentNullException.ThrowIfNull(options);
var js = await GetJetStreamAsync(cancellationToken).ConfigureAwait(false);
var consumer = await EnsureStreamAndConsumerAsync(js, cancellationToken).ConfigureAwait(false);
var fetchOpts = new NatsJSFetchOpts
{
MaxMsgs = options.BatchSize,
Expires = options.MinIdleTime,
IdleHeartbeat = _natsOptions.IdleHeartbeat
};
var now = _timeProvider.GetUtcNow();
var leases = new List<ISchedulerQueueLease<TMessage>>(options.BatchSize);
await foreach (var message in consumer.FetchAsync(PayloadSerializer, fetchOpts, cancellationToken).ConfigureAwait(false))
{
var deliveries = (int)(message.Metadata?.NumDelivered ?? 1);
if (deliveries <= 1)
{
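// NumDelivered == 1 means this fetch is the message's first delivery, i.e. it was
// never leased before and cannot be expired; NAK with zero delay so regular
// consumers can pick it up again.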
await message.NakAsync(new AckOpts(), TimeSpan.Zero, cancellationToken).ConfigureAwait(false);
continue;
}
var lease = CreateLease(message, options.ClaimantConsumer, now, _queueOptions.DefaultLeaseDuration);
if (lease is not null)
{
leases.Add(lease);
}
}
PublishDepth();
return leases;
}
public async ValueTask DisposeAsync()
{
if (_disposed)
{
return;
}
_disposed = true;
if (_connection is not null)
{
await _connection.DisposeAsync().ConfigureAwait(false);
}
_connectionGate.Dispose();
SchedulerQueueMetrics.RemoveDepth(TransportName, _payload.QueueName);
GC.SuppressFinalize(this);
}
public async ValueTask PingAsync(CancellationToken cancellationToken)
{
var connection = await EnsureConnectionAsync(cancellationToken).ConfigureAwait(false);
await connection.PingAsync(cancellationToken).ConfigureAwait(false);
}
internal async Task AcknowledgeAsync(NatsSchedulerQueueLease<TMessage> lease, CancellationToken cancellationToken)
{
if (!lease.TryBeginCompletion())
{
return;
}
await lease.RawMessage.AckAsync(new AckOpts(), cancellationToken).ConfigureAwait(false);
SchedulerQueueMetrics.RecordAck(TransportName, _payload.QueueName);
DecrementDepth();
}
internal async Task RenewAsync(NatsSchedulerQueueLease<TMessage> lease, TimeSpan leaseDuration, CancellationToken cancellationToken)
{
await lease.RawMessage.AckProgressAsync(new AckOpts(), cancellationToken).ConfigureAwait(false);
lease.RefreshLease(_timeProvider.GetUtcNow().Add(leaseDuration));
}
internal async Task ReleaseAsync(NatsSchedulerQueueLease<TMessage> lease, SchedulerQueueReleaseDisposition disposition, CancellationToken cancellationToken)
{
if (disposition == SchedulerQueueReleaseDisposition.Retry && lease.Attempt >= _queueOptions.MaxDeliveryAttempts)
{
await DeadLetterAsync(lease, $"max-delivery-attempts:{lease.Attempt}", cancellationToken).ConfigureAwait(false);
return;
}
if (!lease.TryBeginCompletion())
{
return;
}
if (disposition == SchedulerQueueReleaseDisposition.Retry)
{
SchedulerQueueMetrics.RecordRetry(TransportName, _payload.QueueName);
var delay = CalculateBackoff(lease.Attempt + 1);
lease.IncrementAttempt();
await lease.RawMessage.NakAsync(new AckOpts(), delay, cancellationToken).ConfigureAwait(false);
_logger.LogWarning(
"Requeued scheduler {Queue} message {RunId} with delay {Delay} (attempt {Attempt}).",
_payload.QueueName,
lease.RunId,
delay,
lease.Attempt);
}
else
{
await lease.RawMessage.AckTerminateAsync(new AckOpts(), cancellationToken).ConfigureAwait(false);
SchedulerQueueMetrics.RecordAck(TransportName, _payload.QueueName);
DecrementDepth();
_logger.LogInformation(
"Abandoned scheduler {Queue} message {RunId} after {Attempt} attempt(s).",
_payload.QueueName,
lease.RunId,
lease.Attempt);
}
PublishDepth();
}
internal async Task DeadLetterAsync(NatsSchedulerQueueLease<TMessage> lease, string reason, CancellationToken cancellationToken)
{
if (!lease.TryBeginCompletion())
{
return;
}
await lease.RawMessage.AckAsync(new AckOpts(), cancellationToken).ConfigureAwait(false);
DecrementDepth();
var js = await GetJetStreamAsync(cancellationToken).ConfigureAwait(false);
if (!_queueOptions.DeadLetterEnabled)
{
_logger.LogWarning(
"Dropped scheduler {Queue} message {RunId} after {Attempt} attempt(s); dead-letter disabled. Reason: {Reason}",
_payload.QueueName,
lease.RunId,
lease.Attempt,
reason);
PublishDepth();
return;
}
await EnsureDeadLetterStreamAsync(js, cancellationToken).ConfigureAwait(false);
var headers = BuildDeadLetterHeaders(lease, reason);
await js.PublishAsync(
_streamOptions.DeadLetterSubject,
lease.Payload,
PayloadSerializer,
new NatsJSPubOpts(),
headers,
cancellationToken)
.ConfigureAwait(false);
SchedulerQueueMetrics.RecordDeadLetter(TransportName, _payload.QueueName);
_logger.LogError(
"Dead-lettered scheduler {Queue} message {RunId} after {Attempt} attempt(s): {Reason}",
_payload.QueueName,
lease.RunId,
lease.Attempt,
reason);
PublishDepth();
}
private async Task<NatsJSContext> GetJetStreamAsync(CancellationToken cancellationToken)
{
if (_jsContext is not null)
{
return _jsContext;
}
var connection = await EnsureConnectionAsync(cancellationToken).ConfigureAwait(false);
await _connectionGate.WaitAsync(cancellationToken).ConfigureAwait(false);
try
{
_jsContext ??= new NatsJSContext(connection);
return _jsContext;
}
finally
{
_connectionGate.Release();
}
}
private async ValueTask<INatsJSConsumer> EnsureStreamAndConsumerAsync(NatsJSContext js, CancellationToken cancellationToken)
{
if (_consumer is not null)
{
return _consumer;
}
await _connectionGate.WaitAsync(cancellationToken).ConfigureAwait(false);
try
{
if (_consumer is not null)
{
return _consumer;
}
await EnsureStreamAsync(js, cancellationToken).ConfigureAwait(false);
await EnsureDeadLetterStreamAsync(js, cancellationToken).ConfigureAwait(false);
var consumerConfig = new ConsumerConfig
{
DurableName = _streamOptions.DurableConsumer,
AckPolicy = ConsumerConfigAckPolicy.Explicit,
ReplayPolicy = ConsumerConfigReplayPolicy.Instant,
DeliverPolicy = ConsumerConfigDeliverPolicy.All,
AckWait = ToNanoseconds(_streamOptions.AckWait),
MaxAckPending = Math.Max(1, _streamOptions.MaxAckPending),
MaxDeliver = Math.Max(1, _queueOptions.MaxDeliveryAttempts),
FilterSubjects = new[] { _streamOptions.Subject }
};
try
{
_consumer = await js.CreateConsumerAsync(
_streamOptions.Stream,
consumerConfig,
cancellationToken)
.ConfigureAwait(false);
}
catch (NatsJSApiException apiEx)
{
_logger.LogDebug(apiEx,
"CreateConsumerAsync failed with code {Code}; attempting to reuse durable {Durable}.",
apiEx.Error?.Code,
_streamOptions.DurableConsumer);
_consumer = await js.GetConsumerAsync(
_streamOptions.Stream,
_streamOptions.DurableConsumer,
cancellationToken)
.ConfigureAwait(false);
}
return _consumer;
}
finally
{
_connectionGate.Release();
}
}
private async Task EnsureStreamAsync(NatsJSContext js, CancellationToken cancellationToken)
{
try
{
await js.GetStreamAsync(
_streamOptions.Stream,
new StreamInfoRequest(),
cancellationToken)
.ConfigureAwait(false);
}
catch (NatsJSApiException)
{
var config = new StreamConfig(
name: _streamOptions.Stream,
subjects: new[] { _streamOptions.Subject })
{
Retention = StreamConfigRetention.Workqueue,
Storage = StreamConfigStorage.File,
MaxConsumers = -1,
MaxMsgs = -1,
MaxBytes = -1,
MaxAge = 0
};
await js.CreateStreamAsync(config, cancellationToken).ConfigureAwait(false);
_logger.LogInformation(
"Created NATS JetStream stream {Stream} ({Subject}) for scheduler {Queue} queue.",
_streamOptions.Stream,
_streamOptions.Subject,
_payload.QueueName);
}
}
private async Task EnsureDeadLetterStreamAsync(NatsJSContext js, CancellationToken cancellationToken)
{
if (string.IsNullOrWhiteSpace(_streamOptions.DeadLetterStream) || string.IsNullOrWhiteSpace(_streamOptions.DeadLetterSubject))
{
return;
}
try
{
await js.GetStreamAsync(
_streamOptions.DeadLetterStream,
new StreamInfoRequest(),
cancellationToken)
.ConfigureAwait(false);
}
catch (NatsJSApiException)
{
var config = new StreamConfig(
name: _streamOptions.DeadLetterStream,
subjects: new[] { _streamOptions.DeadLetterSubject })
{
Retention = StreamConfigRetention.Workqueue,
Storage = StreamConfigStorage.File,
MaxConsumers = -1,
MaxMsgs = -1,
MaxBytes = -1,
MaxAge = 0
};
await js.CreateStreamAsync(config, cancellationToken).ConfigureAwait(false);
_logger.LogInformation(
"Created NATS JetStream dead-letter stream {Stream} ({Subject}) for scheduler {Queue} queue.",
_streamOptions.DeadLetterStream,
_streamOptions.DeadLetterSubject,
_payload.QueueName);
}
}
private async Task<NatsConnection> EnsureConnectionAsync(CancellationToken cancellationToken)
{
if (_connection is not null)
{
return _connection;
}
await _connectionGate.WaitAsync(cancellationToken).ConfigureAwait(false);
try
{
if (_connection is not null)
{
return _connection;
}
var options = new NatsOpts
{
Url = _natsOptions.Url!,
Name = $"stellaops-scheduler-{_payload.QueueName}-queue",
CommandTimeout = TimeSpan.FromSeconds(10),
RequestTimeout = TimeSpan.FromSeconds(20),
PingInterval = TimeSpan.FromSeconds(30)
};
_connection = await _connectionFactory(options, cancellationToken).ConfigureAwait(false);
await _connection.ConnectAsync().ConfigureAwait(false);
return _connection;
}
finally
{
_connectionGate.Release();
}
}
private NatsSchedulerQueueLease<TMessage>? CreateLease(
NatsJSMsg<byte[]> message,
string consumer,
DateTimeOffset now,
TimeSpan leaseDuration)
{
var payload = message.Data ?? ReadOnlyMemory<byte>.Empty;
if (payload.IsEmpty)
{
return null;
}
TMessage deserialized;
try
{
deserialized = _payload.Deserialize(payload.ToArray());
}
catch (Exception ex)
{
_logger.LogError(ex, "Failed to deserialize scheduler {Queue} payload from NATS sequence {Sequence}.", _payload.QueueName, message.Metadata?.Sequence);
return null;
}
var attempt = (int)(message.Metadata?.NumDelivered ?? 1);
if (attempt <= 0)
{
attempt = 1;
}
var headers = message.Headers ?? new NatsHeaders();
var enqueuedAt = headers.TryGetValue(SchedulerQueueFields.EnqueuedAt, out var enqueuedValues) && enqueuedValues.Count > 0
&& long.TryParse(enqueuedValues[0], out var unix)
? DateTimeOffset.FromUnixTimeMilliseconds(unix)
: now;
var leaseExpires = now.Add(leaseDuration);
var runId = _payload.GetRunId(deserialized);
var tenantId = _payload.GetTenantId(deserialized);
var scheduleId = _payload.GetScheduleId(deserialized);
var segmentId = _payload.GetSegmentId(deserialized);
var correlationId = _payload.GetCorrelationId(deserialized);
var attributes = _payload.GetAttributes(deserialized) ?? new Dictionary<string, string>();
var attributeView = attributes.Count == 0
? EmptyReadOnlyDictionary<string, string>.Instance
: new ReadOnlyDictionary<string, string>(new Dictionary<string, string>(attributes, StringComparer.Ordinal));
return new NatsSchedulerQueueLease<TMessage>(
this,
message,
payload.ToArray(),
_payload.GetIdempotencyKey(deserialized),
runId,
tenantId,
scheduleId,
segmentId,
correlationId,
attributeView,
deserialized,
attempt,
enqueuedAt,
leaseExpires,
consumer);
}
private NatsHeaders BuildHeaders(TMessage message, string idempotencyKey)
{
var headers = new NatsHeaders
{
{ SchedulerQueueFields.IdempotencyKey, idempotencyKey },
{ SchedulerQueueFields.RunId, _payload.GetRunId(message) },
{ SchedulerQueueFields.TenantId, _payload.GetTenantId(message) },
{ SchedulerQueueFields.QueueKind, _payload.QueueName },
{ SchedulerQueueFields.EnqueuedAt, _timeProvider.GetUtcNow().ToUnixTimeMilliseconds().ToString() }
};
var scheduleId = _payload.GetScheduleId(message);
if (!string.IsNullOrWhiteSpace(scheduleId))
{
headers.Add(SchedulerQueueFields.ScheduleId, scheduleId);
}
var segmentId = _payload.GetSegmentId(message);
if (!string.IsNullOrWhiteSpace(segmentId))
{
headers.Add(SchedulerQueueFields.SegmentId, segmentId);
}
var correlationId = _payload.GetCorrelationId(message);
if (!string.IsNullOrWhiteSpace(correlationId))
{
headers.Add(SchedulerQueueFields.CorrelationId, correlationId);
}
var attributes = _payload.GetAttributes(message);
if (attributes is not null)
{
foreach (var kvp in attributes)
{
headers.Add(SchedulerQueueFields.AttributePrefix + kvp.Key, kvp.Value);
}
}
return headers;
}
private NatsHeaders BuildDeadLetterHeaders(NatsSchedulerQueueLease<TMessage> lease, string reason)
{
var headers = new NatsHeaders
{
{ SchedulerQueueFields.RunId, lease.RunId },
{ SchedulerQueueFields.TenantId, lease.TenantId },
{ SchedulerQueueFields.QueueKind, _payload.QueueName },
{ "reason", reason }
};
if (!string.IsNullOrWhiteSpace(lease.ScheduleId))
{
headers.Add(SchedulerQueueFields.ScheduleId, lease.ScheduleId);
}
if (!string.IsNullOrWhiteSpace(lease.CorrelationId))
{
headers.Add(SchedulerQueueFields.CorrelationId, lease.CorrelationId);
}
if (!string.IsNullOrWhiteSpace(lease.SegmentId))
{
headers.Add(SchedulerQueueFields.SegmentId, lease.SegmentId);
}
return headers;
}
private TimeSpan CalculateBackoff(int attempt)
{
var initial = _queueOptions.RetryInitialBackoff > TimeSpan.Zero
? _queueOptions.RetryInitialBackoff
: _streamOptions.RetryDelay;
if (initial <= TimeSpan.Zero)
{
return TimeSpan.Zero;
}
if (attempt <= 1)
{
return initial;
}
var max = _queueOptions.RetryMaxBackoff > TimeSpan.Zero
? _queueOptions.RetryMaxBackoff
: initial;
var exponent = attempt - 1;
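// Doubles the delay per extra attempt (attempt 2 → initial, 3 → 2× initial, ...),
// capped at RetryMaxBackoff and never below the initial delay.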
var scaledTicks = initial.Ticks * Math.Pow(2, exponent - 1);
var cappedTicks = Math.Min(max.Ticks, scaledTicks);
return TimeSpan.FromTicks((long)Math.Max(initial.Ticks, cappedTicks));
}
private static long ToNanoseconds(TimeSpan value)
=> value <= TimeSpan.Zero ? 0 : (long)(value.TotalMilliseconds * 1_000_000.0);
private sealed class EmptyReadOnlyDictionary<TKey, TValue>
where TKey : notnull
{
public static readonly IReadOnlyDictionary<TKey, TValue> Instance =
new ReadOnlyDictionary<TKey, TValue>(new Dictionary<TKey, TValue>(0, EqualityComparer<TKey>.Default));
}
private void IncrementDepth()
{
var depth = Interlocked.Increment(ref _approximateDepth);
SchedulerQueueMetrics.RecordDepth(TransportName, _payload.QueueName, depth);
}
private void DecrementDepth()
{
var depth = Interlocked.Decrement(ref _approximateDepth);
if (depth < 0)
{
depth = Interlocked.Exchange(ref _approximateDepth, 0);
}
SchedulerQueueMetrics.RecordDepth(TransportName, _payload.QueueName, depth);
}
private void PublishDepth()
{
var depth = Volatile.Read(ref _approximateDepth);
SchedulerQueueMetrics.RecordDepth(TransportName, _payload.QueueName, depth);
}
}

View File

@@ -0,0 +1,101 @@
using System;
using System.Collections.Generic;
using System.Threading;
using System.Threading.Tasks;
using NATS.Client.JetStream;
namespace StellaOps.Scheduler.Queue.Nats;
internal sealed class NatsSchedulerQueueLease<TMessage> : ISchedulerQueueLease<TMessage>
{
private readonly NatsSchedulerQueueBase<TMessage> _queue;
private int _completed;
internal NatsSchedulerQueueLease(
NatsSchedulerQueueBase<TMessage> queue,
NatsJSMsg<byte[]> message,
byte[] payload,
string idempotencyKey,
string runId,
string tenantId,
string? scheduleId,
string? segmentId,
string? correlationId,
IReadOnlyDictionary<string, string> attributes,
TMessage deserialized,
int attempt,
DateTimeOffset enqueuedAt,
DateTimeOffset leaseExpiresAt,
string consumer)
{
_queue = queue;
MessageId = message.Metadata?.Sequence.ToString() ?? idempotencyKey;
RunId = runId;
TenantId = tenantId;
ScheduleId = scheduleId;
SegmentId = segmentId;
CorrelationId = correlationId;
Attributes = attributes;
Attempt = attempt;
EnqueuedAt = enqueuedAt;
LeaseExpiresAt = leaseExpiresAt;
Consumer = consumer;
IdempotencyKey = idempotencyKey;
Message = deserialized;
_message = message;
Payload = payload;
}
private readonly NatsJSMsg<byte[]> _message;
internal NatsJSMsg<byte[]> RawMessage => _message;
internal byte[] Payload { get; }
public string MessageId { get; }
public string IdempotencyKey { get; }
public string RunId { get; }
public string TenantId { get; }
public string? ScheduleId { get; }
public string? SegmentId { get; }
public string? CorrelationId { get; }
public IReadOnlyDictionary<string, string> Attributes { get; }
public TMessage Message { get; }
public int Attempt { get; private set; }
public DateTimeOffset EnqueuedAt { get; }
public DateTimeOffset LeaseExpiresAt { get; private set; }
public string Consumer { get; }
public Task AcknowledgeAsync(CancellationToken cancellationToken = default)
=> _queue.AcknowledgeAsync(this, cancellationToken);
public Task RenewAsync(TimeSpan leaseDuration, CancellationToken cancellationToken = default)
=> _queue.RenewAsync(this, leaseDuration, cancellationToken);
public Task ReleaseAsync(SchedulerQueueReleaseDisposition disposition, CancellationToken cancellationToken = default)
=> _queue.ReleaseAsync(this, disposition, cancellationToken);
public Task DeadLetterAsync(string reason, CancellationToken cancellationToken = default)
=> _queue.DeadLetterAsync(this, reason, cancellationToken);
internal bool TryBeginCompletion()
=> Interlocked.CompareExchange(ref _completed, 1, 0) == 0;
internal void RefreshLease(DateTimeOffset expiresAt)
=> LeaseExpiresAt = expiresAt;
internal void IncrementAttempt()
=> Attempt++;
}

View File

@@ -0,0 +1,74 @@
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.Extensions.Logging;
using NATS.Client.Core;
using NATS.Client.JetStream;
using StellaOps.Scheduler.Models;
namespace StellaOps.Scheduler.Queue.Nats;
internal sealed class NatsSchedulerRunnerQueue
: NatsSchedulerQueueBase<RunnerSegmentQueueMessage>, ISchedulerRunnerQueue
{
public NatsSchedulerRunnerQueue(
SchedulerQueueOptions queueOptions,
SchedulerNatsQueueOptions natsOptions,
ILogger<NatsSchedulerRunnerQueue> logger,
TimeProvider timeProvider,
Func<NatsOpts, CancellationToken, ValueTask<NatsConnection>>? connectionFactory = null)
: base(
queueOptions,
natsOptions,
natsOptions.Runner,
RunnerPayload.Instance,
logger,
timeProvider,
connectionFactory)
{
}
private sealed class RunnerPayload : INatsSchedulerQueuePayload<RunnerSegmentQueueMessage>
{
public static RunnerPayload Instance { get; } = new();
public string QueueName => "runner";
public string GetIdempotencyKey(RunnerSegmentQueueMessage message)
=> message.IdempotencyKey;
public byte[] Serialize(RunnerSegmentQueueMessage message)
=> Encoding.UTF8.GetBytes(CanonicalJsonSerializer.Serialize(message));
public RunnerSegmentQueueMessage Deserialize(byte[] payload)
=> CanonicalJsonSerializer.Deserialize<RunnerSegmentQueueMessage>(Encoding.UTF8.GetString(payload));
public string GetRunId(RunnerSegmentQueueMessage message)
=> message.RunId;
public string GetTenantId(RunnerSegmentQueueMessage message)
=> message.TenantId;
public string? GetScheduleId(RunnerSegmentQueueMessage message)
=> message.ScheduleId;
public string? GetSegmentId(RunnerSegmentQueueMessage message)
=> message.SegmentId;
public string? GetCorrelationId(RunnerSegmentQueueMessage message)
=> message.CorrelationId;
public IReadOnlyDictionary<string, string>? GetAttributes(RunnerSegmentQueueMessage message)
{
if (message.Attributes is null || message.Attributes.Count == 0)
{
return null;
}
return message.Attributes.ToDictionary(kvp => kvp.Key, kvp => kvp.Value, StringComparer.Ordinal);
}
}
}

View File

@@ -0,0 +1,45 @@
# Scheduler Queue — Sprint 16 Coordination Notes
Queue work now has concrete contracts from `StellaOps.Scheduler.Models`:
* Planner inputs reference `Schedule` and `ImpactSet` samples (`samples/api/scheduler/`).
* Runner segment payloads should carry `runId`, `scheduleId?`, `tenantId`, and the impacted digest list (mirrors `Run.Deltas`).
* Notify fanout relies on the `DeltaSummary` shape already emitted by the model layer.
## Action items for SCHED-QUEUE-16-401..403
1. Reference `StellaOps.Scheduler.Models` so adapters can serialise `Run`/`DeltaSummary` without bespoke DTOs.
2. Use the canonical serializer for queue messages to keep ordering consistent with API payloads.
3. Coverage: add fixture-driven tests that enqueue the sample payloads, then dequeue and re-serialise to verify byte-for-byte stability (see the sketch after this list).
4. Expose queue depth/lease metrics with the identifiers provided by the models (`Run.Id`, `Schedule.Id`).
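A fixture-driven round-trip sketch for item 3. The fixture path, the `CreateQueueForTest()` wiring, and the `SchedulerQueueLeaseRequest` constructor shape are assumptions to be replaced by the real harness:
```csharp
using System;
using System.IO;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using Xunit;

public sealed class PlannerQueueRoundTripTests
{
    // Placeholder: wire to the Redis/NATS adapter under test (e.g. via a container fixture).
    private readonly ISchedulerPlannerQueue _queue = CreateQueueForTest();

    [Fact]
    public async Task EnqueueDequeue_IsByteForByteStable()
    {
        var json = await File.ReadAllTextAsync("samples/api/scheduler/planner-queue-message.json");
        var message = CanonicalJsonSerializer.Deserialize<PlannerQueueMessage>(json);
        var original = Encoding.UTF8.GetBytes(CanonicalJsonSerializer.Serialize(message));

        await _queue.EnqueueAsync(message);
        var lease = (await _queue.LeaseAsync(
            new SchedulerQueueLeaseRequest("test-consumer", 1, TimeSpan.FromSeconds(30)))).Single();

        // Canonical serialization must reproduce the exact bytes that went in.
        var roundTripped = Encoding.UTF8.GetBytes(CanonicalJsonSerializer.Serialize(lease.Message));
        Assert.Equal(original, roundTripped);
        await lease.AcknowledgeAsync();
    }

    private static ISchedulerPlannerQueue CreateQueueForTest()
        => throw new NotImplementedException("Replace with transport wiring for the test run.");
}
```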
## JetStream failover notes
- `SchedulerQueueOptions.Kind = "nats"` will spin up `NatsSchedulerPlannerQueue` / `NatsSchedulerRunnerQueue` instances backed by JetStream.
- `SchedulerQueueHealthCheck` pings both planner and runner transports; register via `AddSchedulerQueueHealthCheck()` to surface in `/healthz`.
- Example configuration (planner, runner, and Redis settings):
```yaml
scheduler:
queue:
kind: nats
deadLetterEnabled: true
nats:
url: "nats://nats:4222"
planner:
stream: SCHEDULER_PLANNER
subject: scheduler.planner
durableConsumer: scheduler-planners
deadLetterStream: SCHEDULER_PLANNER_DEAD
deadLetterSubject: scheduler.planner.dead
runner:
stream: SCHEDULER_RUNNER
subject: scheduler.runner
durableConsumer: scheduler-runners
redis:
deadLetterStream: scheduler:planner:dead
idempotencyKeyPrefix: scheduler:planner:idemp:
```
- Retry / dead-letter semantics mirror the Redis adapter: attempts beyond `MaxDeliveryAttempts` are shipped to the configured dead-letter stream with headers describing `runId`, `scheduleId`, and failure reasons. Set `deadLetterEnabled: false` to drop exhausted messages instead.
- Depth metrics surface through `scheduler_queue_depth{transport,queue}`; both transports publish lightweight counters to drive alerting dashboards.
These notes unblock the queue guild now that SCHED-MODELS-16-102 is complete.

View File

@@ -0,0 +1,26 @@
using System.Collections.Generic;
namespace StellaOps.Scheduler.Queue.Redis;
internal interface IRedisSchedulerQueuePayload<TMessage>
{
string QueueName { get; }
string GetIdempotencyKey(TMessage message);
string Serialize(TMessage message);
TMessage Deserialize(string payload);
string GetRunId(TMessage message);
string GetTenantId(TMessage message);
string? GetScheduleId(TMessage message);
string? GetSegmentId(TMessage message);
string? GetCorrelationId(TMessage message);
IReadOnlyDictionary<string, string>? GetAttributes(TMessage message);
}

View File

@@ -0,0 +1,64 @@
using System;
using System.Collections.Generic;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.Extensions.Logging;
using StackExchange.Redis;
using StellaOps.Scheduler.Models;
namespace StellaOps.Scheduler.Queue.Redis;
internal sealed class RedisSchedulerPlannerQueue
: RedisSchedulerQueueBase<PlannerQueueMessage>, ISchedulerPlannerQueue
{
public RedisSchedulerPlannerQueue(
SchedulerQueueOptions queueOptions,
SchedulerRedisQueueOptions redisOptions,
ILogger<RedisSchedulerPlannerQueue> logger,
TimeProvider timeProvider,
Func<ConfigurationOptions, Task<IConnectionMultiplexer>>? connectionFactory = null)
: base(
queueOptions,
redisOptions,
redisOptions.Planner,
PlannerPayload.Instance,
logger,
timeProvider,
connectionFactory)
{
}
private sealed class PlannerPayload : IRedisSchedulerQueuePayload<PlannerQueueMessage>
{
public static PlannerPayload Instance { get; } = new();
public string QueueName => "planner";
public string GetIdempotencyKey(PlannerQueueMessage message)
=> message.IdempotencyKey;
public string Serialize(PlannerQueueMessage message)
=> CanonicalJsonSerializer.Serialize(message);
public PlannerQueueMessage Deserialize(string payload)
=> CanonicalJsonSerializer.Deserialize<PlannerQueueMessage>(payload);
public string GetRunId(PlannerQueueMessage message)
=> message.Run.Id;
public string GetTenantId(PlannerQueueMessage message)
=> message.Run.TenantId;
public string? GetScheduleId(PlannerQueueMessage message)
=> message.ScheduleId;
public string? GetSegmentId(PlannerQueueMessage message)
=> null;
public string? GetCorrelationId(PlannerQueueMessage message)
=> message.CorrelationId;
public IReadOnlyDictionary<string, string>? GetAttributes(PlannerQueueMessage message)
=> null;
}
}

View File

@@ -0,0 +1,805 @@
using System;
using System.Buffers;
using System.Collections.Generic;
using System.Collections.ObjectModel;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.Extensions.Logging;
using StackExchange.Redis;
namespace StellaOps.Scheduler.Queue.Redis;
internal abstract class RedisSchedulerQueueBase<TMessage> : ISchedulerQueue<TMessage>, IAsyncDisposable, ISchedulerQueueTransportDiagnostics
{
private const string TransportName = "redis";
private readonly SchedulerQueueOptions _queueOptions;
private readonly SchedulerRedisQueueOptions _redisOptions;
private readonly RedisSchedulerStreamOptions _streamOptions;
private readonly IRedisSchedulerQueuePayload<TMessage> _payload;
private readonly ILogger _logger;
private readonly TimeProvider _timeProvider;
private readonly Func<ConfigurationOptions, Task<IConnectionMultiplexer>> _connectionFactory;
private readonly SemaphoreSlim _connectionLock = new(1, 1);
private readonly SemaphoreSlim _groupInitLock = new(1, 1);
private long _approximateDepth;
private IConnectionMultiplexer? _connection;
private volatile bool _groupInitialized;
private bool _disposed;
protected RedisSchedulerQueueBase(
SchedulerQueueOptions queueOptions,
SchedulerRedisQueueOptions redisOptions,
RedisSchedulerStreamOptions streamOptions,
IRedisSchedulerQueuePayload<TMessage> payload,
ILogger logger,
TimeProvider timeProvider,
Func<ConfigurationOptions, Task<IConnectionMultiplexer>>? connectionFactory = null)
{
_queueOptions = queueOptions ?? throw new ArgumentNullException(nameof(queueOptions));
_redisOptions = redisOptions ?? throw new ArgumentNullException(nameof(redisOptions));
_streamOptions = streamOptions ?? throw new ArgumentNullException(nameof(streamOptions));
_payload = payload ?? throw new ArgumentNullException(nameof(payload));
_logger = logger ?? throw new ArgumentNullException(nameof(logger));
_timeProvider = timeProvider ?? throw new ArgumentNullException(nameof(timeProvider));
_connectionFactory = connectionFactory ?? (config => Task.FromResult<IConnectionMultiplexer>(ConnectionMultiplexer.Connect(config)));
if (string.IsNullOrWhiteSpace(_redisOptions.ConnectionString))
{
throw new InvalidOperationException("Redis connection string must be configured for the scheduler queue.");
}
if (string.IsNullOrWhiteSpace(_streamOptions.Stream))
{
throw new InvalidOperationException("Redis stream name must be configured for the scheduler queue.");
}
if (string.IsNullOrWhiteSpace(_streamOptions.ConsumerGroup))
{
throw new InvalidOperationException("Redis consumer group must be configured for the scheduler queue.");
}
}
public async ValueTask<SchedulerQueueEnqueueResult> EnqueueAsync(
TMessage message,
CancellationToken cancellationToken = default)
{
ArgumentNullException.ThrowIfNull(message);
cancellationToken.ThrowIfCancellationRequested();
var database = await GetDatabaseAsync(cancellationToken).ConfigureAwait(false);
await EnsureConsumerGroupAsync(database, cancellationToken).ConfigureAwait(false);
var now = _timeProvider.GetUtcNow();
var attempt = 1;
var entries = BuildEntries(message, now, attempt);
var messageId = await AddToStreamAsync(
database,
_streamOptions.Stream,
entries,
_streamOptions.ApproximateMaxLength,
_streamOptions.ApproximateMaxLength is not null)
.ConfigureAwait(false);
var idempotencyKey = BuildIdempotencyKey(_payload.GetIdempotencyKey(message));
var stored = await database.StringSetAsync(
idempotencyKey,
messageId,
when: When.NotExists,
expiry: _streamOptions.IdempotencyWindow)
.ConfigureAwait(false);
if (!stored)
{
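// Another producer already claimed this idempotency key inside the window:
// roll back the entry we just appended and hand back the original stream id
// so callers observe a single message.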
await database.StreamDeleteAsync(_streamOptions.Stream, new RedisValue[] { messageId }).ConfigureAwait(false);
var existing = await database.StringGetAsync(idempotencyKey).ConfigureAwait(false);
var reusable = existing.IsNullOrEmpty ? messageId : existing;
SchedulerQueueMetrics.RecordDeduplicated(TransportName, _payload.QueueName);
_logger.LogDebug(
"Duplicate enqueue detected for scheduler queue {Queue} with key {Key}; returning existing stream id {StreamId}.",
_payload.QueueName,
idempotencyKey,
reusable.ToString());
PublishDepth();
return new SchedulerQueueEnqueueResult(reusable.ToString(), true);
}
SchedulerQueueMetrics.RecordEnqueued(TransportName, _payload.QueueName);
_logger.LogDebug(
"Enqueued {Queue} message into {Stream} with id {StreamId}.",
_payload.QueueName,
_streamOptions.Stream,
messageId.ToString());
IncrementDepth();
return new SchedulerQueueEnqueueResult(messageId.ToString(), false);
}
public async ValueTask<IReadOnlyList<ISchedulerQueueLease<TMessage>>> LeaseAsync(
SchedulerQueueLeaseRequest request,
CancellationToken cancellationToken = default)
{
ArgumentNullException.ThrowIfNull(request);
cancellationToken.ThrowIfCancellationRequested();
var database = await GetDatabaseAsync(cancellationToken).ConfigureAwait(false);
await EnsureConsumerGroupAsync(database, cancellationToken).ConfigureAwait(false);
var entries = await database.StreamReadGroupAsync(
_streamOptions.Stream,
_streamOptions.ConsumerGroup,
request.Consumer,
position: ">",
count: request.BatchSize,
flags: CommandFlags.None)
.ConfigureAwait(false);
if (entries is null || entries.Length == 0)
{
PublishDepth();
return Array.Empty<ISchedulerQueueLease<TMessage>>();
}
var now = _timeProvider.GetUtcNow();
var leases = new List<ISchedulerQueueLease<TMessage>>(entries.Length);
foreach (var entry in entries)
{
var lease = TryMapLease(entry, request.Consumer, now, request.LeaseDuration, attemptOverride: null);
if (lease is null)
{
await HandlePoisonEntryAsync(database, entry.Id).ConfigureAwait(false);
continue;
}
leases.Add(lease);
}
PublishDepth();
return leases;
}
public async ValueTask<IReadOnlyList<ISchedulerQueueLease<TMessage>>> ClaimExpiredAsync(
SchedulerQueueClaimOptions options,
CancellationToken cancellationToken = default)
{
ArgumentNullException.ThrowIfNull(options);
cancellationToken.ThrowIfCancellationRequested();
var database = await GetDatabaseAsync(cancellationToken).ConfigureAwait(false);
await EnsureConsumerGroupAsync(database, cancellationToken).ConfigureAwait(false);
var pending = await database.StreamPendingMessagesAsync(
_streamOptions.Stream,
_streamOptions.ConsumerGroup,
options.BatchSize,
RedisValue.Null,
(long)options.MinIdleTime.TotalMilliseconds)
.ConfigureAwait(false);
if (pending is null || pending.Length == 0)
{
return Array.Empty<ISchedulerQueueLease<TMessage>>();
}
var eligible = pending
.Where(info => info.IdleTimeInMilliseconds >= options.MinIdleTime.TotalMilliseconds)
.ToArray();
if (eligible.Length == 0)
{
return Array.Empty<ISchedulerQueueLease<TMessage>>();
}
var messageIds = eligible
.Select(info => (RedisValue)info.MessageId)
.ToArray();
var claimed = await database.StreamClaimAsync(
_streamOptions.Stream,
_streamOptions.ConsumerGroup,
options.ClaimantConsumer,
0,
messageIds,
CommandFlags.None)
.ConfigureAwait(false);
if (claimed is null || claimed.Length == 0)
{
PublishDepth();
return Array.Empty<ISchedulerQueueLease<TMessage>>();
}
var now = _timeProvider.GetUtcNow();
var attemptLookup = eligible.ToDictionary(
info => info.MessageId.IsNullOrEmpty ? string.Empty : info.MessageId.ToString(),
info => (int)Math.Max(1, info.DeliveryCount),
StringComparer.Ordinal);
var leases = new List<ISchedulerQueueLease<TMessage>>(claimed.Length);
foreach (var entry in claimed)
{
var entryId = entry.Id.ToString();
attemptLookup.TryGetValue(entryId, out var attempt);
var lease = TryMapLease(
entry,
options.ClaimantConsumer,
now,
_queueOptions.DefaultLeaseDuration,
attemptOverride: attempt);
if (lease is null)
{
await HandlePoisonEntryAsync(database, entry.Id).ConfigureAwait(false);
continue;
}
leases.Add(lease);
}
PublishDepth();
return leases;
}
public async ValueTask DisposeAsync()
{
if (_disposed)
{
return;
}
_disposed = true;
if (_connection is not null)
{
await _connection.CloseAsync().ConfigureAwait(false);
_connection.Dispose();
}
_connectionLock.Dispose();
_groupInitLock.Dispose();
SchedulerQueueMetrics.RemoveDepth(TransportName, _payload.QueueName);
GC.SuppressFinalize(this);
}
internal async Task AcknowledgeAsync(
RedisSchedulerQueueLease<TMessage> lease,
CancellationToken cancellationToken)
{
if (!lease.TryBeginCompletion())
{
return;
}
var database = await GetDatabaseAsync(cancellationToken).ConfigureAwait(false);
await database.StreamAcknowledgeAsync(
_streamOptions.Stream,
_streamOptions.ConsumerGroup,
new RedisValue[] { lease.MessageId })
.ConfigureAwait(false);
await database.StreamDeleteAsync(
_streamOptions.Stream,
new RedisValue[] { lease.MessageId })
.ConfigureAwait(false);
SchedulerQueueMetrics.RecordAck(TransportName, _payload.QueueName);
DecrementDepth();
}
internal async Task RenewLeaseAsync(
RedisSchedulerQueueLease<TMessage> lease,
TimeSpan leaseDuration,
CancellationToken cancellationToken)
{
var database = await GetDatabaseAsync(cancellationToken).ConfigureAwait(false);
await database.StreamClaimAsync(
_streamOptions.Stream,
_streamOptions.ConsumerGroup,
lease.Consumer,
0,
new RedisValue[] { lease.MessageId },
CommandFlags.None)
.ConfigureAwait(false);
var expires = _timeProvider.GetUtcNow().Add(leaseDuration);
lease.RefreshLease(expires);
}
internal async Task ReleaseAsync(
RedisSchedulerQueueLease<TMessage> lease,
SchedulerQueueReleaseDisposition disposition,
CancellationToken cancellationToken)
{
if (disposition == SchedulerQueueReleaseDisposition.Retry
&& lease.Attempt >= _queueOptions.MaxDeliveryAttempts)
{
await DeadLetterAsync(
lease,
$"max-delivery-attempts:{lease.Attempt}",
cancellationToken).ConfigureAwait(false);
return;
}
if (!lease.TryBeginCompletion())
{
return;
}
var database = await GetDatabaseAsync(cancellationToken).ConfigureAwait(false);
await database.StreamAcknowledgeAsync(
_streamOptions.Stream,
_streamOptions.ConsumerGroup,
new RedisValue[] { lease.MessageId })
.ConfigureAwait(false);
await database.StreamDeleteAsync(
_streamOptions.Stream,
new RedisValue[] { lease.MessageId })
.ConfigureAwait(false);
SchedulerQueueMetrics.RecordAck(TransportName, _payload.QueueName);
DecrementDepth();
if (disposition == SchedulerQueueReleaseDisposition.Retry)
{
SchedulerQueueMetrics.RecordRetry(TransportName, _payload.QueueName);
lease.IncrementAttempt();
var backoff = CalculateBackoff(lease.Attempt);
if (backoff > TimeSpan.Zero)
{
try
{
await Task.Delay(backoff, cancellationToken).ConfigureAwait(false);
}
catch (TaskCanceledException)
{
return;
}
}
var now = _timeProvider.GetUtcNow();
var entries = BuildEntries(lease.Message, now, lease.Attempt);
await AddToStreamAsync(
database,
_streamOptions.Stream,
entries,
_streamOptions.ApproximateMaxLength,
_streamOptions.ApproximateMaxLength is not null)
.ConfigureAwait(false);
SchedulerQueueMetrics.RecordEnqueued(TransportName, _payload.QueueName);
IncrementDepth();
}
}
internal async Task DeadLetterAsync(
RedisSchedulerQueueLease<TMessage> lease,
string reason,
CancellationToken cancellationToken)
{
if (!lease.TryBeginCompletion())
{
return;
}
var database = await GetDatabaseAsync(cancellationToken).ConfigureAwait(false);
await database.StreamAcknowledgeAsync(
_streamOptions.Stream,
_streamOptions.ConsumerGroup,
new RedisValue[] { lease.MessageId })
.ConfigureAwait(false);
await database.StreamDeleteAsync(
_streamOptions.Stream,
new RedisValue[] { lease.MessageId })
.ConfigureAwait(false);
DecrementDepth();
if (!_queueOptions.DeadLetterEnabled)
{
_logger.LogWarning(
"Dropped {Queue} message {MessageId} after {Attempt} attempt(s); dead-letter disabled. Reason: {Reason}",
_payload.QueueName,
lease.MessageId,
lease.Attempt,
reason);
return;
}
var now = _timeProvider.GetUtcNow();
var entries = BuildEntries(lease.Message, now, lease.Attempt);
await AddToStreamAsync(
database,
_streamOptions.DeadLetterStream,
entries,
null,
false)
.ConfigureAwait(false);
SchedulerQueueMetrics.RecordDeadLetter(TransportName, _payload.QueueName);
_logger.LogError(
"Dead-lettered {Queue} message {MessageId} after {Attempt} attempt(s): {Reason}",
_payload.QueueName,
lease.MessageId,
lease.Attempt,
reason);
}
public async ValueTask PingAsync(CancellationToken cancellationToken)
{
var database = await GetDatabaseAsync(cancellationToken).ConfigureAwait(false);
await database.ExecuteAsync("PING").ConfigureAwait(false);
}
private string BuildIdempotencyKey(string key)
=> string.Concat(_streamOptions.IdempotencyKeyPrefix, key);
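/// <summary>
/// Exponential backoff floored at the initial delay and capped at RetryMaxBackoff.
/// With the defaults (5s initial, 1m cap) attempts 1..7 wait 5s, 5s, 10s, 20s, 40s, 60s, 60s.
/// </summary>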
private TimeSpan CalculateBackoff(int attempt)
{
if (attempt <= 1)
{
return _queueOptions.RetryInitialBackoff > TimeSpan.Zero
? _queueOptions.RetryInitialBackoff
: TimeSpan.Zero;
}
var initial = _queueOptions.RetryInitialBackoff > TimeSpan.Zero
? _queueOptions.RetryInitialBackoff
: TimeSpan.Zero;
if (initial <= TimeSpan.Zero)
{
return TimeSpan.Zero;
}
var max = _queueOptions.RetryMaxBackoff > TimeSpan.Zero
? _queueOptions.RetryMaxBackoff
: initial;
var exponent = attempt - 1;
var scaledTicks = initial.Ticks * Math.Pow(2, exponent - 1);
var cappedTicks = Math.Min(max.Ticks, scaledTicks);
return TimeSpan.FromTicks((long)Math.Max(initial.Ticks, cappedTicks));
}
private async ValueTask<IDatabase> GetDatabaseAsync(CancellationToken cancellationToken)
{
if (_connection is not null)
{
return _connection.GetDatabase(_redisOptions.Database ?? -1);
}
await _connectionLock.WaitAsync(cancellationToken).ConfigureAwait(false);
try
{
if (_connection is null)
{
var config = ConfigurationOptions.Parse(_redisOptions.ConnectionString!);
config.AbortOnConnectFail = false;
config.ConnectTimeout = (int)_redisOptions.InitializationTimeout.TotalMilliseconds;
config.ConnectRetry = 3;
if (_redisOptions.Database is not null)
{
config.DefaultDatabase = _redisOptions.Database;
}
_connection = await _connectionFactory(config).ConfigureAwait(false);
}
}
finally
{
_connectionLock.Release();
}
return _connection.GetDatabase(_redisOptions.Database ?? -1);
}
private async Task EnsureConsumerGroupAsync(
IDatabase database,
CancellationToken cancellationToken)
{
if (_groupInitialized)
{
return;
}
await _groupInitLock.WaitAsync(cancellationToken).ConfigureAwait(false);
try
{
if (_groupInitialized)
{
return;
}
try
{
await database.StreamCreateConsumerGroupAsync(
_streamOptions.Stream,
_streamOptions.ConsumerGroup,
StreamPosition.Beginning,
createStream: true)
.ConfigureAwait(false);
}
catch (RedisServerException ex) when (ex.Message.Contains("BUSYGROUP", StringComparison.OrdinalIgnoreCase))
{
// Group already exists.
}
_groupInitialized = true;
}
finally
{
_groupInitLock.Release();
}
}
private NameValueEntry[] BuildEntries(
TMessage message,
DateTimeOffset enqueuedAt,
int attempt)
{
var attributes = _payload.GetAttributes(message);
var attributeCount = attributes?.Count ?? 0;
var entries = ArrayPool<NameValueEntry>.Shared.Rent(10 + attributeCount);
var index = 0;
entries[index++] = new NameValueEntry(SchedulerQueueFields.QueueKind, _payload.QueueName);
entries[index++] = new NameValueEntry(SchedulerQueueFields.RunId, _payload.GetRunId(message));
entries[index++] = new NameValueEntry(SchedulerQueueFields.TenantId, _payload.GetTenantId(message));
var scheduleId = _payload.GetScheduleId(message);
if (!string.IsNullOrWhiteSpace(scheduleId))
{
entries[index++] = new NameValueEntry(SchedulerQueueFields.ScheduleId, scheduleId);
}
var segmentId = _payload.GetSegmentId(message);
if (!string.IsNullOrWhiteSpace(segmentId))
{
entries[index++] = new NameValueEntry(SchedulerQueueFields.SegmentId, segmentId);
}
var correlationId = _payload.GetCorrelationId(message);
if (!string.IsNullOrWhiteSpace(correlationId))
{
entries[index++] = new NameValueEntry(SchedulerQueueFields.CorrelationId, correlationId);
}
entries[index++] = new NameValueEntry(SchedulerQueueFields.IdempotencyKey, _payload.GetIdempotencyKey(message));
entries[index++] = new NameValueEntry(SchedulerQueueFields.Attempt, attempt);
entries[index++] = new NameValueEntry(SchedulerQueueFields.EnqueuedAt, enqueuedAt.ToUnixTimeMilliseconds());
entries[index++] = new NameValueEntry(SchedulerQueueFields.Payload, _payload.Serialize(message));
if (attributeCount > 0 && attributes is not null)
{
foreach (var kvp in attributes)
{
entries[index++] = new NameValueEntry(
SchedulerQueueFields.AttributePrefix + kvp.Key,
kvp.Value);
}
}
var result = entries.AsSpan(0, index).ToArray();
ArrayPool<NameValueEntry>.Shared.Return(entries, clearArray: true);
return result;
}
private RedisSchedulerQueueLease<TMessage>? TryMapLease(
StreamEntry entry,
string consumer,
DateTimeOffset now,
TimeSpan leaseDuration,
int? attemptOverride)
{
if (entry.Values is null || entry.Values.Length == 0)
{
return null;
}
string? payload = null;
string? runId = null;
string? tenantId = null;
string? scheduleId = null;
string? segmentId = null;
string? correlationId = null;
string? idempotencyKey = null;
long? enqueuedAtUnix = null;
var attempt = attemptOverride ?? 1;
var attributes = new Dictionary<string, string>(StringComparer.Ordinal);
foreach (var field in entry.Values)
{
var name = field.Name.ToString();
var value = field.Value;
if (name.Equals(SchedulerQueueFields.Payload, StringComparison.Ordinal))
{
payload = value.ToString();
}
else if (name.Equals(SchedulerQueueFields.RunId, StringComparison.Ordinal))
{
runId = value.ToString();
}
else if (name.Equals(SchedulerQueueFields.TenantId, StringComparison.Ordinal))
{
tenantId = value.ToString();
}
else if (name.Equals(SchedulerQueueFields.ScheduleId, StringComparison.Ordinal))
{
scheduleId = NormalizeOptional(value.ToString());
}
else if (name.Equals(SchedulerQueueFields.SegmentId, StringComparison.Ordinal))
{
segmentId = NormalizeOptional(value.ToString());
}
else if (name.Equals(SchedulerQueueFields.CorrelationId, StringComparison.Ordinal))
{
correlationId = NormalizeOptional(value.ToString());
}
else if (name.Equals(SchedulerQueueFields.IdempotencyKey, StringComparison.Ordinal))
{
idempotencyKey = value.ToString();
}
else if (name.Equals(SchedulerQueueFields.EnqueuedAt, StringComparison.Ordinal))
{
if (long.TryParse(value.ToString(), out var unixMs))
{
enqueuedAtUnix = unixMs;
}
}
else if (name.Equals(SchedulerQueueFields.Attempt, StringComparison.Ordinal))
{
if (int.TryParse(value.ToString(), out var parsedAttempt))
{
attempt = attemptOverride.HasValue
? Math.Max(attemptOverride.Value, parsedAttempt)
: Math.Max(1, parsedAttempt);
}
}
else if (name.StartsWith(SchedulerQueueFields.AttributePrefix, StringComparison.Ordinal))
{
var key = name[SchedulerQueueFields.AttributePrefix.Length..];
attributes[key] = value.ToString();
}
}
if (payload is null || runId is null || tenantId is null || enqueuedAtUnix is null || idempotencyKey is null)
{
return null;
}
var message = _payload.Deserialize(payload);
var enqueuedAt = DateTimeOffset.FromUnixTimeMilliseconds(enqueuedAtUnix.Value);
var leaseExpires = now.Add(leaseDuration);
IReadOnlyDictionary<string, string> attributeView = attributes.Count == 0
? EmptyReadOnlyDictionary<string, string>.Instance
: new ReadOnlyDictionary<string, string>(attributes);
return new RedisSchedulerQueueLease<TMessage>(
this,
entry.Id.ToString(),
idempotencyKey,
runId,
tenantId,
scheduleId,
segmentId,
correlationId,
attributeView,
message,
attempt,
enqueuedAt,
leaseExpires,
consumer);
}
private async Task HandlePoisonEntryAsync(IDatabase database, RedisValue entryId)
{
await database.StreamAcknowledgeAsync(
_streamOptions.Stream,
_streamOptions.ConsumerGroup,
new RedisValue[] { entryId })
.ConfigureAwait(false);
await database.StreamDeleteAsync(
_streamOptions.Stream,
new RedisValue[] { entryId })
.ConfigureAwait(false);
}
private async Task<RedisValue> AddToStreamAsync(
IDatabase database,
RedisKey stream,
NameValueEntry[] entries,
int? maxLength,
bool useApproximateLength)
{
// Capacity: stream key + MAXLEN/"~"/count + "*" id + one name/value pair per entry.
var capacity = 5 + (entries.Length * 2);
var args = new List<object>(capacity)
{
stream
};
if (maxLength.HasValue)
{
args.Add("MAXLEN");
if (useApproximateLength)
{
args.Add("~");
}
args.Add(maxLength.Value);
}
args.Add("*");
for (var i = 0; i < entries.Length; i++)
{
args.Add(entries[i].Name);
args.Add(entries[i].Value);
}
var result = await database.ExecuteAsync("XADD", args.ToArray()).ConfigureAwait(false);
return (RedisValue)result!;
}
private void IncrementDepth()
{
var depth = Interlocked.Increment(ref _approximateDepth);
SchedulerQueueMetrics.RecordDepth(TransportName, _payload.QueueName, depth);
}
private void DecrementDepth()
{
var depth = Interlocked.Decrement(ref _approximateDepth);
if (depth < 0)
{
depth = Interlocked.Exchange(ref _approximateDepth, 0);
}
SchedulerQueueMetrics.RecordDepth(TransportName, _payload.QueueName, depth);
}
private void PublishDepth()
{
var depth = Volatile.Read(ref _approximateDepth);
SchedulerQueueMetrics.RecordDepth(TransportName, _payload.QueueName, depth);
}
private static string? NormalizeOptional(string? value)
{
if (string.IsNullOrWhiteSpace(value))
{
return null;
}
return value;
}
private sealed class EmptyReadOnlyDictionary<TKey, TValue>
where TKey : notnull
{
public static readonly IReadOnlyDictionary<TKey, TValue> Instance =
new ReadOnlyDictionary<TKey, TValue>(new Dictionary<TKey, TValue>(0, EqualityComparer<TKey>.Default));
}
}

View File

@@ -0,0 +1,91 @@
using System;
using System.Collections.Generic;
using System.Threading;
using System.Threading.Tasks;
namespace StellaOps.Scheduler.Queue.Redis;
internal sealed class RedisSchedulerQueueLease<TMessage> : ISchedulerQueueLease<TMessage>
{
private readonly RedisSchedulerQueueBase<TMessage> _queue;
private int _completed;
internal RedisSchedulerQueueLease(
RedisSchedulerQueueBase<TMessage> queue,
string messageId,
string idempotencyKey,
string runId,
string tenantId,
string? scheduleId,
string? segmentId,
string? correlationId,
IReadOnlyDictionary<string, string> attributes,
TMessage message,
int attempt,
DateTimeOffset enqueuedAt,
DateTimeOffset leaseExpiresAt,
string consumer)
{
_queue = queue;
MessageId = messageId;
IdempotencyKey = idempotencyKey;
RunId = runId;
TenantId = tenantId;
ScheduleId = scheduleId;
SegmentId = segmentId;
CorrelationId = correlationId;
Attributes = attributes;
Message = message;
Attempt = attempt;
EnqueuedAt = enqueuedAt;
LeaseExpiresAt = leaseExpiresAt;
Consumer = consumer;
}
public string MessageId { get; }
public string IdempotencyKey { get; }
public string RunId { get; }
public string TenantId { get; }
public string? ScheduleId { get; }
public string? SegmentId { get; }
public string? CorrelationId { get; }
public IReadOnlyDictionary<string, string> Attributes { get; }
public TMessage Message { get; }
public int Attempt { get; private set; }
public DateTimeOffset EnqueuedAt { get; }
public DateTimeOffset LeaseExpiresAt { get; private set; }
public string Consumer { get; }
public Task AcknowledgeAsync(CancellationToken cancellationToken = default)
=> _queue.AcknowledgeAsync(this, cancellationToken);
public Task RenewAsync(TimeSpan leaseDuration, CancellationToken cancellationToken = default)
=> _queue.RenewLeaseAsync(this, leaseDuration, cancellationToken);
public Task ReleaseAsync(SchedulerQueueReleaseDisposition disposition, CancellationToken cancellationToken = default)
=> _queue.ReleaseAsync(this, disposition, cancellationToken);
public Task DeadLetterAsync(string reason, CancellationToken cancellationToken = default)
=> _queue.DeadLetterAsync(this, reason, cancellationToken);
internal bool TryBeginCompletion()
=> Interlocked.CompareExchange(ref _completed, 1, 0) == 0;
internal void RefreshLease(DateTimeOffset expiresAt)
=> LeaseExpiresAt = expiresAt;
internal void IncrementAttempt()
=> Attempt++;
}

View File

@@ -0,0 +1,90 @@
using System;
using System.Collections.Generic;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.Extensions.Logging;
using StackExchange.Redis;
using StellaOps.Scheduler.Models;
namespace StellaOps.Scheduler.Queue.Redis;
internal sealed class RedisSchedulerRunnerQueue
: RedisSchedulerQueueBase<RunnerSegmentQueueMessage>, ISchedulerRunnerQueue
{
public RedisSchedulerRunnerQueue(
SchedulerQueueOptions queueOptions,
SchedulerRedisQueueOptions redisOptions,
ILogger<RedisSchedulerRunnerQueue> logger,
TimeProvider timeProvider,
Func<ConfigurationOptions, Task<IConnectionMultiplexer>>? connectionFactory = null)
: base(
queueOptions,
redisOptions,
redisOptions.Runner,
RunnerPayload.Instance,
logger,
timeProvider,
connectionFactory)
{
}
private sealed class RunnerPayload : IRedisSchedulerQueuePayload<RunnerSegmentQueueMessage>
{
public static RunnerPayload Instance { get; } = new();
public string QueueName => "runner";
public string GetIdempotencyKey(RunnerSegmentQueueMessage message)
=> message.IdempotencyKey;
public string Serialize(RunnerSegmentQueueMessage message)
=> CanonicalJsonSerializer.Serialize(message);
public RunnerSegmentQueueMessage Deserialize(string payload)
=> CanonicalJsonSerializer.Deserialize<RunnerSegmentQueueMessage>(payload);
public string GetRunId(RunnerSegmentQueueMessage message)
=> message.RunId;
public string GetTenantId(RunnerSegmentQueueMessage message)
=> message.TenantId;
public string? GetScheduleId(RunnerSegmentQueueMessage message)
=> message.ScheduleId;
public string? GetSegmentId(RunnerSegmentQueueMessage message)
=> message.SegmentId;
public string? GetCorrelationId(RunnerSegmentQueueMessage message)
=> message.CorrelationId;
public IReadOnlyDictionary<string, string>? GetAttributes(RunnerSegmentQueueMessage message)
{
if (message.Attributes.Count == 0 && message.ImageDigests.Count == 0)
{
return null;
}
// Ensure digests remain accessible without deserializing the entire payload.
var map = new Dictionary<string, string>(message.Attributes, StringComparer.Ordinal);
map["imageDigestCount"] = message.ImageDigests.Count.ToString();
// Populate the first few digests for quick inspection (bounded to five).
var take = Math.Min(message.ImageDigests.Count, 5);
for (var i = 0; i < take; i++)
{
map[$"digest{i}"] = message.ImageDigests[i];
}
if (message.RatePerSecond.HasValue)
{
map["ratePerSecond"] = message.RatePerSecond.Value.ToString();
}
map["usageOnly"] = message.UsageOnly ? "true" : "false";
return map;
}
}
}

View File

@@ -0,0 +1,274 @@
using System;
using System.Collections.Generic;
using System.Collections.ObjectModel;
using System.Text.Json.Serialization;
using System.Threading;
using System.Threading.Tasks;
using StellaOps.Scheduler.Models;
namespace StellaOps.Scheduler.Queue;
public sealed class PlannerQueueMessage
{
[JsonConstructor]
public PlannerQueueMessage(
Run run,
ImpactSet impactSet,
Schedule? schedule = null,
string? correlationId = null)
{
Run = run ?? throw new ArgumentNullException(nameof(run));
ImpactSet = impactSet ?? throw new ArgumentNullException(nameof(impactSet));
if (schedule is not null && string.IsNullOrWhiteSpace(schedule.Id))
{
throw new ArgumentException("Schedule must have a valid identifier.", nameof(schedule));
}
if (!string.IsNullOrWhiteSpace(correlationId))
{
correlationId = correlationId!.Trim();
}
Schedule = schedule;
CorrelationId = string.IsNullOrWhiteSpace(correlationId) ? null : correlationId;
}
public Run Run { get; }
public ImpactSet ImpactSet { get; }
public Schedule? Schedule { get; }
public string? CorrelationId { get; }
public string IdempotencyKey => Run.Id;
public string TenantId => Run.TenantId;
public string? ScheduleId => Run.ScheduleId;
}
public sealed class RunnerSegmentQueueMessage
{
private readonly ReadOnlyCollection<string> _imageDigests;
private readonly IReadOnlyDictionary<string, string> _attributes;
[JsonConstructor]
public RunnerSegmentQueueMessage(
string segmentId,
string runId,
string tenantId,
IReadOnlyList<string> imageDigests,
string? scheduleId = null,
int? ratePerSecond = null,
bool usageOnly = true,
IReadOnlyDictionary<string, string>? attributes = null,
string? correlationId = null)
{
if (string.IsNullOrWhiteSpace(segmentId))
{
throw new ArgumentException("Segment identifier must be provided.", nameof(segmentId));
}
if (string.IsNullOrWhiteSpace(runId))
{
throw new ArgumentException("Run identifier must be provided.", nameof(runId));
}
if (string.IsNullOrWhiteSpace(tenantId))
{
throw new ArgumentException("Tenant identifier must be provided.", nameof(tenantId));
}
SegmentId = segmentId;
RunId = runId;
TenantId = tenantId;
ScheduleId = string.IsNullOrWhiteSpace(scheduleId) ? null : scheduleId;
RatePerSecond = ratePerSecond;
UsageOnly = usageOnly;
CorrelationId = string.IsNullOrWhiteSpace(correlationId) ? null : correlationId;
_imageDigests = new ReadOnlyCollection<string>(NormalizeDigests(imageDigests));
_attributes = attributes is null
? EmptyReadOnlyDictionary<string, string>.Instance
: new ReadOnlyDictionary<string, string>(new Dictionary<string, string>(attributes, StringComparer.Ordinal));
}
public string SegmentId { get; }
public string RunId { get; }
public string TenantId { get; }
public string? ScheduleId { get; }
public int? RatePerSecond { get; }
public bool UsageOnly { get; }
public string? CorrelationId { get; }
public IReadOnlyList<string> ImageDigests => _imageDigests;
public IReadOnlyDictionary<string, string> Attributes => _attributes;
public string IdempotencyKey => SegmentId;
private static List<string> NormalizeDigests(IReadOnlyList<string> digests)
{
if (digests is null)
{
throw new ArgumentNullException(nameof(digests));
}
var list = new List<string>();
foreach (var digest in digests)
{
if (string.IsNullOrWhiteSpace(digest))
{
continue;
}
list.Add(digest.Trim());
}
if (list.Count == 0)
{
throw new ArgumentException("At least one image digest must be provided.", nameof(digests));
}
return list;
}
private sealed class EmptyReadOnlyDictionary<TKey, TValue>
where TKey : notnull
{
public static readonly IReadOnlyDictionary<TKey, TValue> Instance =
new ReadOnlyDictionary<TKey, TValue>(new Dictionary<TKey, TValue>(0, EqualityComparer<TKey>.Default));
}
}
public readonly record struct SchedulerQueueEnqueueResult(string MessageId, bool Deduplicated);
public sealed class SchedulerQueueLeaseRequest
{
public SchedulerQueueLeaseRequest(string consumer, int batchSize, TimeSpan leaseDuration)
{
if (string.IsNullOrWhiteSpace(consumer))
{
throw new ArgumentException("Consumer identifier must be provided.", nameof(consumer));
}
if (batchSize <= 0)
{
throw new ArgumentOutOfRangeException(nameof(batchSize), batchSize, "Batch size must be positive.");
}
if (leaseDuration <= TimeSpan.Zero)
{
throw new ArgumentOutOfRangeException(nameof(leaseDuration), leaseDuration, "Lease duration must be positive.");
}
Consumer = consumer;
BatchSize = batchSize;
LeaseDuration = leaseDuration;
}
public string Consumer { get; }
public int BatchSize { get; }
public TimeSpan LeaseDuration { get; }
}
public sealed class SchedulerQueueClaimOptions
{
public SchedulerQueueClaimOptions(string claimantConsumer, int batchSize, TimeSpan minIdleTime)
{
if (string.IsNullOrWhiteSpace(claimantConsumer))
{
throw new ArgumentException("Consumer identifier must be provided.", nameof(claimantConsumer));
}
if (batchSize <= 0)
{
throw new ArgumentOutOfRangeException(nameof(batchSize), batchSize, "Batch size must be positive.");
}
if (minIdleTime < TimeSpan.Zero)
{
throw new ArgumentOutOfRangeException(nameof(minIdleTime), minIdleTime, "Idle time cannot be negative.");
}
ClaimantConsumer = claimantConsumer;
BatchSize = batchSize;
MinIdleTime = minIdleTime;
}
public string ClaimantConsumer { get; }
public int BatchSize { get; }
public TimeSpan MinIdleTime { get; }
}
public enum SchedulerQueueReleaseDisposition
{
Retry,
Abandon
}
public interface ISchedulerQueue<TMessage>
{
ValueTask<SchedulerQueueEnqueueResult> EnqueueAsync(TMessage message, CancellationToken cancellationToken = default);
ValueTask<IReadOnlyList<ISchedulerQueueLease<TMessage>>> LeaseAsync(SchedulerQueueLeaseRequest request, CancellationToken cancellationToken = default);
ValueTask<IReadOnlyList<ISchedulerQueueLease<TMessage>>> ClaimExpiredAsync(SchedulerQueueClaimOptions options, CancellationToken cancellationToken = default);
}
public interface ISchedulerQueueLease<out TMessage>
{
string MessageId { get; }
int Attempt { get; }
DateTimeOffset EnqueuedAt { get; }
DateTimeOffset LeaseExpiresAt { get; }
string Consumer { get; }
string TenantId { get; }
string RunId { get; }
string? ScheduleId { get; }
string? SegmentId { get; }
string? CorrelationId { get; }
string IdempotencyKey { get; }
IReadOnlyDictionary<string, string> Attributes { get; }
TMessage Message { get; }
Task AcknowledgeAsync(CancellationToken cancellationToken = default);
Task RenewAsync(TimeSpan leaseDuration, CancellationToken cancellationToken = default);
Task ReleaseAsync(SchedulerQueueReleaseDisposition disposition, CancellationToken cancellationToken = default);
Task DeadLetterAsync(string reason, CancellationToken cancellationToken = default);
}
public interface ISchedulerPlannerQueue : ISchedulerQueue<PlannerQueueMessage>
{
}
public interface ISchedulerRunnerQueue : ISchedulerQueue<RunnerSegmentQueueMessage>
{
}
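// Typical consumer loop (sketch; the consumer name, batch size, and ProcessAsync helper are illustrative):
//
//   var leases = await queue.LeaseAsync(
//       new SchedulerQueueLeaseRequest("runner-1", batchSize: 16, leaseDuration: TimeSpan.FromMinutes(5)), ct);
//   foreach (var lease in leases)
//   {
//       try
//       {
//           await ProcessAsync(lease.Message, ct);
//           await lease.AcknowledgeAsync(ct);
//       }
//       catch
//       {
//           // Retry redelivers with backoff; the transport dead-letters once MaxDeliveryAttempts is exceeded.
//           await lease.ReleaseAsync(SchedulerQueueReleaseDisposition.Retry, ct);
//       }
//   }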

View File

@@ -0,0 +1,16 @@
namespace StellaOps.Scheduler.Queue;
internal static class SchedulerQueueFields
{
public const string Payload = "payload";
public const string Attempt = "attempt";
public const string EnqueuedAt = "enqueuedAt";
public const string IdempotencyKey = "idempotency";
public const string RunId = "runId";
public const string TenantId = "tenantId";
public const string ScheduleId = "scheduleId";
public const string SegmentId = "segmentId";
public const string QueueKind = "queueKind";
public const string CorrelationId = "correlationId";
public const string AttributePrefix = "attr:";
}
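// A runner stream entry built from these fields looks like (values abridged, illustrative only):
//   queueKind=runner, runId=run-1, tenantId=tenant-a, idempotency=segment-1,
//   attempt=1, enqueuedAt=<unix-ms>, payload=<canonical-json>, attr:usageOnly=true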

View File

@@ -0,0 +1,72 @@
using System;
using System.Collections.Generic;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.Extensions.Diagnostics.HealthChecks;
using Microsoft.Extensions.Logging;
namespace StellaOps.Scheduler.Queue;
public sealed class SchedulerQueueHealthCheck : IHealthCheck
{
private readonly ISchedulerPlannerQueue _plannerQueue;
private readonly ISchedulerRunnerQueue _runnerQueue;
private readonly ILogger<SchedulerQueueHealthCheck> _logger;
public SchedulerQueueHealthCheck(
ISchedulerPlannerQueue plannerQueue,
ISchedulerRunnerQueue runnerQueue,
ILogger<SchedulerQueueHealthCheck> logger)
{
_plannerQueue = plannerQueue ?? throw new ArgumentNullException(nameof(plannerQueue));
_runnerQueue = runnerQueue ?? throw new ArgumentNullException(nameof(runnerQueue));
_logger = logger ?? throw new ArgumentNullException(nameof(logger));
}
public async Task<HealthCheckResult> CheckHealthAsync(
HealthCheckContext context,
CancellationToken cancellationToken = default)
{
cancellationToken.ThrowIfCancellationRequested();
var failures = new List<string>();
if (!await ProbeAsync(_plannerQueue, "planner", cancellationToken).ConfigureAwait(false))
{
failures.Add("planner transport unreachable");
}
if (!await ProbeAsync(_runnerQueue, "runner", cancellationToken).ConfigureAwait(false))
{
failures.Add("runner transport unreachable");
}
if (failures.Count == 0)
{
return HealthCheckResult.Healthy("Scheduler queues reachable.");
}
var description = string.Join("; ", failures);
return new HealthCheckResult(
context.Registration.FailureStatus,
description);
}
private async Task<bool> ProbeAsync(object queue, string label, CancellationToken cancellationToken)
{
try
{
// Transports that do not expose diagnostics are assumed reachable.
if (queue is ISchedulerQueueTransportDiagnostics diagnostics)
{
await diagnostics.PingAsync(cancellationToken).ConfigureAwait(false);
}
return true;
}
catch (Exception ex)
{
_logger.LogError(ex, "Scheduler {Label} queue transport ping failed.", label);
return false;
}
}
}

View File

@@ -0,0 +1,65 @@
using System.Collections.Concurrent;
using System.Collections.Generic;
using System.Diagnostics.Metrics;
using System.Linq;
namespace StellaOps.Scheduler.Queue;
internal static class SchedulerQueueMetrics
{
private const string TransportTagName = "transport";
private const string QueueTagName = "queue";
private static readonly Meter Meter = new("StellaOps.Scheduler.Queue");
private static readonly Counter<long> EnqueuedCounter = Meter.CreateCounter<long>("scheduler_queue_enqueued_total");
private static readonly Counter<long> DeduplicatedCounter = Meter.CreateCounter<long>("scheduler_queue_deduplicated_total");
private static readonly Counter<long> AckCounter = Meter.CreateCounter<long>("scheduler_queue_ack_total");
private static readonly Counter<long> RetryCounter = Meter.CreateCounter<long>("scheduler_queue_retry_total");
private static readonly Counter<long> DeadLetterCounter = Meter.CreateCounter<long>("scheduler_queue_deadletter_total");
private static readonly ConcurrentDictionary<(string transport, string queue), long> DepthSamples = new();
private static readonly ObservableGauge<long> DepthGauge = Meter.CreateObservableGauge<long>(
"scheduler_queue_depth",
ObserveDepth);
public static void RecordEnqueued(string transport, string queue)
=> EnqueuedCounter.Add(1, BuildTags(transport, queue));
public static void RecordDeduplicated(string transport, string queue)
=> DeduplicatedCounter.Add(1, BuildTags(transport, queue));
public static void RecordAck(string transport, string queue)
=> AckCounter.Add(1, BuildTags(transport, queue));
public static void RecordRetry(string transport, string queue)
=> RetryCounter.Add(1, BuildTags(transport, queue));
public static void RecordDeadLetter(string transport, string queue)
=> DeadLetterCounter.Add(1, BuildTags(transport, queue));
public static void RecordDepth(string transport, string queue, long depth)
=> DepthSamples[(transport, queue)] = depth;
public static void RemoveDepth(string transport, string queue)
=> DepthSamples.TryRemove((transport, queue), out _);
internal static IReadOnlyDictionary<(string transport, string queue), long> SnapshotDepths()
=> DepthSamples.ToDictionary(pair => pair.Key, pair => pair.Value);
private static KeyValuePair<string, object?>[] BuildTags(string transport, string queue)
=> new[]
{
new KeyValuePair<string, object?>(TransportTagName, transport),
new KeyValuePair<string, object?>(QueueTagName, queue)
};
private static IEnumerable<Measurement<long>> ObserveDepth()
{
foreach (var sample in DepthSamples)
{
yield return new Measurement<long>(
sample.Value,
new KeyValuePair<string, object?>(TransportTagName, sample.Key.transport),
new KeyValuePair<string, object?>(QueueTagName, sample.Key.queue));
}
}
}

View File

@@ -0,0 +1,134 @@
using System;
namespace StellaOps.Scheduler.Queue;
public sealed class SchedulerQueueOptions
{
public SchedulerQueueTransportKind Kind { get; set; } = SchedulerQueueTransportKind.Redis;
public SchedulerRedisQueueOptions Redis { get; set; } = new();
public SchedulerNatsQueueOptions Nats { get; set; } = new();
/// <summary>
/// Default lease/visibility window applied when callers do not override the duration.
/// </summary>
public TimeSpan DefaultLeaseDuration { get; set; } = TimeSpan.FromMinutes(5);
/// <summary>
/// Maximum number of deliveries before a message is shunted to the dead-letter stream.
/// </summary>
public int MaxDeliveryAttempts { get; set; } = 5;
/// <summary>
/// Enables routing exhausted deliveries to the configured dead-letter transport.
/// When disabled, messages exceeding the attempt budget are dropped after acknowledgement.
/// </summary>
public bool DeadLetterEnabled { get; set; } = true;
/// <summary>
/// Base retry delay used when a message is released for retry.
/// </summary>
public TimeSpan RetryInitialBackoff { get; set; } = TimeSpan.FromSeconds(5);
/// <summary>
/// Cap applied to the retry delay when exponential backoff is used.
/// </summary>
public TimeSpan RetryMaxBackoff { get; set; } = TimeSpan.FromMinutes(1);
}
public sealed class SchedulerRedisQueueOptions
{
public string? ConnectionString { get; set; }
public int? Database { get; set; }
public TimeSpan InitializationTimeout { get; set; } = TimeSpan.FromSeconds(30);
public RedisSchedulerStreamOptions Planner { get; set; } = RedisSchedulerStreamOptions.ForPlanner();
public RedisSchedulerStreamOptions Runner { get; set; } = RedisSchedulerStreamOptions.ForRunner();
}
public sealed class RedisSchedulerStreamOptions
{
public string Stream { get; set; } = string.Empty;
public string ConsumerGroup { get; set; } = string.Empty;
public string DeadLetterStream { get; set; } = string.Empty;
public string IdempotencyKeyPrefix { get; set; } = string.Empty;
public TimeSpan IdempotencyWindow { get; set; } = TimeSpan.FromHours(12);
public int? ApproximateMaxLength { get; set; }
public static RedisSchedulerStreamOptions ForPlanner()
=> new()
{
Stream = "scheduler:planner",
ConsumerGroup = "scheduler-planners",
DeadLetterStream = "scheduler:planner:dead",
IdempotencyKeyPrefix = "scheduler:planner:idemp:"
};
public static RedisSchedulerStreamOptions ForRunner()
=> new()
{
Stream = "scheduler:runner",
ConsumerGroup = "scheduler-runners",
DeadLetterStream = "scheduler:runner:dead",
IdempotencyKeyPrefix = "scheduler:runner:idemp:"
};
}
public sealed class SchedulerNatsQueueOptions
{
public string? Url { get; set; }
public TimeSpan IdleHeartbeat { get; set; } = TimeSpan.FromSeconds(30);
public SchedulerNatsStreamOptions Planner { get; set; } = SchedulerNatsStreamOptions.ForPlanner();
public SchedulerNatsStreamOptions Runner { get; set; } = SchedulerNatsStreamOptions.ForRunner();
}
public sealed class SchedulerNatsStreamOptions
{
public string Stream { get; set; } = string.Empty;
public string Subject { get; set; } = string.Empty;
public string DurableConsumer { get; set; } = string.Empty;
public string DeadLetterStream { get; set; } = string.Empty;
public string DeadLetterSubject { get; set; } = string.Empty;
public int MaxAckPending { get; set; } = 64;
public TimeSpan AckWait { get; set; } = TimeSpan.FromMinutes(5);
public TimeSpan RetryDelay { get; set; } = TimeSpan.FromSeconds(10);
public static SchedulerNatsStreamOptions ForPlanner()
=> new()
{
Stream = "SCHEDULER_PLANNER",
Subject = "scheduler.planner",
DurableConsumer = "scheduler-planners",
DeadLetterStream = "SCHEDULER_PLANNER_DEAD",
DeadLetterSubject = "scheduler.planner.dead"
};
public static SchedulerNatsStreamOptions ForRunner()
=> new()
{
Stream = "SCHEDULER_RUNNER",
Subject = "scheduler.runner",
DurableConsumer = "scheduler-runners",
DeadLetterStream = "SCHEDULER_RUNNER_DEAD",
DeadLetterSubject = "scheduler.runner.dead"
};
}

View File

@@ -0,0 +1,88 @@
using System;
using Microsoft.Extensions.Configuration;
using Microsoft.Extensions.DependencyInjection;
using Microsoft.Extensions.DependencyInjection.Extensions;
using Microsoft.Extensions.Diagnostics.HealthChecks;
using Microsoft.Extensions.Logging;
using StellaOps.Scheduler.Queue.Nats;
using StellaOps.Scheduler.Queue.Redis;
namespace StellaOps.Scheduler.Queue;
public static class SchedulerQueueServiceCollectionExtensions
{
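/// <summary>
/// Binds <see cref="SchedulerQueueOptions"/> from the given configuration section and registers the
/// planner/runner queues plus <see cref="SchedulerQueueHealthCheck"/>. Example section shape
/// (assumed; property names follow the options types bound below):
///   "scheduler": { "queue": { "kind": "Redis", "redis": { "connectionString": "localhost:6379" } } }
/// </summary>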
public static IServiceCollection AddSchedulerQueues(
this IServiceCollection services,
IConfiguration configuration,
string sectionName = "scheduler:queue")
{
ArgumentNullException.ThrowIfNull(services);
ArgumentNullException.ThrowIfNull(configuration);
var options = new SchedulerQueueOptions();
configuration.GetSection(sectionName).Bind(options);
services.TryAddSingleton(TimeProvider.System);
services.AddSingleton(options);
services.AddSingleton<ISchedulerPlannerQueue>(sp =>
{
var loggerFactory = sp.GetRequiredService<ILoggerFactory>();
var timeProvider = sp.GetService<TimeProvider>() ?? TimeProvider.System;
return options.Kind switch
{
SchedulerQueueTransportKind.Redis => new RedisSchedulerPlannerQueue(
options,
options.Redis,
loggerFactory.CreateLogger<RedisSchedulerPlannerQueue>(),
timeProvider),
SchedulerQueueTransportKind.Nats => new NatsSchedulerPlannerQueue(
options,
options.Nats,
loggerFactory.CreateLogger<NatsSchedulerPlannerQueue>(),
timeProvider),
_ => throw new InvalidOperationException($"Unsupported scheduler queue transport '{options.Kind}'.")
};
});
services.AddSingleton<ISchedulerRunnerQueue>(sp =>
{
var loggerFactory = sp.GetRequiredService<ILoggerFactory>();
var timeProvider = sp.GetService<TimeProvider>() ?? TimeProvider.System;
return options.Kind switch
{
SchedulerQueueTransportKind.Redis => new RedisSchedulerRunnerQueue(
options,
options.Redis,
loggerFactory.CreateLogger<RedisSchedulerRunnerQueue>(),
timeProvider),
SchedulerQueueTransportKind.Nats => new NatsSchedulerRunnerQueue(
options,
options.Nats,
loggerFactory.CreateLogger<NatsSchedulerRunnerQueue>(),
timeProvider),
_ => throw new InvalidOperationException($"Unsupported scheduler queue transport '{options.Kind}'.")
};
});
services.AddSingleton<SchedulerQueueHealthCheck>();
return services;
}
public static IHealthChecksBuilder AddSchedulerQueueHealthCheck(
this IHealthChecksBuilder builder)
{
ArgumentNullException.ThrowIfNull(builder);
builder.Services.TryAddSingleton<SchedulerQueueHealthCheck>();
builder.AddCheck<SchedulerQueueHealthCheck>(
name: "scheduler-queue",
failureStatus: HealthStatus.Unhealthy,
tags: new[] { "scheduler", "queue" });
return builder;
}
}

View File

@@ -0,0 +1,10 @@
namespace StellaOps.Scheduler.Queue;
/// <summary>
/// Transport backends supported by the scheduler queue abstraction.
/// </summary>
public enum SchedulerQueueTransportKind
{
Redis = 0,
Nats = 1,
}

View File

@@ -0,0 +1,21 @@
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<TargetFramework>net10.0</TargetFramework>
<ImplicitUsings>enable</ImplicitUsings>
<Nullable>enable</Nullable>
</PropertyGroup>
<ItemGroup>
<PackageReference Include="StackExchange.Redis" Version="2.8.24" />
<PackageReference Include="Microsoft.Extensions.Logging.Abstractions" Version="10.0.0-rc.2.25502.107" />
<PackageReference Include="Microsoft.Extensions.Configuration.Abstractions" Version="10.0.0-rc.2.25502.107" />
<PackageReference Include="Microsoft.Extensions.Configuration.Binder" Version="10.0.0-rc.2.25502.107" />
<PackageReference Include="Microsoft.Extensions.DependencyInjection.Abstractions" Version="10.0.0-rc.2.25502.107" />
<PackageReference Include="Microsoft.Extensions.Diagnostics.HealthChecks" Version="10.0.0-rc.2.25502.107" />
<PackageReference Include="Microsoft.Extensions.Diagnostics.HealthChecks.Abstractions" Version="10.0.0-rc.2.25502.107" />
<PackageReference Include="NATS.Client.Core" Version="2.0.0" />
<PackageReference Include="NATS.Client.JetStream" Version="2.0.0" />
</ItemGroup>
<ItemGroup>
<ProjectReference Include="..\StellaOps.Scheduler.Models\StellaOps.Scheduler.Models.csproj" />
</ItemGroup>
</Project>

View File

@@ -0,0 +1,9 @@
# Scheduler Queue Task Board (Sprint 16)
> **Status note (2025-10-19):** Scheduler DTOs and sample payloads are now available (SCHED-MODELS-16-102). Queue tasks remain pending on this board.
| ID | Status | Owner(s) | Depends on | Description | Exit Criteria |
|----|--------|----------|------------|-------------|---------------|
| SCHED-QUEUE-16-401 | DONE (2025-10-20) | Scheduler Queue Guild | SCHED-MODELS-16-101 | Implement queue abstraction + Redis Streams adapter (planner inputs, runner segments) with ack/lease semantics. | Integration tests cover enqueue/dequeue/ack; lease renewal implemented; ordering preserved. |
| SCHED-QUEUE-16-402 | DONE (2025-10-20) | Scheduler Queue Guild | SCHED-QUEUE-16-401 | Add NATS JetStream adapter with configuration binding, health probes, failover. | Health endpoints verified; failover documented; adapter tested. |
| SCHED-QUEUE-16-403 | DONE (2025-10-20) | Scheduler Queue Guild | SCHED-QUEUE-16-401 | Dead-letter handling + metrics (queue depth, retry counts), configuration toggles. | Dead-letter policy tested; metrics exported; docs updated. |

View File

@@ -0,0 +1,4 @@
# StellaOps.Scheduler.Storage.Mongo — Agent Charter
## Mission
Implement Mongo persistence (schedules, runs, impact cursors, locks, audit) per `docs/ARCHITECTURE_SCHEDULER.md`.

View File

@@ -0,0 +1,88 @@
using MongoDB.Bson;
using MongoDB.Bson.Serialization.Attributes;
using StellaOps.Scheduler.Models;
namespace StellaOps.Scheduler.Storage.Mongo.Documents;
[BsonIgnoreExtraElements]
internal sealed class RunSummaryDocument
{
public string Id { get; set; } = string.Empty;
public string TenantId { get; set; } = string.Empty;
public string ScheduleId { get; set; } = string.Empty;
public DateTime UpdatedAt { get; set; } = DateTime.SpecifyKind(DateTime.UnixEpoch, DateTimeKind.Utc);
public RunSummaryEntryDocument? LastRun { get; set; }
public List<RunSummaryEntryDocument> Recent { get; set; } = new();
public RunSummaryCountersDocument Counters { get; set; } = new();
}
internal sealed class RunSummaryEntryDocument
{
public string RunId { get; set; } = string.Empty;
[BsonRepresentation(BsonType.String)]
public RunTrigger Trigger { get; set; } = RunTrigger.Cron;
[BsonRepresentation(BsonType.String)]
public RunState State { get; set; } = RunState.Planning;
public DateTime CreatedAt { get; set; } = DateTime.SpecifyKind(DateTime.UnixEpoch, DateTimeKind.Utc);
public DateTime? StartedAt { get; set; }
public DateTime? FinishedAt { get; set; }
public RunStats Stats { get; set; } = RunStats.Empty;
[BsonIgnoreIfNull]
public string? Error { get; set; }
}
internal sealed class RunSummaryCountersDocument
{
public int Total { get; set; }
public int Planning { get; set; }
public int Queued { get; set; }
public int Running { get; set; }
public int Completed { get; set; }
public int Error { get; set; }
public int Cancelled { get; set; }
public int TotalDeltas { get; set; }
public int TotalNewCriticals { get; set; }
public int TotalNewHigh { get; set; }
public int TotalNewMedium { get; set; }
public int TotalNewLow { get; set; }
}

View File

@@ -0,0 +1,46 @@
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Options;
using MongoDB.Driver;
using StellaOps.Scheduler.Storage.Mongo.Options;
namespace StellaOps.Scheduler.Storage.Mongo.Internal;
internal sealed class SchedulerMongoContext
{
public SchedulerMongoContext(IOptions<SchedulerMongoOptions> options, ILogger<SchedulerMongoContext> logger)
{
ArgumentNullException.ThrowIfNull(logger);
var value = options?.Value ?? throw new ArgumentNullException(nameof(options));
if (string.IsNullOrWhiteSpace(value.ConnectionString))
{
throw new InvalidOperationException("Scheduler Mongo connection string is not configured.");
}
if (string.IsNullOrWhiteSpace(value.Database))
{
throw new InvalidOperationException("Scheduler Mongo database name is not configured.");
}
Client = new MongoClient(value.ConnectionString);
var settings = new MongoDatabaseSettings();
if (value.UseMajorityReadConcern)
{
settings.ReadConcern = ReadConcern.Majority;
}
if (value.UseMajorityWriteConcern)
{
settings.WriteConcern = WriteConcern.WMajority;
}
Database = Client.GetDatabase(value.Database, settings);
Options = value;
}
public MongoClient Client { get; }
public IMongoDatabase Database { get; }
public SchedulerMongoOptions Options { get; }
}

View File

@@ -0,0 +1,32 @@
using Microsoft.Extensions.Logging;
using StellaOps.Scheduler.Storage.Mongo.Migrations;
namespace StellaOps.Scheduler.Storage.Mongo.Internal;
internal interface ISchedulerMongoInitializer
{
Task EnsureMigrationsAsync(CancellationToken cancellationToken = default);
}
internal sealed class SchedulerMongoInitializer : ISchedulerMongoInitializer
{
private readonly SchedulerMongoContext _context;
private readonly SchedulerMongoMigrationRunner _migrationRunner;
private readonly ILogger<SchedulerMongoInitializer> _logger;
public SchedulerMongoInitializer(
SchedulerMongoContext context,
SchedulerMongoMigrationRunner migrationRunner,
ILogger<SchedulerMongoInitializer> logger)
{
_context = context ?? throw new ArgumentNullException(nameof(context));
_migrationRunner = migrationRunner ?? throw new ArgumentNullException(nameof(migrationRunner));
_logger = logger ?? throw new ArgumentNullException(nameof(logger));
}
public async Task EnsureMigrationsAsync(CancellationToken cancellationToken = default)
{
_logger.LogInformation("Ensuring Scheduler Mongo migrations are applied for database {Database}.", _context.Options.Database);
await _migrationRunner.RunAsync(cancellationToken).ConfigureAwait(false);
}
}

View File

@@ -0,0 +1,27 @@
using Microsoft.Extensions.Hosting;
using Microsoft.Extensions.Logging;
namespace StellaOps.Scheduler.Storage.Mongo.Internal;
internal sealed class SchedulerMongoInitializerHostedService : IHostedService
{
private readonly ISchedulerMongoInitializer _initializer;
private readonly ILogger<SchedulerMongoInitializerHostedService> _logger;
public SchedulerMongoInitializerHostedService(
ISchedulerMongoInitializer initializer,
ILogger<SchedulerMongoInitializerHostedService> logger)
{
_initializer = initializer ?? throw new ArgumentNullException(nameof(initializer));
_logger = logger ?? throw new ArgumentNullException(nameof(logger));
}
public async Task StartAsync(CancellationToken cancellationToken)
{
_logger.LogInformation("Applying Scheduler Mongo migrations.");
await _initializer.EnsureMigrationsAsync(cancellationToken).ConfigureAwait(false);
}
public Task StopAsync(CancellationToken cancellationToken)
=> Task.CompletedTask;
}

View File

@@ -0,0 +1,49 @@
using Microsoft.Extensions.Logging;
using MongoDB.Driver;
using StellaOps.Scheduler.Storage.Mongo.Internal;
namespace StellaOps.Scheduler.Storage.Mongo.Migrations;
internal sealed class EnsureSchedulerCollectionsMigration : ISchedulerMongoMigration
{
private readonly ILogger<EnsureSchedulerCollectionsMigration> _logger;
public EnsureSchedulerCollectionsMigration(ILogger<EnsureSchedulerCollectionsMigration> logger)
=> _logger = logger ?? throw new ArgumentNullException(nameof(logger));
public string Id => "20251019_scheduler_collections_v1";
public async ValueTask ExecuteAsync(SchedulerMongoContext context, CancellationToken cancellationToken)
{
ArgumentNullException.ThrowIfNull(context);
var requiredCollections = new[]
{
context.Options.SchedulesCollection,
context.Options.RunsCollection,
context.Options.ImpactSnapshotsCollection,
context.Options.AuditCollection,
context.Options.RunSummariesCollection,
context.Options.GraphJobsCollection,
context.Options.LocksCollection,
context.Options.MigrationsCollection
};
var cursor = await context.Database
.ListCollectionNamesAsync(cancellationToken: cancellationToken)
.ConfigureAwait(false);
var existing = await cursor.ToListAsync(cancellationToken).ConfigureAwait(false);
foreach (var collection in requiredCollections)
{
if (existing.Contains(collection, StringComparer.Ordinal))
{
continue;
}
_logger.LogInformation("Creating Scheduler Mongo collection '{CollectionName}'.", collection);
await context.Database.CreateCollectionAsync(collection, cancellationToken: cancellationToken).ConfigureAwait(false);
}
}
}

View File

@@ -0,0 +1,240 @@
using System;
using MongoDB.Bson;
using MongoDB.Driver;
using StellaOps.Scheduler.Storage.Mongo.Internal;
namespace StellaOps.Scheduler.Storage.Mongo.Migrations;
internal sealed class EnsureSchedulerIndexesMigration : ISchedulerMongoMigration
{
public string Id => "20251019_scheduler_indexes_v1";
public async ValueTask ExecuteAsync(SchedulerMongoContext context, CancellationToken cancellationToken)
{
ArgumentNullException.ThrowIfNull(context);
await EnsureSchedulesIndexesAsync(context, cancellationToken).ConfigureAwait(false);
await EnsureRunsIndexesAsync(context, cancellationToken).ConfigureAwait(false);
await EnsureImpactSnapshotsIndexesAsync(context, cancellationToken).ConfigureAwait(false);
await EnsureAuditIndexesAsync(context, cancellationToken).ConfigureAwait(false);
await EnsureRunSummariesIndexesAsync(context, cancellationToken).ConfigureAwait(false);
await EnsureGraphJobsIndexesAsync(context, cancellationToken).ConfigureAwait(false);
await EnsureLocksIndexesAsync(context, cancellationToken).ConfigureAwait(false);
}
private static async Task EnsureSchedulesIndexesAsync(SchedulerMongoContext context, CancellationToken cancellationToken)
{
var collection = context.Database.GetCollection<BsonDocument>(context.Options.SchedulesCollection);
var tenantEnabled = new CreateIndexModel<BsonDocument>(
Builders<BsonDocument>.IndexKeys
.Ascending("tenantId")
.Ascending("enabled"),
new CreateIndexOptions<BsonDocument>
{
Name = "tenant_enabled"
});
var cronTimezone = new CreateIndexModel<BsonDocument>(
Builders<BsonDocument>.IndexKeys
.Ascending("cronExpression")
.Ascending("timezone"),
new CreateIndexOptions<BsonDocument>
{
Name = "cron_timezone"
});
await collection.Indexes.CreateManyAsync(new[] { tenantEnabled, cronTimezone }, cancellationToken: cancellationToken)
.ConfigureAwait(false);
}
private static async Task EnsureRunsIndexesAsync(SchedulerMongoContext context, CancellationToken cancellationToken)
{
var collection = context.Database.GetCollection<BsonDocument>(context.Options.RunsCollection);
var tenantCreated = new CreateIndexModel<BsonDocument>(
Builders<BsonDocument>.IndexKeys
.Ascending("tenantId")
.Descending("createdAt"),
new CreateIndexOptions<BsonDocument>
{
Name = "tenant_createdAt_desc"
});
var stateIndex = new CreateIndexModel<BsonDocument>(
Builders<BsonDocument>.IndexKeys
.Ascending("state"),
new CreateIndexOptions<BsonDocument>
{
Name = "state_lookup"
});
var scheduleIndex = new CreateIndexModel<BsonDocument>(
Builders<BsonDocument>.IndexKeys
.Ascending("scheduleId")
.Descending("createdAt"),
new CreateIndexOptions<BsonDocument>
{
Name = "schedule_createdAt_desc"
});
var models = new List<CreateIndexModel<BsonDocument>> { tenantCreated, stateIndex, scheduleIndex };
if (context.Options.CompletedRunRetention > TimeSpan.Zero)
{
var ttlModel = new CreateIndexModel<BsonDocument>(
Builders<BsonDocument>.IndexKeys.Ascending("finishedAt"),
new CreateIndexOptions<BsonDocument>
{
Name = "finishedAt_ttl",
ExpireAfter = context.Options.CompletedRunRetention
});
models.Add(ttlModel);
}
await collection.Indexes.CreateManyAsync(models, cancellationToken: cancellationToken).ConfigureAwait(false);
}
private static async Task EnsureImpactSnapshotsIndexesAsync(SchedulerMongoContext context, CancellationToken cancellationToken)
{
var collection = context.Database.GetCollection<BsonDocument>(context.Options.ImpactSnapshotsCollection);
var tenantScope = new CreateIndexModel<BsonDocument>(
Builders<BsonDocument>.IndexKeys
.Ascending("selector.tenantId")
.Ascending("selector.scope"),
new CreateIndexOptions<BsonDocument>
{
Name = "selector_tenant_scope"
});
var snapshotId = new CreateIndexModel<BsonDocument>(
Builders<BsonDocument>.IndexKeys.Ascending("snapshotId"),
new CreateIndexOptions<BsonDocument>
{
Name = "snapshotId_unique",
Unique = true,
PartialFilterExpression = Builders<BsonDocument>.Filter.Exists("snapshotId", true)
});
await collection.Indexes.CreateManyAsync(new[] { tenantScope, snapshotId }, cancellationToken: cancellationToken)
.ConfigureAwait(false);
}
private static async Task EnsureAuditIndexesAsync(SchedulerMongoContext context, CancellationToken cancellationToken)
{
var collection = context.Database.GetCollection<BsonDocument>(context.Options.AuditCollection);
var tenantOccurred = new CreateIndexModel<BsonDocument>(
Builders<BsonDocument>.IndexKeys
.Ascending("tenantId")
.Descending("occurredAt"),
new CreateIndexOptions<BsonDocument>
{
Name = "tenant_occurredAt_desc"
});
var correlationIndex = new CreateIndexModel<BsonDocument>(
Builders<BsonDocument>.IndexKeys
.Ascending("correlationId"),
new CreateIndexOptions<BsonDocument>
{
Name = "correlation_lookup",
PartialFilterExpression = Builders<BsonDocument>.Filter.Exists("correlationId", true)
});
await collection.Indexes.CreateManyAsync(new[] { tenantOccurred, correlationIndex }, cancellationToken: cancellationToken)
.ConfigureAwait(false);
}
private static async Task EnsureRunSummariesIndexesAsync(SchedulerMongoContext context, CancellationToken cancellationToken)
{
var collection = context.Database.GetCollection<BsonDocument>(context.Options.RunSummariesCollection);
var tenantSchedule = new CreateIndexModel<BsonDocument>(
Builders<BsonDocument>.IndexKeys
.Ascending("tenantId")
.Ascending("scheduleId"),
new CreateIndexOptions<BsonDocument>
{
Name = "tenant_schedule",
Unique = true
});
var tenantUpdated = new CreateIndexModel<BsonDocument>(
Builders<BsonDocument>.IndexKeys
.Ascending("tenantId")
.Descending("updatedAt"),
new CreateIndexOptions<BsonDocument>
{
Name = "tenant_updatedAt_desc"
});
await collection.Indexes.CreateManyAsync(new[] { tenantSchedule, tenantUpdated }, cancellationToken: cancellationToken)
.ConfigureAwait(false);
}
private static async Task EnsureGraphJobsIndexesAsync(SchedulerMongoContext context, CancellationToken cancellationToken)
{
var collection = context.Database.GetCollection<BsonDocument>(context.Options.GraphJobsCollection);
var tenantKindCreated = new CreateIndexModel<BsonDocument>(
Builders<BsonDocument>.IndexKeys
.Ascending("tenantId")
.Ascending("kind")
.Descending("createdAt"),
new CreateIndexOptions<BsonDocument>
{
Name = "tenant_kind_createdAt_desc"
});
var tenantStatus = new CreateIndexModel<BsonDocument>(
Builders<BsonDocument>.IndexKeys
.Ascending("tenantId")
.Ascending("status"),
new CreateIndexOptions<BsonDocument>
{
Name = "tenant_status"
});
var snapshotIndex = new CreateIndexModel<BsonDocument>(
Builders<BsonDocument>.IndexKeys
.Ascending("graphSnapshotId"),
new CreateIndexOptions<BsonDocument>
{
Name = "graphSnapshot_lookup",
PartialFilterExpression = Builders<BsonDocument>.Filter.Exists("graphSnapshotId", true)
});
await collection.Indexes.CreateManyAsync(new[] { tenantKindCreated, tenantStatus, snapshotIndex }, cancellationToken: cancellationToken)
.ConfigureAwait(false);
}
private static async Task EnsureLocksIndexesAsync(SchedulerMongoContext context, CancellationToken cancellationToken)
{
var collection = context.Database.GetCollection<BsonDocument>(context.Options.LocksCollection);
var tenantResource = new CreateIndexModel<BsonDocument>(
Builders<BsonDocument>.IndexKeys
.Ascending("tenantId")
.Ascending("resource"),
new CreateIndexOptions<BsonDocument>
{
Name = "tenant_resource_unique",
Unique = true,
PartialFilterExpression = Builders<BsonDocument>.Filter.Exists("resource", true)
});
// ExpireAfter = TimeSpan.Zero makes Mongo expire each lock at its own `expiresAt` value.
var ttlModel = new CreateIndexModel<BsonDocument>(
Builders<BsonDocument>.IndexKeys.Ascending("expiresAt"),
new CreateIndexOptions<BsonDocument>
{
Name = "expiresAt_ttl",
ExpireAfter = TimeSpan.Zero
});
await collection.Indexes.CreateManyAsync(new[] { tenantResource, ttlModel }, cancellationToken: cancellationToken)
.ConfigureAwait(false);
}
}

View File

@@ -0,0 +1,10 @@
using StellaOps.Scheduler.Storage.Mongo.Internal;
namespace StellaOps.Scheduler.Storage.Mongo.Migrations;
internal interface ISchedulerMongoMigration
{
string Id { get; }
ValueTask ExecuteAsync(SchedulerMongoContext context, CancellationToken cancellationToken);
}

View File

@@ -0,0 +1,16 @@
using MongoDB.Bson;
using MongoDB.Bson.Serialization.Attributes;
namespace StellaOps.Scheduler.Storage.Mongo.Migrations;
internal sealed class SchedulerMongoMigrationRecord
{
[BsonId]
public ObjectId Id { get; set; }
[BsonElement("migrationId")]
public string MigrationId { get; set; } = string.Empty;
[BsonElement("appliedAt")]
public DateTimeOffset AppliedAt { get; set; }
}

View File

@@ -0,0 +1,77 @@
using Microsoft.Extensions.Logging;
using MongoDB.Driver;
using StellaOps.Scheduler.Storage.Mongo.Internal;
namespace StellaOps.Scheduler.Storage.Mongo.Migrations;
internal sealed class SchedulerMongoMigrationRunner
{
private readonly SchedulerMongoContext _context;
private readonly IReadOnlyList<ISchedulerMongoMigration> _migrations;
private readonly ILogger<SchedulerMongoMigrationRunner> _logger;
public SchedulerMongoMigrationRunner(
SchedulerMongoContext context,
IEnumerable<ISchedulerMongoMigration> migrations,
ILogger<SchedulerMongoMigrationRunner> logger)
{
_context = context ?? throw new ArgumentNullException(nameof(context));
ArgumentNullException.ThrowIfNull(migrations);
// Migration ids are date-prefixed (e.g. 20251019_...), so ordinal order is chronological.
_migrations = migrations.OrderBy(migration => migration.Id, StringComparer.Ordinal).ToArray();
_logger = logger ?? throw new ArgumentNullException(nameof(logger));
}
public async ValueTask RunAsync(CancellationToken cancellationToken)
{
if (_migrations.Count == 0)
{
return;
}
var collection = _context.Database.GetCollection<SchedulerMongoMigrationRecord>(_context.Options.MigrationsCollection);
await EnsureMigrationIndexAsync(collection, cancellationToken).ConfigureAwait(false);
var applied = await collection
.Find(FilterDefinition<SchedulerMongoMigrationRecord>.Empty)
.Project(record => record.MigrationId)
.ToListAsync(cancellationToken)
.ConfigureAwait(false);
var appliedSet = applied.ToHashSet(StringComparer.Ordinal);
foreach (var migration in _migrations)
{
if (appliedSet.Contains(migration.Id))
{
continue;
}
_logger.LogInformation("Applying Scheduler Mongo migration {MigrationId}.", migration.Id);
await migration.ExecuteAsync(_context, cancellationToken).ConfigureAwait(false);
var record = new SchedulerMongoMigrationRecord
{
Id = MongoDB.Bson.ObjectId.GenerateNewId(),
MigrationId = migration.Id,
AppliedAt = DateTimeOffset.UtcNow
};
await collection.InsertOneAsync(record, cancellationToken: cancellationToken).ConfigureAwait(false);
_logger.LogInformation("Completed Scheduler Mongo migration {MigrationId}.", migration.Id);
}
}
private static async Task EnsureMigrationIndexAsync(
IMongoCollection<SchedulerMongoMigrationRecord> collection,
CancellationToken cancellationToken)
{
var keys = Builders<SchedulerMongoMigrationRecord>.IndexKeys.Ascending(record => record.MigrationId);
var model = new CreateIndexModel<SchedulerMongoMigrationRecord>(keys, new CreateIndexOptions
{
Name = "migrationId_unique",
Unique = true
});
await collection.Indexes.CreateOneAsync(model, cancellationToken: cancellationToken).ConfigureAwait(false);
}
}

View File

@@ -0,0 +1,38 @@
using System;
namespace StellaOps.Scheduler.Storage.Mongo.Options;
/// <summary>
/// Configures MongoDB connectivity and collection names for Scheduler storage.
/// </summary>
public sealed class SchedulerMongoOptions
{
public string ConnectionString { get; set; } = "mongodb://localhost:27017";
public string Database { get; set; } = "stellaops_scheduler";
public string SchedulesCollection { get; set; } = "schedules";
public string RunsCollection { get; set; } = "runs";
public string ImpactSnapshotsCollection { get; set; } = "impact_snapshots";
public string AuditCollection { get; set; } = "audit";
public string RunSummariesCollection { get; set; } = "run_summaries";
public string GraphJobsCollection { get; set; } = "graph_jobs";
public string PolicyJobsCollection { get; set; } = "policy_jobs"; // referenced by PolicyRunJobRepository; default name assumed
public string LocksCollection { get; set; } = "locks";
public string MigrationsCollection { get; set; } = "_scheduler_migrations";
/// <summary>
/// Optional TTL applied to completed runs. When zero or negative, no TTL index is created.
/// </summary>
public TimeSpan CompletedRunRetention { get; set; } = TimeSpan.FromDays(180);
public bool UseMajorityReadConcern { get; set; } = true;
public bool UseMajorityWriteConcern { get; set; } = true;
}

View File

@@ -0,0 +1,36 @@
using System.Collections.Immutable;
using StellaOps.Scheduler.Models;
namespace StellaOps.Scheduler.Storage.Mongo.Projections;
public sealed record RunSummaryProjection(
string TenantId,
string ScheduleId,
DateTimeOffset UpdatedAt,
RunSummarySnapshot? LastRun,
ImmutableArray<RunSummarySnapshot> Recent,
RunSummaryCounters Counters);
public sealed record RunSummarySnapshot(
string RunId,
RunTrigger Trigger,
RunState State,
DateTimeOffset CreatedAt,
DateTimeOffset? StartedAt,
DateTimeOffset? FinishedAt,
RunStats Stats,
string? Error);
public sealed record RunSummaryCounters(
int Total,
int Planning,
int Queued,
int Running,
int Completed,
int Error,
int Cancelled,
int TotalDeltas,
int TotalNewCriticals,
int TotalNewHigh,
int TotalNewMedium,
int TotalNewLow);

View File

@@ -0,0 +1,3 @@
using System.Runtime.CompilerServices;
[assembly: InternalsVisibleTo("StellaOps.Scheduler.Storage.Mongo.Tests")]

View File

@@ -0,0 +1,39 @@
# Scheduler Storage Mongo — Sprint 16 Handoff
This module now consumes the canonical DTOs defined in `StellaOps.Scheduler.Models`.
Samples covering REST shapes live under `samples/api/scheduler/` and are referenced from `docs/11_DATA_SCHEMAS.md#3.1`.
## Collections & DTO mapping
| Collection | DTO | Notes |
|-------------------|--------------------------|---------------------------------------------------------------------------------------|
| `schedules` | `Schedule` | Persist `Schedule` as-is. `_id``Schedule.Id`. Use compound indexes on `{tenantId, enabled}` and `{whenCron}` per doc. |
| `runs` | `Run` | Store `Run.Stats` inside the document; omit `deltas` array when empty. |
| `impact_snapshots`| `ImpactSet` | Normalise selector filter fields exactly as emitted by the canonical serializer. |
| `audit` | `AuditRecord` | Lower-case metadata keys are already enforced by the model. |
All timestamps are persisted as UTC (`+00:00`). Empty selector filters remain empty arrays (see `impact-set.json` sample).
## Implementation guidance
1. Add a project reference to `StellaOps.Scheduler.Models` and reuse the records directly; avoid duplicate BSON POCOs.
2. When serialising/deserialising to MongoDB, call `CanonicalJsonSerializer` to keep ordering stable for diffable fixtures.
3. Integration tests should load the JSON samples and round-trip through the Mongo persistence layer to guarantee parity.
4. Follow `docs/11_DATA_SCHEMAS.md` for index requirements; update that doc if storage diverges.
5. Register `AddSchedulerMongoStorage` in the host and call `ISchedulerMongoInitializer.EnsureMigrationsAsync` during bootstrap so collections/indexes are created before workers/web APIs start.
With these artefacts in place, the dependency on SCHED-MODELS-16-101/102 is cleared and storage work can move to DOING.
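A minimal bootstrap sketch for item 5 (the exact `AddSchedulerMongoStorage` overload and `EnsureMigrationsAsync` signature are assumptions based on the guidance above):

```csharp
// Sketch only: assumes AddSchedulerMongoStorage binds SchedulerMongoOptions from the given
// configuration section and that ISchedulerMongoInitializer exposes EnsureMigrationsAsync.
var builder = WebApplication.CreateBuilder(args);
builder.Services.AddSchedulerMongoStorage(builder.Configuration.GetSection("Scheduler:Mongo"));

var app = builder.Build();

// Create collections and indexes before workers/web APIs begin serving traffic.
using (var scope = app.Services.CreateScope())
{
    var initializer = scope.ServiceProvider.GetRequiredService<ISchedulerMongoInitializer>();
    await initializer.EnsureMigrationsAsync(CancellationToken.None);
}

app.Run();
```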
## Repositories & Sessions (Sprint 16)
- `AddSchedulerMongoStorage` now registers the scheduler repositories:
- `IScheduleRepository` — CRUD helpers with tenant scoping and soft-delete markers (`deletedAt`, `deletedBy`).
- `IRunRepository` — create/update/list helpers sorted by `createdAt`, honouring the TTL index on `finishedAt`.
- `IImpactSnapshotRepository` — stores canonical `ImpactSet` snapshots with deterministic selector digests.
- `IAuditRepository` — append/list audit log entries with optional category/schedule/run filters.
- `IRunSummaryRepository` — persists the `run_summaries` materialised view keyed by tenant/schedule.
- `IRunSummaryService` — projects run updates into the materialised view (latest run + rolling counters for the last 20 updates).
- `ISchedulerAuditService` — convenience layer that stamps IDs/timestamps and writes `AuditRecord` instances with consistent metadata.
- `ISchedulerMongoSessionFactory` provides causal-consistent Mongo sessions (default `CausalConsistency = true`) that repositories accept for read-after-write flows.
- All repositories persist canonical JSON via `CanonicalJsonSerializer`; helper mappers strip internal fields before materialising DTOs.
- Soft-deleted schedules keep historical documents but are excluded from queries unless `ScheduleQueryOptions.IncludeDeleted` is set; deletions also disable the schedule and bump `updatedAt/updatedBy`.
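A read-after-write sketch using the causal-consistency sessions (the `StartSessionAsync` member name on `ISchedulerMongoSessionFactory` is an assumption; the repositories accept the optional `IClientSessionHandle` as shown in the interfaces below):

```csharp
// Sketch: upsert a schedule and read it back within one causally consistent session.
public static async Task<Schedule?> UpsertAndReadBackAsync(
    ISchedulerMongoSessionFactory sessionFactory,
    IScheduleRepository schedules,
    Schedule schedule,
    CancellationToken cancellationToken)
{
    using var session = await sessionFactory.StartSessionAsync(cancellationToken); // assumed member name
    await schedules.UpsertAsync(schedule, session, cancellationToken);

    // Causal consistency guarantees this read observes the preceding upsert.
    return await schedules.GetAsync(schedule.TenantId, schedule.Id, session, cancellationToken);
}
```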

View File

@@ -0,0 +1,32 @@
namespace StellaOps.Scheduler.Storage.Mongo.Repositories;
/// <summary>
/// Filters applied when querying scheduler audit records.
/// </summary>
public sealed class AuditQueryOptions
{
/// <summary>
/// Optional audit category filter (e.g., "scheduler").
/// </summary>
public string? Category { get; init; }
/// <summary>
/// Optional schedule identifier filter.
/// </summary>
public string? ScheduleId { get; init; }
/// <summary>
/// Optional run identifier filter.
/// </summary>
public string? RunId { get; init; }
/// <summary>
/// Lower bound for audit occurrence timestamp.
/// </summary>
public DateTimeOffset? Since { get; init; }
/// <summary>
/// Maximum number of records to return.
/// </summary>
public int? Limit { get; init; }
}

View File

@@ -0,0 +1,99 @@
using System;
using System.Collections.Generic;
using System.Linq;
using MongoDB.Bson;
using MongoDB.Driver;
using StellaOps.Scheduler.Models;
using StellaOps.Scheduler.Storage.Mongo.Internal;
using StellaOps.Scheduler.Storage.Mongo.Serialization;
namespace StellaOps.Scheduler.Storage.Mongo.Repositories;
internal sealed class AuditRepository : IAuditRepository
{
private static readonly FilterDefinitionBuilder<BsonDocument> Filter = Builders<BsonDocument>.Filter;
private static readonly SortDefinitionBuilder<BsonDocument> Sort = Builders<BsonDocument>.Sort;
private readonly IMongoCollection<BsonDocument> _collection;
public AuditRepository(SchedulerMongoContext context)
{
if (context is null)
{
throw new ArgumentNullException(nameof(context));
}
_collection = context.Database.GetCollection<BsonDocument>(context.Options.AuditCollection);
}
public async Task InsertAsync(
AuditRecord record,
IClientSessionHandle? session = null,
CancellationToken cancellationToken = default)
{
ArgumentNullException.ThrowIfNull(record);
var document = AuditRecordDocumentMapper.ToBsonDocument(record);
if (session is null)
{
await _collection.InsertOneAsync(document, cancellationToken: cancellationToken).ConfigureAwait(false);
}
else
{
await _collection.InsertOneAsync(session, document, cancellationToken: cancellationToken).ConfigureAwait(false);
}
}
public async Task<IReadOnlyList<AuditRecord>> ListAsync(
string tenantId,
AuditQueryOptions? options = null,
IClientSessionHandle? session = null,
CancellationToken cancellationToken = default)
{
if (string.IsNullOrWhiteSpace(tenantId))
{
throw new ArgumentException("Tenant id must be provided.", nameof(tenantId));
}
options ??= new AuditQueryOptions();
var filters = new List<FilterDefinition<BsonDocument>>
{
Filter.Eq("tenantId", tenantId)
};
if (!string.IsNullOrWhiteSpace(options.Category))
{
filters.Add(Filter.Eq("category", options.Category));
}
if (!string.IsNullOrWhiteSpace(options.ScheduleId))
{
filters.Add(Filter.Eq("scheduleId", options.ScheduleId));
}
if (!string.IsNullOrWhiteSpace(options.RunId))
{
filters.Add(Filter.Eq("runId", options.RunId));
}
if (options.Since is { } since)
{
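// occurredAt is persisted as an ISO-8601 UTC string, so lexicographic Gte matches chronological order.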
filters.Add(Filter.Gte("occurredAt", since.ToUniversalTime().ToString("O")));
}
var combined = Filter.And(filters);
var find = session is null
? _collection.Find(combined)
: _collection.Find(session, combined);
var limit = options.Limit is { } specified && specified > 0 ? specified : 100;
var documents = await find
.Sort(Sort.Descending("occurredAt"))
.Limit(limit)
.ToListAsync(cancellationToken)
.ConfigureAwait(false);
return documents.Select(AuditRecordDocumentMapper.FromBsonDocument).ToArray();
}
}

View File

@@ -0,0 +1,200 @@
using MongoDB.Bson;
using MongoDB.Driver;
using StellaOps.Scheduler.Models;
using StellaOps.Scheduler.Storage.Mongo.Internal;
using StellaOps.Scheduler.Storage.Mongo.Serialization;
namespace StellaOps.Scheduler.Storage.Mongo.Repositories;
internal sealed class GraphJobRepository : IGraphJobRepository
{
private static readonly FilterDefinitionBuilder<BsonDocument> Filter = Builders<BsonDocument>.Filter;
private readonly IMongoCollection<BsonDocument> _collection;
public GraphJobRepository(SchedulerMongoContext context)
{
ArgumentNullException.ThrowIfNull(context);
_collection = context.Database.GetCollection<BsonDocument>(context.Options.GraphJobsCollection);
}
public async Task InsertAsync(GraphBuildJob job, CancellationToken cancellationToken = default)
{
var document = GraphJobDocumentMapper.ToBsonDocument(job);
await _collection.InsertOneAsync(document, cancellationToken: cancellationToken).ConfigureAwait(false);
}
public async Task InsertAsync(GraphOverlayJob job, CancellationToken cancellationToken = default)
{
var document = GraphJobDocumentMapper.ToBsonDocument(job);
await _collection.InsertOneAsync(document, cancellationToken: cancellationToken).ConfigureAwait(false);
}
public async Task<GraphBuildJob?> GetBuildJobAsync(string tenantId, string jobId, CancellationToken cancellationToken = default)
{
var filter = BuildIdFilter(tenantId, jobId, "build");
var document = await _collection.Find(filter).FirstOrDefaultAsync(cancellationToken).ConfigureAwait(false);
return document is null ? null : GraphJobDocumentMapper.ToGraphBuildJob(document);
}
public async Task<GraphOverlayJob?> GetOverlayJobAsync(string tenantId, string jobId, CancellationToken cancellationToken = default)
{
var filter = BuildIdFilter(tenantId, jobId, "overlay");
var document = await _collection.Find(filter).FirstOrDefaultAsync(cancellationToken).ConfigureAwait(false);
return document is null ? null : GraphJobDocumentMapper.ToGraphOverlayJob(document);
}
public async Task<GraphBuildJob> ReplaceAsync(GraphBuildJob job, CancellationToken cancellationToken = default)
{
var document = GraphJobDocumentMapper.ToBsonDocument(job);
var filter = BuildIdFilter(job.TenantId, job.Id, "build");
await _collection.ReplaceOneAsync(filter, document, new ReplaceOptions { IsUpsert = false }, cancellationToken).ConfigureAwait(false);
return job;
}
public async Task<GraphOverlayJob> ReplaceAsync(GraphOverlayJob job, CancellationToken cancellationToken = default)
{
var document = GraphJobDocumentMapper.ToBsonDocument(job);
var filter = BuildIdFilter(job.TenantId, job.Id, "overlay");
await _collection.ReplaceOneAsync(filter, document, new ReplaceOptions { IsUpsert = false }, cancellationToken).ConfigureAwait(false);
return job;
}
public async Task<IReadOnlyList<GraphBuildJob>> ListBuildJobsAsync(string tenantId, GraphJobStatus? status, int limit, CancellationToken cancellationToken = default)
{
var filters = new List<FilterDefinition<BsonDocument>>
{
Filter.Eq("tenantId", tenantId),
Filter.Eq("kind", "build")
};
if (status is { } s)
{
filters.Add(Filter.Eq("status", s.ToString().ToLowerInvariant()));
}
var filter = Filter.And(filters);
var cursor = await _collection.Find(filter)
.Sort(Builders<BsonDocument>.Sort.Descending("createdAt"))
.Limit(limit)
.ToListAsync(cancellationToken)
.ConfigureAwait(false);
return cursor.Select(GraphJobDocumentMapper.ToGraphBuildJob).ToArray();
}
public async Task<IReadOnlyList<GraphBuildJob>> ListBuildJobsAsync(GraphJobStatus? status, int limit, CancellationToken cancellationToken = default)
{
var filters = new List<FilterDefinition<BsonDocument>>
{
Filter.Eq("kind", "build")
};
if (status is { } s)
{
filters.Add(Filter.Eq("status", s.ToString().ToLowerInvariant()));
}
var filter = Filter.And(filters);
var cursor = await _collection.Find(filter)
.Sort(Builders<BsonDocument>.Sort.Ascending("createdAt"))
.Limit(limit)
.ToListAsync(cancellationToken)
.ConfigureAwait(false);
return cursor.Select(GraphJobDocumentMapper.ToGraphBuildJob).ToArray();
}
public async Task<IReadOnlyList<GraphOverlayJob>> ListOverlayJobsAsync(string tenantId, GraphJobStatus? status, int limit, CancellationToken cancellationToken = default)
{
var filters = new List<FilterDefinition<BsonDocument>>
{
Filter.Eq("tenantId", tenantId),
Filter.Eq("kind", "overlay")
};
if (status is { } s)
{
filters.Add(Filter.Eq("status", s.ToString().ToLowerInvariant()));
}
var filter = Filter.And(filters);
var cursor = await _collection.Find(filter)
.Sort(Builders<BsonDocument>.Sort.Descending("createdAt"))
.Limit(limit)
.ToListAsync(cancellationToken)
.ConfigureAwait(false);
return cursor.Select(GraphJobDocumentMapper.ToGraphOverlayJob).ToArray();
}
public async Task<IReadOnlyList<GraphOverlayJob>> ListOverlayJobsAsync(GraphJobStatus? status, int limit, CancellationToken cancellationToken = default)
{
var filters = new List<FilterDefinition<BsonDocument>>
{
Filter.Eq("kind", "overlay")
};
if (status is { } s)
{
filters.Add(Filter.Eq("status", s.ToString().ToLowerInvariant()));
}
var filter = Filter.And(filters);
var cursor = await _collection.Find(filter)
.Sort(Builders<BsonDocument>.Sort.Ascending("createdAt"))
.Limit(limit)
.ToListAsync(cancellationToken)
.ConfigureAwait(false);
return cursor.Select(GraphJobDocumentMapper.ToGraphOverlayJob).ToArray();
}
public async Task<IReadOnlyCollection<GraphOverlayJob>> ListOverlayJobsAsync(string tenantId, CancellationToken cancellationToken = default)
{
var filter = Filter.And(
Filter.Eq("tenantId", tenantId),
Filter.Eq("kind", "overlay"));
var cursor = await _collection.Find(filter)
.ToListAsync(cancellationToken)
.ConfigureAwait(false);
return cursor.Select(GraphJobDocumentMapper.ToGraphOverlayJob).ToArray();
}
public async Task<bool> TryReplaceAsync(GraphBuildJob job, GraphJobStatus expectedStatus, CancellationToken cancellationToken = default)
{
ArgumentNullException.ThrowIfNull(job);
var filter = Filter.And(
Filter.Eq("_id", job.Id),
Filter.Eq("tenantId", job.TenantId),
Filter.Eq("kind", "build"),
Filter.Eq("status", expectedStatus.ToString().ToLowerInvariant()));
var document = GraphJobDocumentMapper.ToBsonDocument(job);
var result = await _collection.ReplaceOneAsync(filter, document, new ReplaceOptions { IsUpsert = false }, cancellationToken).ConfigureAwait(false);
return result.MatchedCount > 0;
}
public async Task<bool> TryReplaceOverlayAsync(GraphOverlayJob job, GraphJobStatus expectedStatus, CancellationToken cancellationToken = default)
{
ArgumentNullException.ThrowIfNull(job);
var filter = Filter.And(
Filter.Eq("_id", job.Id),
Filter.Eq("tenantId", job.TenantId),
Filter.Eq("kind", "overlay"),
Filter.Eq("status", expectedStatus.ToString().ToLowerInvariant()));
var document = GraphJobDocumentMapper.ToBsonDocument(job);
var result = await _collection.ReplaceOneAsync(filter, document, new ReplaceOptions { IsUpsert = false }, cancellationToken).ConfigureAwait(false);
return result.MatchedCount > 0;
}
private static FilterDefinition<BsonDocument> BuildIdFilter(string tenantId, string jobId, string kind)
=> Filter.And(
Filter.Eq("_id", jobId),
Filter.Eq("tenantId", tenantId),
Filter.Eq("kind", kind));
}

View File

@@ -0,0 +1,18 @@
using MongoDB.Driver;
using StellaOps.Scheduler.Models;
namespace StellaOps.Scheduler.Storage.Mongo.Repositories;
public interface IAuditRepository
{
Task InsertAsync(
AuditRecord record,
IClientSessionHandle? session = null,
CancellationToken cancellationToken = default);
Task<IReadOnlyList<AuditRecord>> ListAsync(
string tenantId,
AuditQueryOptions? options = null,
IClientSessionHandle? session = null,
CancellationToken cancellationToken = default);
}

View File

@@ -0,0 +1,32 @@
using StellaOps.Scheduler.Models;
namespace StellaOps.Scheduler.Storage.Mongo.Repositories;
public interface IGraphJobRepository
{
Task InsertAsync(GraphBuildJob job, CancellationToken cancellationToken = default);
Task InsertAsync(GraphOverlayJob job, CancellationToken cancellationToken = default);
Task<GraphBuildJob?> GetBuildJobAsync(string tenantId, string jobId, CancellationToken cancellationToken = default);
Task<GraphOverlayJob?> GetOverlayJobAsync(string tenantId, string jobId, CancellationToken cancellationToken = default);
Task<GraphBuildJob> ReplaceAsync(GraphBuildJob job, CancellationToken cancellationToken = default);
Task<GraphOverlayJob> ReplaceAsync(GraphOverlayJob job, CancellationToken cancellationToken = default);
Task<IReadOnlyList<GraphBuildJob>> ListBuildJobsAsync(string tenantId, GraphJobStatus? status, int limit, CancellationToken cancellationToken = default);
Task<IReadOnlyList<GraphBuildJob>> ListBuildJobsAsync(GraphJobStatus? status, int limit, CancellationToken cancellationToken = default);
Task<bool> TryReplaceAsync(GraphBuildJob job, GraphJobStatus expectedStatus, CancellationToken cancellationToken = default);
Task<IReadOnlyList<GraphOverlayJob>> ListOverlayJobsAsync(string tenantId, GraphJobStatus? status, int limit, CancellationToken cancellationToken = default);
Task<IReadOnlyList<GraphOverlayJob>> ListOverlayJobsAsync(GraphJobStatus? status, int limit, CancellationToken cancellationToken = default);
Task<IReadOnlyCollection<GraphOverlayJob>> ListOverlayJobsAsync(string tenantId, CancellationToken cancellationToken = default);
Task<bool> TryReplaceOverlayAsync(GraphOverlayJob job, GraphJobStatus expectedStatus, CancellationToken cancellationToken = default);
}

View File

@@ -0,0 +1,22 @@
using MongoDB.Driver;
using StellaOps.Scheduler.Models;
namespace StellaOps.Scheduler.Storage.Mongo.Repositories;
public interface IImpactSnapshotRepository
{
Task UpsertAsync(
ImpactSet snapshot,
IClientSessionHandle? session = null,
CancellationToken cancellationToken = default);
Task<ImpactSet?> GetBySnapshotIdAsync(
string snapshotId,
IClientSessionHandle? session = null,
CancellationToken cancellationToken = default);
Task<ImpactSet?> GetLatestBySelectorAsync(
Selector selector,
IClientSessionHandle? session = null,
CancellationToken cancellationToken = default);
}

View File

@@ -0,0 +1,48 @@
using MongoDB.Driver;
using StellaOps.Scheduler.Models;
namespace StellaOps.Scheduler.Storage.Mongo.Repositories;
public interface IPolicyRunJobRepository
{
Task InsertAsync(
PolicyRunJob job,
IClientSessionHandle? session = null,
CancellationToken cancellationToken = default);
Task<PolicyRunJob?> GetAsync(
string tenantId,
string jobId,
IClientSessionHandle? session = null,
CancellationToken cancellationToken = default);
Task<PolicyRunJob?> GetByRunIdAsync(
string tenantId,
string runId,
IClientSessionHandle? session = null,
CancellationToken cancellationToken = default);
Task<PolicyRunJob?> LeaseAsync(
string leaseOwner,
DateTimeOffset now,
TimeSpan leaseDuration,
int maxAttempts,
IClientSessionHandle? session = null,
CancellationToken cancellationToken = default);
Task<IReadOnlyList<PolicyRunJob>> ListAsync(
string tenantId,
string? policyId = null,
PolicyRunMode? mode = null,
IReadOnlyCollection<PolicyRunJobStatus>? statuses = null,
DateTimeOffset? queuedAfter = null,
int limit = 50,
IClientSessionHandle? session = null,
CancellationToken cancellationToken = default);
Task<bool> ReplaceAsync(
PolicyRunJob job,
string? expectedLeaseOwner = null,
IClientSessionHandle? session = null,
CancellationToken cancellationToken = default);
}

View File

@@ -0,0 +1,35 @@
using MongoDB.Driver;
using StellaOps.Scheduler.Models;
namespace StellaOps.Scheduler.Storage.Mongo.Repositories;
public interface IRunRepository
{
Task InsertAsync(
Run run,
IClientSessionHandle? session = null,
CancellationToken cancellationToken = default);
Task<bool> UpdateAsync(
Run run,
IClientSessionHandle? session = null,
CancellationToken cancellationToken = default);
Task<Run?> GetAsync(
string tenantId,
string runId,
IClientSessionHandle? session = null,
CancellationToken cancellationToken = default);
Task<IReadOnlyList<Run>> ListAsync(
string tenantId,
RunQueryOptions? options = null,
IClientSessionHandle? session = null,
CancellationToken cancellationToken = default);
Task<IReadOnlyList<Run>> ListByStateAsync(
RunState state,
int limit = 50,
IClientSessionHandle? session = null,
CancellationToken cancellationToken = default);
}

View File

@@ -0,0 +1,19 @@
using StellaOps.Scheduler.Storage.Mongo.Documents;
namespace StellaOps.Scheduler.Storage.Mongo.Repositories;
internal interface IRunSummaryRepository
{
Task<RunSummaryDocument?> GetAsync(
string tenantId,
string scheduleId,
CancellationToken cancellationToken = default);
Task<IReadOnlyList<RunSummaryDocument>> ListAsync(
string tenantId,
CancellationToken cancellationToken = default);
Task UpsertAsync(
RunSummaryDocument document,
CancellationToken cancellationToken = default);
}

View File

@@ -0,0 +1,32 @@
using MongoDB.Driver;
using StellaOps.Scheduler.Models;
namespace StellaOps.Scheduler.Storage.Mongo.Repositories;
public interface IScheduleRepository
{
Task UpsertAsync(
Schedule schedule,
IClientSessionHandle? session = null,
CancellationToken cancellationToken = default);
Task<Schedule?> GetAsync(
string tenantId,
string scheduleId,
IClientSessionHandle? session = null,
CancellationToken cancellationToken = default);
Task<IReadOnlyList<Schedule>> ListAsync(
string tenantId,
ScheduleQueryOptions? options = null,
IClientSessionHandle? session = null,
CancellationToken cancellationToken = default);
Task<bool> SoftDeleteAsync(
string tenantId,
string scheduleId,
string deletedBy,
DateTimeOffset deletedAt,
IClientSessionHandle? session = null,
CancellationToken cancellationToken = default);
}

View File

@@ -0,0 +1,94 @@
using System;
using MongoDB.Bson;
using MongoDB.Driver;
using StellaOps.Scheduler.Models;
using StellaOps.Scheduler.Storage.Mongo.Internal;
using StellaOps.Scheduler.Storage.Mongo.Serialization;
namespace StellaOps.Scheduler.Storage.Mongo.Repositories;
internal sealed class ImpactSnapshotRepository : IImpactSnapshotRepository
{
private static readonly FilterDefinitionBuilder<BsonDocument> Filter = Builders<BsonDocument>.Filter;
private static readonly SortDefinitionBuilder<BsonDocument> Sort = Builders<BsonDocument>.Sort;
private readonly IMongoCollection<BsonDocument> _collection;
public ImpactSnapshotRepository(SchedulerMongoContext context)
{
if (context is null)
{
throw new ArgumentNullException(nameof(context));
}
_collection = context.Database.GetCollection<BsonDocument>(context.Options.ImpactSnapshotsCollection);
}
public async Task UpsertAsync(
ImpactSet snapshot,
IClientSessionHandle? session = null,
CancellationToken cancellationToken = default)
{
ArgumentNullException.ThrowIfNull(snapshot);
var document = ImpactSetDocumentMapper.ToBsonDocument(snapshot);
var filter = Filter.Eq("_id", document["_id"]);
var options = new ReplaceOptions { IsUpsert = true };
if (session is null)
{
await _collection.ReplaceOneAsync(filter, document, options, cancellationToken).ConfigureAwait(false);
}
else
{
await _collection.ReplaceOneAsync(session, filter, document, options, cancellationToken).ConfigureAwait(false);
}
}
public async Task<ImpactSet?> GetBySnapshotIdAsync(
string snapshotId,
IClientSessionHandle? session = null,
CancellationToken cancellationToken = default)
{
if (string.IsNullOrWhiteSpace(snapshotId))
{
throw new ArgumentException("Snapshot id must be provided.", nameof(snapshotId));
}
var filter = Filter.Eq("_id", snapshotId);
var query = session is null
? _collection.Find(filter)
: _collection.Find(session, filter);
var document = await query.FirstOrDefaultAsync(cancellationToken).ConfigureAwait(false);
return document is null ? null : ImpactSetDocumentMapper.FromBsonDocument(document);
}
public async Task<ImpactSet?> GetLatestBySelectorAsync(
Selector selector,
IClientSessionHandle? session = null,
CancellationToken cancellationToken = default)
{
ArgumentNullException.ThrowIfNull(selector);
if (string.IsNullOrWhiteSpace(selector.TenantId))
{
throw new ArgumentException("Selector tenantId is required to resolve impact snapshots.", nameof(selector));
}
var digest = ImpactSetDocumentMapper.ComputeSelectorDigest(selector);
var filters = Filter.And(
Filter.Eq("selectorDigest", digest),
Filter.Eq("selector.tenantId", selector.TenantId));
var find = session is null
? _collection.Find(filters)
: _collection.Find(session, filters);
var document = await find
.Sort(Sort.Descending("generatedAt"))
.FirstOrDefaultAsync(cancellationToken)
.ConfigureAwait(false);
return document is null ? null : ImpactSetDocumentMapper.FromBsonDocument(document);
}
}

View File

@@ -0,0 +1,249 @@
using System;
using System.Collections.Generic;
using System.Linq;
using MongoDB.Bson;
using MongoDB.Driver;
using StellaOps.Scheduler.Models;
using StellaOps.Scheduler.Storage.Mongo.Internal;
using StellaOps.Scheduler.Storage.Mongo.Serialization;
namespace StellaOps.Scheduler.Storage.Mongo.Repositories;
internal sealed class PolicyRunJobRepository : IPolicyRunJobRepository
{
private static readonly FilterDefinitionBuilder<BsonDocument> Filter = Builders<BsonDocument>.Filter;
private static readonly SortDefinitionBuilder<BsonDocument> Sort = Builders<BsonDocument>.Sort;
private readonly IMongoCollection<BsonDocument> _collection;
public PolicyRunJobRepository(SchedulerMongoContext context)
{
if (context is null)
{
throw new ArgumentNullException(nameof(context));
}
_collection = context.Database.GetCollection<BsonDocument>(context.Options.PolicyJobsCollection);
}
public async Task InsertAsync(
PolicyRunJob job,
IClientSessionHandle? session = null,
CancellationToken cancellationToken = default)
{
ArgumentNullException.ThrowIfNull(job);
var document = PolicyRunJobDocumentMapper.ToBsonDocument(job);
if (session is null)
{
await _collection.InsertOneAsync(document, cancellationToken: cancellationToken).ConfigureAwait(false);
}
else
{
await _collection.InsertOneAsync(session, document, cancellationToken: cancellationToken).ConfigureAwait(false);
}
}
public async Task<PolicyRunJob?> GetAsync(
string tenantId,
string jobId,
IClientSessionHandle? session = null,
CancellationToken cancellationToken = default)
{
if (string.IsNullOrWhiteSpace(tenantId))
{
throw new ArgumentException("Tenant id must be provided.", nameof(tenantId));
}
if (string.IsNullOrWhiteSpace(jobId))
{
throw new ArgumentException("Job id must be provided.", nameof(jobId));
}
var filter = Filter.And(
Filter.Eq("_id", jobId),
Filter.Eq("tenantId", tenantId));
var query = session is null
? _collection.Find(filter)
: _collection.Find(session, filter);
var document = await query.FirstOrDefaultAsync(cancellationToken).ConfigureAwait(false);
return document is null ? null : PolicyRunJobDocumentMapper.FromBsonDocument(document);
}
public async Task<PolicyRunJob?> GetByRunIdAsync(
string tenantId,
string runId,
IClientSessionHandle? session = null,
CancellationToken cancellationToken = default)
{
if (string.IsNullOrWhiteSpace(tenantId))
{
throw new ArgumentException("Tenant id must be provided.", nameof(tenantId));
}
if (string.IsNullOrWhiteSpace(runId))
{
throw new ArgumentException("Run id must be provided.", nameof(runId));
}
var filter = Filter.And(
Filter.Eq("tenantId", tenantId),
Filter.Eq("runId", runId));
var query = session is null
? _collection.Find(filter)
: _collection.Find(session, filter);
var document = await query.FirstOrDefaultAsync(cancellationToken).ConfigureAwait(false);
return document is null ? null : PolicyRunJobDocumentMapper.FromBsonDocument(document);
}
public async Task<PolicyRunJob?> LeaseAsync(
string leaseOwner,
DateTimeOffset now,
TimeSpan leaseDuration,
int maxAttempts,
IClientSessionHandle? session = null,
CancellationToken cancellationToken = default)
{
ArgumentException.ThrowIfNullOrWhiteSpace(leaseOwner);
if (leaseDuration <= TimeSpan.Zero)
{
throw new ArgumentOutOfRangeException(nameof(leaseDuration), leaseDuration, "Lease duration must be positive.");
}
if (maxAttempts <= 0)
{
throw new ArgumentOutOfRangeException(nameof(maxAttempts), maxAttempts, "Max attempts must be greater than zero.");
}
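// A job is leasable when it is pending or failed, due (availableAt <= now),
// not currently leased (or its lease has expired), and still under the attempt cap.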
var statusFilter = Filter.In("status", new BsonArray(new[] { "pending", "failed" }));
var availabilityFilter = Filter.Lte("availableAt", now.UtcDateTime);
var leaseFilter = Filter.Or(
Filter.Exists("leaseOwner", false),
Filter.Eq("leaseOwner", BsonNull.Value),
Filter.Lt("leaseExpiresAt", now.UtcDateTime));
var attemptsFilter = Filter.Lt("attemptCount", maxAttempts);
var filter = Filter.And(statusFilter, availabilityFilter, leaseFilter, attemptsFilter);
var update = Builders<BsonDocument>.Update
.Set("status", "dispatching")
.Set("leaseOwner", leaseOwner)
.Set("leaseExpiresAt", now.Add(leaseDuration).UtcDateTime)
.Set("updatedAt", now.UtcDateTime);
var options = new FindOneAndUpdateOptions<BsonDocument>
{
ReturnDocument = ReturnDocument.After,
Sort = Sort.Descending("priorityRank").Ascending("createdAt")
};
var document = session is null
? await _collection.FindOneAndUpdateAsync(filter, update, options, cancellationToken).ConfigureAwait(false)
: await _collection.FindOneAndUpdateAsync(session, filter, update, options, cancellationToken).ConfigureAwait(false);
return document is null ? null : PolicyRunJobDocumentMapper.FromBsonDocument(document);
}
public async Task<IReadOnlyList<PolicyRunJob>> ListAsync(
string tenantId,
string? policyId = null,
PolicyRunMode? mode = null,
IReadOnlyCollection<PolicyRunJobStatus>? statuses = null,
DateTimeOffset? queuedAfter = null,
int limit = 50,
IClientSessionHandle? session = null,
CancellationToken cancellationToken = default)
{
if (string.IsNullOrWhiteSpace(tenantId))
{
throw new ArgumentException("Tenant id must be provided.", nameof(tenantId));
}
if (limit <= 0)
{
throw new ArgumentOutOfRangeException(nameof(limit), limit, "Limit must be greater than zero.");
}
var filters = new List<FilterDefinition<BsonDocument>>
{
Filter.Eq("tenantId", tenantId)
};
if (!string.IsNullOrWhiteSpace(policyId))
{
filters.Add(Filter.Eq("policyId", policyId));
}
if (mode is not null)
{
filters.Add(Filter.Eq("mode", mode.ToString()!.ToLowerInvariant()));
}
if (statuses is { Count: > 0 })
{
var array = new BsonArray(statuses.Select(static status => status.ToString().ToLowerInvariant()));
filters.Add(Filter.In("status", array));
}
if (queuedAfter is { } since)
{
filters.Add(Filter.Gte("queuedAt", since.UtcDateTime));
}
var filter = Filter.And(filters);
var sort = Sort.Descending("queuedAt").Descending("createdAt");
var query = session is null
? _collection.Find(filter)
: _collection.Find(session, filter);
var documents = await query
.Sort(sort)
.Limit(limit)
.ToListAsync(cancellationToken)
.ConfigureAwait(false);
return documents
.Select(PolicyRunJobDocumentMapper.FromBsonDocument)
.ToList();
}
public async Task<bool> ReplaceAsync(
PolicyRunJob job,
string? expectedLeaseOwner = null,
IClientSessionHandle? session = null,
CancellationToken cancellationToken = default)
{
ArgumentNullException.ThrowIfNull(job);
var filters = new List<FilterDefinition<BsonDocument>>
{
Filter.Eq("_id", job.Id),
Filter.Eq("tenantId", job.TenantId)
};
if (!string.IsNullOrEmpty(expectedLeaseOwner))
{
filters.Add(Filter.Eq("leaseOwner", expectedLeaseOwner));
}
var filter = Filter.And(filters);
var document = PolicyRunJobDocumentMapper.ToBsonDocument(job);
var options = new ReplaceOptions { IsUpsert = false };
ReplaceOneResult result;
if (session is null)
{
result = await _collection.ReplaceOneAsync(filter, document, options, cancellationToken).ConfigureAwait(false);
}
else
{
result = await _collection.ReplaceOneAsync(session, filter, document, options, cancellationToken).ConfigureAwait(false);
}
return result.MatchedCount > 0;
}
}

View File

@@ -0,0 +1,35 @@
using System.Collections.Immutable;
using StellaOps.Scheduler.Models;
namespace StellaOps.Scheduler.Storage.Mongo.Repositories;
/// <summary>
/// Filters applied when listing scheduler runs.
/// </summary>
public sealed class RunQueryOptions
{
/// <summary>
/// Optional schedule identifier to scope the list.
/// </summary>
public string? ScheduleId { get; init; }
/// <summary>
/// Optional set of run states to include. When empty all states are returned.
/// </summary>
public ImmutableArray<RunState> States { get; init; } = ImmutableArray<RunState>.Empty;
/// <summary>
/// Optional lower bound for creation timestamp (UTC).
/// </summary>
public DateTimeOffset? CreatedAfter { get; init; }
/// <summary>
/// Maximum number of runs to return (default 50 when unspecified).
/// </summary>
public int? Limit { get; init; }
/// <summary>
/// Sort order flag. Defaults to descending by createdAt.
/// </summary>
public bool SortAscending { get; init; }
}

View File

@@ -0,0 +1,176 @@
using System;
using System.Collections.Generic;
using System.Linq;
using MongoDB.Bson;
using MongoDB.Driver;
using StellaOps.Scheduler.Models;
using StellaOps.Scheduler.Storage.Mongo.Internal;
using StellaOps.Scheduler.Storage.Mongo.Serialization;
namespace StellaOps.Scheduler.Storage.Mongo.Repositories;
internal sealed class RunRepository : IRunRepository
{
private const int DefaultListLimit = 50;
private static readonly FilterDefinitionBuilder<BsonDocument> Filter = Builders<BsonDocument>.Filter;
private static readonly SortDefinitionBuilder<BsonDocument> Sort = Builders<BsonDocument>.Sort;
private readonly IMongoCollection<BsonDocument> _collection;
public RunRepository(SchedulerMongoContext context)
{
if (context is null)
{
throw new ArgumentNullException(nameof(context));
}
_collection = context.Database.GetCollection<BsonDocument>(context.Options.RunsCollection);
}
public async Task InsertAsync(
Run run,
IClientSessionHandle? session = null,
CancellationToken cancellationToken = default)
{
ArgumentNullException.ThrowIfNull(run);
var document = RunDocumentMapper.ToBsonDocument(run);
if (session is null)
{
await _collection.InsertOneAsync(document, cancellationToken: cancellationToken).ConfigureAwait(false);
}
else
{
await _collection.InsertOneAsync(session, document, cancellationToken: cancellationToken).ConfigureAwait(false);
}
}
public async Task<bool> UpdateAsync(
Run run,
IClientSessionHandle? session = null,
CancellationToken cancellationToken = default)
{
ArgumentNullException.ThrowIfNull(run);
var document = RunDocumentMapper.ToBsonDocument(run);
var filter = Filter.And(
Filter.Eq("_id", run.Id),
Filter.Eq("tenantId", run.TenantId));
var options = new ReplaceOptions { IsUpsert = false };
ReplaceOneResult result;
if (session is null)
{
result = await _collection.ReplaceOneAsync(filter, document, options, cancellationToken).ConfigureAwait(false);
}
else
{
result = await _collection.ReplaceOneAsync(session, filter, document, options, cancellationToken).ConfigureAwait(false);
}
return result.MatchedCount > 0;
}
public async Task<Run?> GetAsync(
string tenantId,
string runId,
IClientSessionHandle? session = null,
CancellationToken cancellationToken = default)
{
if (string.IsNullOrWhiteSpace(tenantId))
{
throw new ArgumentException("Tenant id must be provided.", nameof(tenantId));
}
if (string.IsNullOrWhiteSpace(runId))
{
throw new ArgumentException("Run id must be provided.", nameof(runId));
}
var filter = Filter.And(
Filter.Eq("_id", runId),
Filter.Eq("tenantId", tenantId));
var query = session is null
? _collection.Find(filter)
: _collection.Find(session, filter);
var document = await query.FirstOrDefaultAsync(cancellationToken).ConfigureAwait(false);
return document is null ? null : RunDocumentMapper.FromBsonDocument(document);
}
public async Task<IReadOnlyList<Run>> ListAsync(
string tenantId,
RunQueryOptions? options = null,
IClientSessionHandle? session = null,
CancellationToken cancellationToken = default)
{
if (string.IsNullOrWhiteSpace(tenantId))
{
throw new ArgumentException("Tenant id must be provided.", nameof(tenantId));
}
options ??= new RunQueryOptions();
var filters = new List<FilterDefinition<BsonDocument>>
{
Filter.Eq("tenantId", tenantId)
};
if (!string.IsNullOrWhiteSpace(options.ScheduleId))
{
filters.Add(Filter.Eq("scheduleId", options.ScheduleId));
}
if (options.States.Length > 0)
{
filters.Add(Filter.In("state", options.States.Select(state => state.ToString().ToLowerInvariant())));
}
if (options.CreatedAfter is { } createdAfter)
{
filters.Add(Filter.Gt("createdAt", createdAfter.ToUniversalTime().UtcDateTime));
}
var combined = Filter.And(filters);
var find = session is null
? _collection.Find(combined)
: _collection.Find(session, combined);
var limit = options.Limit is { } specified && specified > 0 ? specified : DefaultListLimit;
find = find.Limit(limit);
var sortDefinition = options.SortAscending
? Sort.Ascending("createdAt")
: Sort.Descending("createdAt");
find = find.Sort(sortDefinition);
var documents = await find.ToListAsync(cancellationToken).ConfigureAwait(false);
return documents.Select(RunDocumentMapper.FromBsonDocument).ToArray();
}
public async Task<IReadOnlyList<Run>> ListByStateAsync(
RunState state,
int limit = 50,
IClientSessionHandle? session = null,
CancellationToken cancellationToken = default)
{
if (limit <= 0)
{
throw new ArgumentOutOfRangeException(nameof(limit), limit, "Limit must be greater than zero.");
}
var filter = Filter.Eq("state", state.ToString().ToLowerInvariant());
var find = session is null
? _collection.Find(filter)
: _collection.Find(session, filter);
find = find.Sort(Sort.Ascending("createdAt"));
find = find.Limit(limit);
var documents = await find.ToListAsync(cancellationToken).ConfigureAwait(false);
return documents.Select(RunDocumentMapper.FromBsonDocument).ToArray();
}
}

View File

@@ -0,0 +1,79 @@
using MongoDB.Driver;
using StellaOps.Scheduler.Storage.Mongo.Documents;
using StellaOps.Scheduler.Storage.Mongo.Internal;
namespace StellaOps.Scheduler.Storage.Mongo.Repositories;
internal sealed class RunSummaryRepository : IRunSummaryRepository
{
private readonly IMongoCollection<RunSummaryDocument> _collection;
public RunSummaryRepository(SchedulerMongoContext context)
{
ArgumentNullException.ThrowIfNull(context);
_collection = context.Database.GetCollection<RunSummaryDocument>(context.Options.RunSummariesCollection);
}
public async Task<RunSummaryDocument?> GetAsync(
string tenantId,
string scheduleId,
CancellationToken cancellationToken = default)
{
if (string.IsNullOrWhiteSpace(tenantId))
{
throw new ArgumentException("Tenant id must be provided.", nameof(tenantId));
}
if (string.IsNullOrWhiteSpace(scheduleId))
{
throw new ArgumentException("Schedule id must be provided.", nameof(scheduleId));
}
var filter = Builders<RunSummaryDocument>.Filter.Eq(document => document.Id, CreateDocumentId(tenantId, scheduleId));
var document = await _collection.Find(filter).FirstOrDefaultAsync(cancellationToken).ConfigureAwait(false);
return document;
}
public async Task<IReadOnlyList<RunSummaryDocument>> ListAsync(
string tenantId,
CancellationToken cancellationToken = default)
{
if (string.IsNullOrWhiteSpace(tenantId))
{
throw new ArgumentException("Tenant id must be provided.", nameof(tenantId));
}
var filter = Builders<RunSummaryDocument>.Filter.Eq(document => document.TenantId, tenantId);
var sort = Builders<RunSummaryDocument>.Sort.Descending(document => document.UpdatedAt);
var documents = await _collection.Find(filter).Sort(sort).ToListAsync(cancellationToken).ConfigureAwait(false);
return documents;
}
public Task UpsertAsync(
RunSummaryDocument document,
CancellationToken cancellationToken = default)
{
ArgumentNullException.ThrowIfNull(document);
if (string.IsNullOrWhiteSpace(document.TenantId))
{
throw new ArgumentException("Tenant id must be provided.", nameof(document.TenantId));
}
if (string.IsNullOrWhiteSpace(document.ScheduleId))
{
throw new ArgumentException("Schedule id must be provided.", nameof(document.ScheduleId));
}
document.Id = CreateDocumentId(document.TenantId, document.ScheduleId);
var filter = Builders<RunSummaryDocument>.Filter.Eq(x => x.Id, document.Id);
return _collection.ReplaceOneAsync(filter, document, new ReplaceOptions { IsUpsert = true }, cancellationToken);
}
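// Composite key "<tenantId>:<scheduleId>" keeps one summary document per tenant/schedule pair.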
private static string CreateDocumentId(string tenantId, string scheduleId)
=> string.Create(tenantId.Length + scheduleId.Length + 1, (tenantId, scheduleId), static (span, value) =>
{
value.tenantId.AsSpan().CopyTo(span);
span[value.tenantId.Length] = ':';
value.scheduleId.AsSpan().CopyTo(span[(value.tenantId.Length + 1)..]);
});
}

View File

@@ -0,0 +1,22 @@
namespace StellaOps.Scheduler.Storage.Mongo.Repositories;
/// <summary>
/// Filters applied when listing scheduler schedules.
/// </summary>
public sealed class ScheduleQueryOptions
{
/// <summary>
/// When true, includes disabled schedules; otherwise disabled entries are excluded.
/// </summary>
public bool IncludeDisabled { get; init; }
/// <summary>
/// When true, includes soft-deleted schedules; by default deleted entries are excluded.
/// </summary>
public bool IncludeDeleted { get; init; }
/// <summary>
/// Optional maximum number of schedules to return.
/// </summary>
public int? Limit { get; init; }
}

View File

@@ -0,0 +1,180 @@
using System;
using System.Collections.Generic;
using System.Linq;
using MongoDB.Bson;
using MongoDB.Driver;
using StellaOps.Scheduler.Models;
using StellaOps.Scheduler.Storage.Mongo.Internal;
using StellaOps.Scheduler.Storage.Mongo.Serialization;
namespace StellaOps.Scheduler.Storage.Mongo.Repositories;
internal sealed class ScheduleRepository : IScheduleRepository
{
private static readonly FilterDefinitionBuilder<BsonDocument> Filter = Builders<BsonDocument>.Filter;
private static readonly UpdateDefinitionBuilder<BsonDocument> Update = Builders<BsonDocument>.Update;
private readonly IMongoCollection<BsonDocument> _collection;
public ScheduleRepository(SchedulerMongoContext context)
{
if (context is null)
{
throw new ArgumentNullException(nameof(context));
}
_collection = context.Database.GetCollection<BsonDocument>(context.Options.SchedulesCollection);
}
public async Task UpsertAsync(
Schedule schedule,
IClientSessionHandle? session = null,
CancellationToken cancellationToken = default)
{
ArgumentNullException.ThrowIfNull(schedule);
var document = ScheduleDocumentMapper.ToBsonDocument(schedule);
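// Replacing with a fresh document drops any prior soft-delete markers, reactivating the schedule.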
document.Remove("deletedAt");
document.Remove("deletedBy");
var filter = Filter.And(
Filter.Eq("_id", schedule.Id),
Filter.Eq("tenantId", schedule.TenantId));
var options = new ReplaceOptions { IsUpsert = true };
if (session is null)
{
await _collection.ReplaceOneAsync(filter, document, options, cancellationToken).ConfigureAwait(false);
}
else
{
await _collection.ReplaceOneAsync(session, filter, document, options, cancellationToken).ConfigureAwait(false);
}
}
public async Task<Schedule?> GetAsync(
string tenantId,
string scheduleId,
IClientSessionHandle? session = null,
CancellationToken cancellationToken = default)
{
if (string.IsNullOrWhiteSpace(tenantId))
{
throw new ArgumentException("Tenant id must be provided.", nameof(tenantId));
}
if (string.IsNullOrWhiteSpace(scheduleId))
{
throw new ArgumentException("Schedule id must be provided.", nameof(scheduleId));
}
var filter = Filter.And(
Filter.Eq("_id", scheduleId),
Filter.Eq("tenantId", tenantId),
Filter.Or(
Filter.Exists("deletedAt", false),
Filter.Eq("deletedAt", BsonNull.Value)));
var query = session is null
? _collection.Find(filter)
: _collection.Find(session, filter);
var document = await query.FirstOrDefaultAsync(cancellationToken).ConfigureAwait(false);
return document is null ? null : ScheduleDocumentMapper.FromBsonDocument(document);
}
public async Task<IReadOnlyList<Schedule>> ListAsync(
string tenantId,
ScheduleQueryOptions? options = null,
IClientSessionHandle? session = null,
CancellationToken cancellationToken = default)
{
if (string.IsNullOrWhiteSpace(tenantId))
{
throw new ArgumentException("Tenant id must be provided.", nameof(tenantId));
}
options ??= new ScheduleQueryOptions();
var filters = new List<FilterDefinition<BsonDocument>>
{
Filter.Eq("tenantId", tenantId)
};
if (!options.IncludeDeleted)
{
filters.Add(Filter.Or(
Filter.Exists("deletedAt", false),
Filter.Eq("deletedAt", BsonNull.Value)));
}
if (!options.IncludeDisabled)
{
filters.Add(Filter.Eq("enabled", true));
}
var combined = Filter.And(filters);
var find = session is null
? _collection.Find(combined)
: _collection.Find(session, combined);
if (options.Limit is { } limit && limit > 0)
{
find = find.Limit(limit);
}
var documents = await find.Sort(Builders<BsonDocument>.Sort.Ascending("name"))
.ToListAsync(cancellationToken)
.ConfigureAwait(false);
return documents.Select(ScheduleDocumentMapper.FromBsonDocument).ToArray();
}
public async Task<bool> SoftDeleteAsync(
string tenantId,
string scheduleId,
string deletedBy,
DateTimeOffset deletedAt,
IClientSessionHandle? session = null,
CancellationToken cancellationToken = default)
{
if (string.IsNullOrWhiteSpace(tenantId))
{
throw new ArgumentException("Tenant id must be provided.", nameof(tenantId));
}
if (string.IsNullOrWhiteSpace(scheduleId))
{
throw new ArgumentException("Schedule id must be provided.", nameof(scheduleId));
}
if (string.IsNullOrWhiteSpace(deletedBy))
{
throw new ArgumentException("Deleted by must be provided.", nameof(deletedBy));
}
var filter = Filter.And(
Filter.Eq("_id", scheduleId),
Filter.Eq("tenantId", tenantId));
var utc = deletedAt.ToUniversalTime();
var update = Update
.Set("deletedAt", utc.UtcDateTime)
.Set("deletedBy", deletedBy)
.Set("enabled", false)
.Set("updatedAt", utc.UtcDateTime)
.Set("updatedBy", deletedBy);
UpdateResult result;
if (session is null)
{
result = await _collection.UpdateOneAsync(filter, update, cancellationToken: cancellationToken).ConfigureAwait(false);
}
else
{
result = await _collection.UpdateOneAsync(session, filter, update, cancellationToken: cancellationToken).ConfigureAwait(false);
}
return result.ModifiedCount > 0;
}
}

View File

@@ -0,0 +1,23 @@
using MongoDB.Bson;
using StellaOps.Scheduler.Models;
namespace StellaOps.Scheduler.Storage.Mongo.Serialization;
internal static class AuditRecordDocumentMapper
{
public static BsonDocument ToBsonDocument(AuditRecord record)
{
ArgumentNullException.ThrowIfNull(record);
var json = CanonicalJsonSerializer.Serialize(record);
var document = BsonDocument.Parse(json);
document["_id"] = record.Id;
return document;
}
public static AuditRecord FromBsonDocument(BsonDocument document)
{
ArgumentNullException.ThrowIfNull(document);
var node = document.ToCanonicalJsonNode();
return CanonicalJsonSerializer.Deserialize<AuditRecord>(node.ToCanonicalJson());
}
}

View File

@@ -0,0 +1,144 @@
using System.Globalization;
using System.Text.Json;
using System.Text.Json.Nodes;
using MongoDB.Bson;
using MongoDB.Bson.IO;
namespace StellaOps.Scheduler.Storage.Mongo.Serialization;
internal static class BsonDocumentJsonExtensions
{
public static JsonNode ToCanonicalJsonNode(this BsonDocument document, params string[] fieldsToRemove)
{
ArgumentNullException.ThrowIfNull(document);
var clone = document.DeepClone().AsBsonDocument;
clone.Remove("_id");
if (fieldsToRemove is { Length: > 0 })
{
foreach (var field in fieldsToRemove)
{
clone.Remove(field);
}
}
var json = clone.ToJson(new JsonWriterSettings
{
OutputMode = JsonOutputMode.RelaxedExtendedJson,
Indent = false,
});
var node = JsonNode.Parse(json) ?? throw new InvalidOperationException("Unable to parse BSON document JSON.");
return NormalizeExtendedJson(node);
}
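// Rewrites relaxed Extended JSON date envelopes ({"$date": ...}) back into plain ISO-8601 strings
// so the canonical JSON deserializers see the same shape the canonical serializer produced.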
private static JsonNode NormalizeExtendedJson(JsonNode node)
{
if (node is JsonObject obj)
{
if (TryConvertExtendedDate(obj, out var replacement))
{
return replacement;
}
foreach (var property in obj.ToList())
{
if (property.Value is null)
{
continue;
}
var normalized = NormalizeExtendedJson(property.Value);
if (!ReferenceEquals(normalized, property.Value))
{
obj[property.Key] = normalized;
}
}
return obj;
}
if (node is JsonArray array)
{
for (var i = 0; i < array.Count; i++)
{
if (array[i] is null)
{
continue;
}
var normalized = NormalizeExtendedJson(array[i]!);
if (!ReferenceEquals(normalized, array[i]))
{
array[i] = normalized;
}
}
return array;
}
return node;
}
private static bool TryConvertExtendedDate(JsonObject obj, out JsonNode replacement)
{
replacement = obj;
if (obj.Count != 1 || !obj.TryGetPropertyValue("$date", out var value) || value is null)
{
return false;
}
if (value is JsonValue directValue)
{
if (directValue.TryGetValue(out string? dateString) && TryParseIso(dateString, out var iso))
{
replacement = JsonValue.Create(iso);
return true;
}
if (directValue.TryGetValue(out long epochMilliseconds))
{
replacement = JsonValue.Create(DateTimeOffset.FromUnixTimeMilliseconds(epochMilliseconds).ToString("O"));
return true;
}
}
else if (value is JsonObject nested &&
nested.TryGetPropertyValue("$numberLong", out var numberNode) &&
numberNode is JsonValue numberValue &&
numberValue.TryGetValue(out string? numberString) &&
long.TryParse(numberString, NumberStyles.Integer, CultureInfo.InvariantCulture, out var ms))
{
replacement = JsonValue.Create(DateTimeOffset.FromUnixTimeMilliseconds(ms).ToString("O"));
return true;
}
return false;
}
private static bool TryParseIso(string? value, out string iso)
{
iso = string.Empty;
if (string.IsNullOrWhiteSpace(value))
{
return false;
}
if (DateTimeOffset.TryParse(value, CultureInfo.InvariantCulture, DateTimeStyles.RoundtripKind, out var parsed))
{
iso = parsed.ToUniversalTime().ToString("O");
return true;
}
return false;
}
public static string ToCanonicalJson(this JsonNode node)
{
ArgumentNullException.ThrowIfNull(node);
return node.ToJsonString(new JsonSerializerOptions
{
WriteIndented = false
});
}
}

View File

@@ -0,0 +1,125 @@
using MongoDB.Bson;
using StellaOps.Scheduler.Models;
namespace StellaOps.Scheduler.Storage.Mongo.Serialization;
internal static class GraphJobDocumentMapper
{
private const string PayloadField = "payload";
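// Top-level fields are queryable projections; the full canonical job document is preserved under "payload".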
public static BsonDocument ToBsonDocument(GraphBuildJob job)
{
ArgumentNullException.ThrowIfNull(job);
var payloadJson = CanonicalJsonSerializer.Serialize(job);
var payloadDocument = BsonDocument.Parse(payloadJson);
var document = new BsonDocument
{
["_id"] = job.Id,
["tenantId"] = job.TenantId,
["kind"] = "build",
["status"] = job.Status.ToString().ToLowerInvariant(),
["createdAt"] = job.CreatedAt.UtcDateTime,
["attempts"] = job.Attempts,
[PayloadField] = payloadDocument
};
if (!string.IsNullOrWhiteSpace(job.GraphSnapshotId))
{
document["graphSnapshotId"] = job.GraphSnapshotId;
}
if (!string.IsNullOrWhiteSpace(job.CorrelationId))
{
document["correlationId"] = job.CorrelationId;
}
if (job.StartedAt is { } startedAt)
{
document["startedAt"] = startedAt.UtcDateTime;
}
if (job.CompletedAt is { } completedAt)
{
document["completedAt"] = completedAt.UtcDateTime;
}
if (!string.IsNullOrWhiteSpace(job.Error))
{
document["error"] = job.Error;
}
return document;
}
public static BsonDocument ToBsonDocument(GraphOverlayJob job)
{
ArgumentNullException.ThrowIfNull(job);
var payloadJson = CanonicalJsonSerializer.Serialize(job);
var payloadDocument = BsonDocument.Parse(payloadJson);
var document = new BsonDocument
{
["_id"] = job.Id,
["tenantId"] = job.TenantId,
["kind"] = "overlay",
["status"] = job.Status.ToString().ToLowerInvariant(),
["createdAt"] = job.CreatedAt.UtcDateTime,
["attempts"] = job.Attempts,
[PayloadField] = payloadDocument
};
document["graphSnapshotId"] = job.GraphSnapshotId;
document["overlayKind"] = job.OverlayKind.ToString().ToLowerInvariant();
document["overlayKey"] = job.OverlayKey;
if (!string.IsNullOrWhiteSpace(job.BuildJobId))
{
document["buildJobId"] = job.BuildJobId;
}
if (!string.IsNullOrWhiteSpace(job.CorrelationId))
{
document["correlationId"] = job.CorrelationId;
}
if (job.StartedAt is { } startedAt)
{
document["startedAt"] = startedAt.UtcDateTime;
}
if (job.CompletedAt is { } completedAt)
{
document["completedAt"] = completedAt.UtcDateTime;
}
if (!string.IsNullOrWhiteSpace(job.Error))
{
document["error"] = job.Error;
}
return document;
}
public static GraphBuildJob ToGraphBuildJob(BsonDocument document)
{
ArgumentNullException.ThrowIfNull(document);
var payloadDocument = document[PayloadField].AsBsonDocument;
var json = payloadDocument.ToJson();
var job = CanonicalJsonSerializer.Deserialize<GraphBuildJob>(json);
return job;
}
public static GraphOverlayJob ToGraphOverlayJob(BsonDocument document)
{
ArgumentNullException.ThrowIfNull(document);
var payloadDocument = document[PayloadField].AsBsonDocument;
var json = payloadDocument.ToJson();
var job = CanonicalJsonSerializer.Deserialize<GraphOverlayJob>(json);
return job;
}
}

View File

@@ -0,0 +1,57 @@
using System;
using System.Security.Cryptography;
using System.Text;
using MongoDB.Bson;
using StellaOps.Scheduler.Models;
namespace StellaOps.Scheduler.Storage.Mongo.Serialization;
internal static class ImpactSetDocumentMapper
{
private const string SelectorHashPrefix = "selector::";
public static BsonDocument ToBsonDocument(ImpactSet impactSet)
{
ArgumentNullException.ThrowIfNull(impactSet);
var json = CanonicalJsonSerializer.Serialize(impactSet);
var document = BsonDocument.Parse(json);
document["_id"] = ComputeDocumentId(impactSet);
document["selectorDigest"] = ComputeSelectorDigest(impactSet);
return document;
}
public static ImpactSet FromBsonDocument(BsonDocument document)
{
ArgumentNullException.ThrowIfNull(document);
var node = document.ToCanonicalJsonNode();
return CanonicalJsonSerializer.Deserialize<ImpactSet>(node.ToCanonicalJson());
}
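// Falls back to a deterministic selector-hash id when the ImpactSet carries no explicit snapshot id.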
private static string ComputeDocumentId(ImpactSet impactSet)
{
if (!string.IsNullOrWhiteSpace(impactSet.SnapshotId))
{
return impactSet.SnapshotId!;
}
var selectorJson = CanonicalJsonSerializer.Serialize(impactSet.Selector);
using var sha256 = SHA256.Create();
var hash = sha256.ComputeHash(Encoding.UTF8.GetBytes(selectorJson));
return SelectorHashPrefix + Convert.ToHexString(hash).ToLowerInvariant();
}
private static string ComputeSelectorDigest(ImpactSet impactSet)
{
return ComputeSelectorDigest(impactSet.Selector);
}
public static string ComputeSelectorDigest(Selector selector)
{
ArgumentNullException.ThrowIfNull(selector);
var selectorJson = CanonicalJsonSerializer.Serialize(selector);
using var sha256 = SHA256.Create();
var hash = sha256.ComputeHash(Encoding.UTF8.GetBytes(selectorJson));
return Convert.ToHexString(hash).ToLowerInvariant();
}
}

View File

@@ -0,0 +1,23 @@
using MongoDB.Bson;
using StellaOps.Scheduler.Models;
namespace StellaOps.Scheduler.Storage.Mongo.Serialization;
internal static class PolicyRunJobDocumentMapper
{
public static BsonDocument ToBsonDocument(PolicyRunJob job)
{
ArgumentNullException.ThrowIfNull(job);
var json = CanonicalJsonSerializer.Serialize(job);
var document = BsonDocument.Parse(json);
document["_id"] = job.Id;
return document;
}
public static PolicyRunJob FromBsonDocument(BsonDocument document)
{
ArgumentNullException.ThrowIfNull(document);
var node = document.ToCanonicalJsonNode();
return CanonicalJsonSerializer.Deserialize<PolicyRunJob>(node.ToCanonicalJson());
}
}

View File

@@ -0,0 +1,23 @@
using MongoDB.Bson;
using StellaOps.Scheduler.Models;
namespace StellaOps.Scheduler.Storage.Mongo.Serialization;
internal static class RunDocumentMapper
{
public static BsonDocument ToBsonDocument(Run run)
{
ArgumentNullException.ThrowIfNull(run);
var json = CanonicalJsonSerializer.Serialize(run);
var document = BsonDocument.Parse(json);
document["_id"] = run.Id;
return document;
}
public static Run FromBsonDocument(BsonDocument document)
{
ArgumentNullException.ThrowIfNull(document);
var node = document.ToCanonicalJsonNode();
return CanonicalJsonSerializer.Deserialize<Run>(node.ToCanonicalJson());
}
}

View File

@@ -0,0 +1,25 @@
using MongoDB.Bson;
using StellaOps.Scheduler.Models;
namespace StellaOps.Scheduler.Storage.Mongo.Serialization;
internal static class ScheduleDocumentMapper
{
private static readonly string[] IgnoredFields = { "deletedAt", "deletedBy" };
public static BsonDocument ToBsonDocument(Schedule schedule)
{
ArgumentNullException.ThrowIfNull(schedule);
var json = CanonicalJsonSerializer.Serialize(schedule);
var document = BsonDocument.Parse(json);
document["_id"] = schedule.Id;
return document;
}
public static Schedule FromBsonDocument(BsonDocument document)
{
ArgumentNullException.ThrowIfNull(document);
var node = document.ToCanonicalJsonNode(IgnoredFields);
return CanonicalJsonSerializer.Deserialize<Schedule>(node.ToCanonicalJson());
}
}

Some files were not shown because too many files have changed in this diff.