Some checks failed
AOC Guard CI / aoc-guard (push) Has been cancelled
AOC Guard CI / aoc-verify (push) Has been cancelled
Concelier Attestation Tests / attestation-tests (push) Has been cancelled
Docs CI / lint-and-preview (push) Has been cancelled
Export Center CI / export-ci (push) Has been cancelled
devportal-offline / build-offline (push) Has been cancelled
src/AirGap/scripts/verify-manifest.sh (new file, 45 lines)
@@ -0,0 +1,45 @@
#!/usr/bin/env bash
set -euo pipefail

# Offline verifier for AirGap manifest/bundle hashes.
# Usage: verify-manifest.sh path/to/manifest.json path/to/bundle.tar.gz [manifest-signature.bin] [pubkey.pem]

manifest=${1:?manifest path required}
bundle=${2:?bundle path required}
sig=${3:-}
pub=${4:-}

if ! command -v jq >/dev/null; then
  echo "jq is required for offline validation" >&2
  exit 2
fi

calc_sha() {
  sha256sum "$1" | awk '{print $1}'
}

manifest_hash=$(calc_sha "$manifest")
expected_manifest_hash=$(jq -r '.hashes.manifestSha256' "$manifest")

if [[ "$manifest_hash" != "$expected_manifest_hash" ]]; then
  echo "manifest hash mismatch: got $manifest_hash expected $expected_manifest_hash" >&2
  exit 3
fi

bundle_hash=$(calc_sha "$bundle")
expected_bundle_hash=$(jq -r '.hashes.bundleSha256' "$manifest")

if [[ "$bundle_hash" != "$expected_bundle_hash" ]]; then
  echo "bundle hash mismatch: got $bundle_hash expected $expected_bundle_hash" >&2
  exit 4
fi

if [[ -n "$sig" && -n "$pub" ]]; then
  if ! command -v openssl >/dev/null; then
    echo "openssl required for signature verification" >&2
    exit 5
  fi
  openssl dgst -sha256 -verify "$pub" -signature "$sig" "$manifest" >/dev/null
fi

echo "Manifest and bundle hashes verified${sig:+; signature verified}."
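For reference, a typical offline invocation might look like the sketch below. The file names and the signing key pair are hypothetical placeholders; only the argument order and the `openssl dgst -sha256 -verify` check come from the script itself, and the `-sign` command is just one assumed way such a detached signature could have been produced (RSA or EC key).

# Hash check only (placeholders, not part of this change):
./verify-manifest.sh airgap/manifest.json airgap/bundle.tar.gz

# Hashes plus a detached signature, e.g. one created earlier with:
#   openssl dgst -sha256 -sign private.pem -out manifest.sig manifest.json
./verify-manifest.sh airgap/manifest.json airgap/bundle.tar.gz manifest.sig pubkey.pem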
@@ -14,7 +14,7 @@ public sealed class AuthorityDataSource : DataSourceBase
    /// <summary>
    /// Default schema name for Authority tables.
    /// </summary>
-   public const string DefaultSchemaName = "auth";
+   public const string DefaultSchemaName = "authority";

    /// <summary>
    /// Creates a new Authority data source.
@@ -0,0 +1,155 @@
using Microsoft.Extensions.Logging;
using StellaOps.Authority.Storage.Postgres.Models;
using StellaOps.Authority.Storage.Postgres.Repositories;
using System.Security.Cryptography;
using System.Text;

namespace StellaOps.Authority.Storage.Postgres.Backfill;

/// <summary>
/// Performs one-way backfill from the secondary (legacy) store into the primary PostgreSQL store.
/// </summary>
public sealed class AuthorityBackfillService
{
    private readonly ITokenRepository _primaryTokens;
    private readonly ISecondaryTokenRepository _secondaryTokens;
    private readonly IRefreshTokenRepository _primaryRefreshTokens;
    private readonly ISecondaryRefreshTokenRepository _secondaryRefreshTokens;
    private readonly IUserRepository _primaryUsers;
    private readonly ISecondaryUserRepository _secondaryUsers;
    private readonly ILogger<AuthorityBackfillService> _logger;

    public AuthorityBackfillService(
        ITokenRepository primaryTokens,
        ISecondaryTokenRepository secondaryTokens,
        IRefreshTokenRepository primaryRefreshTokens,
        ISecondaryRefreshTokenRepository secondaryRefreshTokens,
        IUserRepository primaryUsers,
        ISecondaryUserRepository secondaryUsers,
        ILogger<AuthorityBackfillService> logger)
    {
        _primaryTokens = primaryTokens;
        _secondaryTokens = secondaryTokens;
        _primaryRefreshTokens = primaryRefreshTokens;
        _secondaryRefreshTokens = secondaryRefreshTokens;
        _primaryUsers = primaryUsers;
        _secondaryUsers = secondaryUsers;
        _logger = logger;
    }

    public async Task<BackfillResult> BackfillAsync(string tenantId, CancellationToken cancellationToken = default)
    {
        var users = await _secondaryUsers.GetAllAsync(tenantId, null, int.MaxValue, 0, cancellationToken).ConfigureAwait(false);
        var tokensCopied = 0;
        var tokensSkipped = 0;
        var refreshCopied = 0;
        var refreshSkipped = 0;
        var primaryTokensSnapshot = new List<TokenEntity>();
        var secondaryTokensSnapshot = new List<TokenEntity>();
        var primaryRefreshSnapshot = new List<RefreshTokenEntity>();
        var secondaryRefreshSnapshot = new List<RefreshTokenEntity>();

        foreach (var user in users)
        {
            cancellationToken.ThrowIfCancellationRequested();

            var primaryUser = await _primaryUsers.GetByIdAsync(tenantId, user.Id, cancellationToken).ConfigureAwait(false);
            if (primaryUser is null)
            {
                await _primaryUsers.CreateAsync(user, cancellationToken).ConfigureAwait(false);
            }

            var secondaryTokens = await _secondaryTokens.GetByUserIdAsync(tenantId, user.Id, cancellationToken).ConfigureAwait(false);
            var primaryTokens = await _primaryTokens.GetByUserIdAsync(tenantId, user.Id, cancellationToken).ConfigureAwait(false);
            primaryTokensSnapshot.AddRange(primaryTokens);
            secondaryTokensSnapshot.AddRange(secondaryTokens);
            foreach (var token in secondaryTokens)
            {
                if (await _primaryTokens.GetByIdAsync(tenantId, token.Id, cancellationToken).ConfigureAwait(false) is null)
                {
                    await _primaryTokens.CreateAsync(tenantId, token, cancellationToken).ConfigureAwait(false);
                    primaryTokensSnapshot.Add(token);
                    tokensCopied++;
                }
                else
                {
                    tokensSkipped++;
                }
            }

            var secondaryRefreshTokens = await _secondaryRefreshTokens.GetByUserIdAsync(tenantId, user.Id, cancellationToken).ConfigureAwait(false);
            var primaryRefreshTokens = await _primaryRefreshTokens.GetByUserIdAsync(tenantId, user.Id, cancellationToken).ConfigureAwait(false);
            primaryRefreshSnapshot.AddRange(primaryRefreshTokens);
            secondaryRefreshSnapshot.AddRange(secondaryRefreshTokens);
            foreach (var refresh in secondaryRefreshTokens)
            {
                if (await _primaryRefreshTokens.GetByIdAsync(tenantId, refresh.Id, cancellationToken).ConfigureAwait(false) is null)
                {
                    await _primaryRefreshTokens.CreateAsync(tenantId, refresh, cancellationToken).ConfigureAwait(false);
                    primaryRefreshSnapshot.Add(refresh);
                    refreshCopied++;
                }
                else
                {
                    refreshSkipped++;
                }
            }
        }

        var secondaryChecksum = ComputeChecksums(secondaryTokensSnapshot, secondaryRefreshSnapshot);
        var primaryChecksum = ComputeChecksums(primaryTokensSnapshot, primaryRefreshSnapshot);

        return new BackfillResult(
            tenantId,
            users.Count,
            tokensCopied,
            tokensSkipped,
            refreshCopied,
            refreshSkipped,
            primaryChecksum,
            secondaryChecksum);
    }

    private static BackfillChecksum ComputeChecksums(
        IReadOnlyCollection<TokenEntity> tokens,
        IReadOnlyCollection<RefreshTokenEntity> refreshTokens)
    {
        var tokenHash = ComputeHash(tokens.Select(t =>
            $"{t.Id}|{t.TenantId}|{t.UserId}|{t.TokenHash}|{t.TokenType}|{t.ExpiresAt.UtcDateTime:o}|{t.RevokedAt?.UtcDateTime:o}|{t.RevokedBy}|{string.Join(',', t.Scopes)}"));
        var refreshHash = ComputeHash(refreshTokens.Select(t =>
            $"{t.Id}|{t.TenantId}|{t.UserId}|{t.TokenHash}|{t.AccessTokenId}|{t.ClientId}|{t.ExpiresAt.UtcDateTime:o}|{t.RevokedAt?.UtcDateTime:o}|{t.RevokedBy}|{t.ReplacedBy}"));

        return new BackfillChecksum(tokens.Count, refreshTokens.Count, tokenHash, refreshHash);
    }

    private static string ComputeHash(IEnumerable<string> lines)
    {
        using var sha = SHA256.Create();
        foreach (var line in lines.OrderBy(l => l, StringComparer.Ordinal))
        {
            var bytes = Encoding.UTF8.GetBytes(line);
            sha.TransformBlock(bytes, 0, bytes.Length, null, 0);
        }
        sha.TransformFinalBlock(Array.Empty<byte>(), 0, 0);
        return Convert.ToHexString(sha.Hash ?? Array.Empty<byte>());
    }
}

public sealed record BackfillChecksum(int TokenCount, int RefreshTokenCount, string TokenChecksum, string RefreshTokenChecksum);

public sealed record BackfillResult(
    string TenantId,
    int UsersProcessed,
    int TokensCopied,
    int TokensSkipped,
    int RefreshTokensCopied,
    int RefreshTokensSkipped,
    BackfillChecksum PrimaryChecksum,
    BackfillChecksum SecondaryChecksum)
{
    public bool ChecksumsMatch =>
        PrimaryChecksum.TokenChecksum == SecondaryChecksum.TokenChecksum &&
        PrimaryChecksum.RefreshTokenChecksum == SecondaryChecksum.RefreshTokenChecksum &&
        PrimaryChecksum.TokenCount == SecondaryChecksum.TokenCount &&
        PrimaryChecksum.RefreshTokenCount == SecondaryChecksum.RefreshTokenCount;
}
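A minimal usage sketch of the backfill pass during cutover. Only BackfillAsync, BackfillResult, and ChecksumsMatch come from the code above; serviceProvider, logger, and the tenant id are placeholders, and resolving the service assumes dual-write registration (see the ServiceCollectionExtensions hunk below, which adds AuthorityBackfillService only when dual-write is enabled).

// Sketch only: wiring of the concrete repositories is omitted and the tenant id is hypothetical.
var backfill = serviceProvider.GetRequiredService<AuthorityBackfillService>();
var result = await backfill.BackfillAsync("tenant-a", cancellationToken);

if (!result.ChecksumsMatch)
{
    // The counts or SHA-256 checksums of the primary vs. secondary snapshots diverged; hold the cutover.
    logger.LogError(
        "Backfill verification failed for {Tenant}: primary={Primary}, secondary={Secondary}",
        result.TenantId, result.PrimaryChecksum, result.SecondaryChecksum);
}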
@@ -0,0 +1,31 @@
using System.Diagnostics.Metrics;
using System.Threading;

namespace StellaOps.Authority.Storage.Postgres;

/// <summary>
/// Captures counters for dual-write operations to aid verification during cutover.
/// </summary>
public sealed class DualWriteMetrics : IDisposable
{
    private readonly Meter _meter = new("StellaOps.Authority.Storage.Postgres.DualWrite", "1.0.0");
    private readonly Counter<long> _primaryWrites;
    private readonly Counter<long> _secondaryWrites;
    private readonly Counter<long> _secondaryWriteFailures;
    private readonly Counter<long> _fallbackReads;

    public DualWriteMetrics()
    {
        _primaryWrites = _meter.CreateCounter<long>("authority.dualwrite.primary.writes");
        _secondaryWrites = _meter.CreateCounter<long>("authority.dualwrite.secondary.writes");
        _secondaryWriteFailures = _meter.CreateCounter<long>("authority.dualwrite.secondary.write.failures");
        _fallbackReads = _meter.CreateCounter<long>("authority.dualwrite.fallback.reads");
    }

    public void RecordPrimaryWrite() => _primaryWrites.Add(1);
    public void RecordSecondaryWrite() => _secondaryWrites.Add(1);
    public void RecordSecondaryWriteFailure() => _secondaryWriteFailures.Add(1);
    public void RecordFallbackRead() => _fallbackReads.Add(1);

    public void Dispose() => _meter.Dispose();
}
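These counters can be observed in-process during cutover without any exporter; the sketch below uses System.Diagnostics.Metrics.MeterListener and assumes nothing beyond the meter name defined above. How the measurements are surfaced (console, test assertion, OpenTelemetry) is up to the host.

using System.Diagnostics.Metrics;

// Sketch: print every dual-write counter increment for ad-hoc verification.
var listener = new MeterListener();
listener.InstrumentPublished = (instrument, l) =>
{
    if (instrument.Meter.Name == "StellaOps.Authority.Storage.Postgres.DualWrite")
    {
        l.EnableMeasurementEvents(instrument);
    }
};
listener.SetMeasurementEventCallback<long>((instrument, value, tags, state) =>
    Console.WriteLine($"{instrument.Name} += {value}"));
listener.Start();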
@@ -0,0 +1,46 @@
using System.Diagnostics.CodeAnalysis;

namespace StellaOps.Authority.Storage.Postgres;

/// <summary>
/// Options controlling dual-write behaviour during Mongo → PostgreSQL cutover.
/// </summary>
public sealed class DualWriteOptions
{
    /// <summary>
    /// Whether dual-write is enabled. When false, repositories run primary-only.
    /// </summary>
    public bool Enabled { get; set; }

    /// <summary>
    /// When true, write operations are attempted against both primary and secondary repositories.
    /// </summary>
    public bool WriteSecondary { get; set; } = true;

    /// <summary>
    /// When true, reads will fall back to the secondary repository if the primary has no result.
    /// </summary>
    public bool FallbackToSecondary { get; set; } = true;

    /// <summary>
    /// When true, any secondary write failure is logged but does not throw; primary success is preserved.
    /// </summary>
    public bool LogSecondaryFailuresOnly { get; set; } = true;

    /// <summary>
    /// When true, secondary write/read failures propagate to callers.
    /// </summary>
    public bool FailFastOnSecondary { get; set; }

    /// <summary>
    /// Optional tag describing which backend is primary (for metrics/logging only).
    /// </summary>
    [AllowNull]
    public string PrimaryBackend { get; set; } = "Postgres";

    /// <summary>
    /// Optional tag describing which backend is secondary (for metrics/logging only).
    /// </summary>
    [AllowNull]
    public string? SecondaryBackend { get; set; } = "Mongo";
}
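These options bind from the DualWrite child of the storage section; the registration code further below reads "{sectionName}:DualWrite" with a default section of "Postgres:Authority". A configuration sketch using in-memory keys (the values are illustrative, not recommended defaults; an appsettings.json file would carry the same keys under Postgres:Authority:DualWrite):

using Microsoft.Extensions.Configuration;

// Sketch: the same keys could come from appsettings.json or environment variables.
var configuration = new ConfigurationBuilder()
    .AddInMemoryCollection(new Dictionary<string, string?>
    {
        ["Postgres:Authority:DualWrite:Enabled"] = "true",
        ["Postgres:Authority:DualWrite:WriteSecondary"] = "true",
        ["Postgres:Authority:DualWrite:FallbackToSecondary"] = "true",
        ["Postgres:Authority:DualWrite:LogSecondaryFailuresOnly"] = "true",
        ["Postgres:Authority:DualWrite:FailFastOnSecondary"] = "false",
        ["Postgres:Authority:DualWrite:SecondaryBackend"] = "Mongo",
    })
    .Build();

Note that the decorators only rethrow when FailFastOnSecondary is true and LogSecondaryFailuresOnly is false, so enabling fail-fast also requires turning the logging-only flag off.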
@@ -0,0 +1,175 @@
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Options;
using StellaOps.Authority.Storage.Postgres.Models;

namespace StellaOps.Authority.Storage.Postgres.Repositories;

/// <summary>
/// Decorator that writes refresh tokens to both primary and secondary stores during cutover.
/// </summary>
public sealed class DualWriteRefreshTokenRepository : IRefreshTokenRepository
{
    private readonly IRefreshTokenRepository _primary;
    private readonly ISecondaryRefreshTokenRepository _secondary;
    private readonly DualWriteOptions _options;
    private readonly DualWriteMetrics _metrics;
    private readonly ILogger<DualWriteRefreshTokenRepository> _logger;

    public DualWriteRefreshTokenRepository(
        IRefreshTokenRepository primary,
        ISecondaryRefreshTokenRepository secondary,
        IOptions<DualWriteOptions> options,
        DualWriteMetrics metrics,
        ILogger<DualWriteRefreshTokenRepository> logger)
    {
        _primary = primary;
        _secondary = secondary;
        _options = options.Value;
        _metrics = metrics;
        _logger = logger;
    }

    public async Task<RefreshTokenEntity?> GetByIdAsync(string tenantId, Guid id, CancellationToken cancellationToken = default)
    {
        var primary = await _primary.GetByIdAsync(tenantId, id, cancellationToken).ConfigureAwait(false);
        if (primary is not null || !_options.FallbackToSecondary)
        {
            return primary;
        }

        var secondary = await SafeSecondaryCall(() => _secondary.GetByIdAsync(tenantId, id, cancellationToken)).ConfigureAwait(false);
        if (secondary is not null)
        {
            _metrics.RecordFallbackRead();
            _logger.LogInformation("Dual-write fallback refresh token hit for tenant {TenantId} token {TokenId}", tenantId, id);
        }

        return secondary;
    }

    public async Task<RefreshTokenEntity?> GetByHashAsync(string tokenHash, CancellationToken cancellationToken = default)
    {
        var primary = await _primary.GetByHashAsync(tokenHash, cancellationToken).ConfigureAwait(false);
        if (primary is not null || !_options.FallbackToSecondary)
        {
            return primary;
        }

        var secondary = await SafeSecondaryCall(() => _secondary.GetByHashAsync(tokenHash, cancellationToken)).ConfigureAwait(false);
        if (secondary is not null)
        {
            _metrics.RecordFallbackRead();
            _logger.LogInformation("Dual-write fallback refresh token hash hit for {Hash}", tokenHash);
        }

        return secondary;
    }

    public async Task<IReadOnlyList<RefreshTokenEntity>> GetByUserIdAsync(string tenantId, Guid userId, CancellationToken cancellationToken = default)
    {
        var primary = await _primary.GetByUserIdAsync(tenantId, userId, cancellationToken).ConfigureAwait(false);
        if (primary.Count > 0 || !_options.FallbackToSecondary)
        {
            return primary;
        }

        var secondary = await SafeSecondaryCall(() => _secondary.GetByUserIdAsync(tenantId, userId, cancellationToken)).ConfigureAwait(false);
        if (secondary.Count > 0)
        {
            _metrics.RecordFallbackRead();
            _logger.LogInformation("Dual-write fallback refresh tokens for tenant {TenantId} user {UserId}", tenantId, userId);
        }

        return secondary;
    }

    public async Task<Guid> CreateAsync(string tenantId, RefreshTokenEntity token, CancellationToken cancellationToken = default)
    {
        var id = await _primary.CreateAsync(tenantId, token, cancellationToken).ConfigureAwait(false);
        _metrics.RecordPrimaryWrite();

        if (_options.WriteSecondary)
        {
            await SafeSecondaryWrite(async () =>
            {
                await _secondary.CreateAsync(tenantId, token, cancellationToken).ConfigureAwait(false);
            }, tenantId, token.Id);
        }

        return id;
    }

    public async Task RevokeAsync(string tenantId, Guid id, string revokedBy, Guid? replacedBy, CancellationToken cancellationToken = default)
    {
        await _primary.RevokeAsync(tenantId, id, revokedBy, replacedBy, cancellationToken).ConfigureAwait(false);
        _metrics.RecordPrimaryWrite();

        if (_options.WriteSecondary)
        {
            await SafeSecondaryWrite(() => _secondary.RevokeAsync(tenantId, id, revokedBy, replacedBy, cancellationToken), tenantId, id);
        }
    }

    public async Task RevokeByUserIdAsync(string tenantId, Guid userId, string revokedBy, CancellationToken cancellationToken = default)
    {
        await _primary.RevokeByUserIdAsync(tenantId, userId, revokedBy, cancellationToken).ConfigureAwait(false);
        _metrics.RecordPrimaryWrite();

        if (_options.WriteSecondary)
        {
            await SafeSecondaryWrite(() => _secondary.RevokeByUserIdAsync(tenantId, userId, revokedBy, cancellationToken), tenantId, userId);
        }
    }

    public async Task DeleteExpiredAsync(CancellationToken cancellationToken = default)
    {
        await _primary.DeleteExpiredAsync(cancellationToken).ConfigureAwait(false);
        _metrics.RecordPrimaryWrite();

        if (_options.WriteSecondary)
        {
            await SafeSecondaryWrite(() => _secondary.DeleteExpiredAsync(cancellationToken), tenantId: "system", id: Guid.Empty);
        }
    }

    private async Task<T> SafeSecondaryCall<T>(Func<Task<T>> call)
    {
        try
        {
            return await call().ConfigureAwait(false);
        }
        catch (Exception ex)
        {
            _logger.LogWarning(ex, "Dual-write secondary refresh read failed for backend {Backend}", _options.SecondaryBackend);
            if (_options.FailFastOnSecondary && !_options.LogSecondaryFailuresOnly)
            {
                throw;
            }
            return default!;
        }
    }

    private async Task SafeSecondaryWrite(Func<Task> call, string tenantId, Guid id)
    {
        try
        {
            await call().ConfigureAwait(false);
            _metrics.RecordSecondaryWrite();
        }
        catch (Exception ex)
        {
            _metrics.RecordSecondaryWriteFailure();
            _logger.LogWarning(ex,
                "Dual-write secondary refresh write failed for tenant {TenantId}, id {Id}, primary={Primary}, secondary={Secondary}",
                tenantId,
                id,
                _options.PrimaryBackend,
                _options.SecondaryBackend);

            if (_options.FailFastOnSecondary && !_options.LogSecondaryFailuresOnly)
            {
                throw;
            }
        }
    }
}
@@ -0,0 +1,175 @@
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Options;
using StellaOps.Authority.Storage.Postgres.Models;

namespace StellaOps.Authority.Storage.Postgres.Repositories;

/// <summary>
/// Decorator that writes to both primary (PostgreSQL) and secondary (legacy/Mongo) stores during cutover.
/// </summary>
public sealed class DualWriteTokenRepository : ITokenRepository
{
    private readonly ITokenRepository _primary;
    private readonly ISecondaryTokenRepository _secondary;
    private readonly DualWriteOptions _options;
    private readonly DualWriteMetrics _metrics;
    private readonly ILogger<DualWriteTokenRepository> _logger;

    public DualWriteTokenRepository(
        ITokenRepository primary,
        ISecondaryTokenRepository secondary,
        IOptions<DualWriteOptions> options,
        DualWriteMetrics metrics,
        ILogger<DualWriteTokenRepository> logger)
    {
        _primary = primary;
        _secondary = secondary;
        _options = options.Value;
        _metrics = metrics;
        _logger = logger;
    }

    public async Task<TokenEntity?> GetByIdAsync(string tenantId, Guid id, CancellationToken cancellationToken = default)
    {
        var primary = await _primary.GetByIdAsync(tenantId, id, cancellationToken).ConfigureAwait(false);
        if (primary is not null || !_options.FallbackToSecondary)
        {
            return primary;
        }

        var secondary = await SafeSecondaryCall(() => _secondary.GetByIdAsync(tenantId, id, cancellationToken)).ConfigureAwait(false);
        if (secondary is not null)
        {
            _metrics.RecordFallbackRead();
            _logger.LogInformation("Dual-write fallback token hit for tenant {TenantId} token {TokenId}", tenantId, id);
        }

        return secondary;
    }

    public async Task<TokenEntity?> GetByHashAsync(string tokenHash, CancellationToken cancellationToken = default)
    {
        var primary = await _primary.GetByHashAsync(tokenHash, cancellationToken).ConfigureAwait(false);
        if (primary is not null || !_options.FallbackToSecondary)
        {
            return primary;
        }

        var secondary = await SafeSecondaryCall(() => _secondary.GetByHashAsync(tokenHash, cancellationToken)).ConfigureAwait(false);
        if (secondary is not null)
        {
            _metrics.RecordFallbackRead();
            _logger.LogInformation("Dual-write fallback token hash hit for {Hash}", tokenHash);
        }

        return secondary;
    }

    public async Task<IReadOnlyList<TokenEntity>> GetByUserIdAsync(string tenantId, Guid userId, CancellationToken cancellationToken = default)
    {
        var primary = await _primary.GetByUserIdAsync(tenantId, userId, cancellationToken).ConfigureAwait(false);
        if (primary.Count > 0 || !_options.FallbackToSecondary)
        {
            return primary;
        }

        var secondary = await SafeSecondaryCall(() => _secondary.GetByUserIdAsync(tenantId, userId, cancellationToken)).ConfigureAwait(false);
        if (secondary.Count > 0)
        {
            _metrics.RecordFallbackRead();
            _logger.LogInformation("Dual-write fallback tokens for tenant {TenantId} user {UserId}", tenantId, userId);
        }

        return secondary;
    }

    public async Task<Guid> CreateAsync(string tenantId, TokenEntity token, CancellationToken cancellationToken = default)
    {
        var id = await _primary.CreateAsync(tenantId, token, cancellationToken).ConfigureAwait(false);
        _metrics.RecordPrimaryWrite();

        if (_options.WriteSecondary)
        {
            await SafeSecondaryWrite(async () =>
            {
                await _secondary.CreateAsync(tenantId, token, cancellationToken).ConfigureAwait(false);
            }, tenantId, token.Id);
        }

        return id;
    }

    public async Task RevokeAsync(string tenantId, Guid id, string revokedBy, CancellationToken cancellationToken = default)
    {
        await _primary.RevokeAsync(tenantId, id, revokedBy, cancellationToken).ConfigureAwait(false);
        _metrics.RecordPrimaryWrite();

        if (_options.WriteSecondary)
        {
            await SafeSecondaryWrite(() => _secondary.RevokeAsync(tenantId, id, revokedBy, cancellationToken), tenantId, id);
        }
    }

    public async Task RevokeByUserIdAsync(string tenantId, Guid userId, string revokedBy, CancellationToken cancellationToken = default)
    {
        await _primary.RevokeByUserIdAsync(tenantId, userId, revokedBy, cancellationToken).ConfigureAwait(false);
        _metrics.RecordPrimaryWrite();

        if (_options.WriteSecondary)
        {
            await SafeSecondaryWrite(() => _secondary.RevokeByUserIdAsync(tenantId, userId, revokedBy, cancellationToken), tenantId, userId);
        }
    }

    public async Task DeleteExpiredAsync(CancellationToken cancellationToken = default)
    {
        await _primary.DeleteExpiredAsync(cancellationToken).ConfigureAwait(false);
        _metrics.RecordPrimaryWrite();

        if (_options.WriteSecondary)
        {
            await SafeSecondaryWrite(() => _secondary.DeleteExpiredAsync(cancellationToken), tenantId: "system", id: Guid.Empty);
        }
    }

    private async Task<T> SafeSecondaryCall<T>(Func<Task<T>> call)
    {
        try
        {
            return await call().ConfigureAwait(false);
        }
        catch (Exception ex)
        {
            _logger.LogWarning(ex, "Dual-write secondary read failed for backend {Backend}", _options.SecondaryBackend);
            if (_options.FailFastOnSecondary && !_options.LogSecondaryFailuresOnly)
            {
                throw;
            }
            return default!;
        }
    }

    private async Task SafeSecondaryWrite(Func<Task> call, string tenantId, Guid id)
    {
        try
        {
            await call().ConfigureAwait(false);
            _metrics.RecordSecondaryWrite();
        }
        catch (Exception ex)
        {
            _metrics.RecordSecondaryWriteFailure();
            _logger.LogWarning(ex,
                "Dual-write secondary write failed for tenant {TenantId}, id {Id}, primary={Primary}, secondary={Secondary}",
                tenantId,
                id,
                _options.PrimaryBackend,
                _options.SecondaryBackend);

            if (_options.FailFastOnSecondary && !_options.LogSecondaryFailuresOnly)
            {
                throw;
            }
        }
    }
}
@@ -0,0 +1,106 @@
using StellaOps.Authority.Storage.Postgres.Models;

namespace StellaOps.Authority.Storage.Postgres.Repositories;

/// <summary>
/// Marker interface for secondary (legacy/Mongo) token repository.
/// </summary>
public interface ISecondaryTokenRepository : ITokenRepository { }

/// <summary>
/// Marker interface for secondary refresh token repository.
/// </summary>
public interface ISecondaryRefreshTokenRepository : IRefreshTokenRepository { }

/// <summary>
/// Marker interface for secondary user repository.
/// </summary>
public interface ISecondaryUserRepository : IUserRepository { }

/// <summary>
/// No-op secondary token repository used when dual-write is enabled without a configured secondary backend.
/// </summary>
internal sealed class NullSecondaryTokenRepository : ISecondaryTokenRepository
{
    public Task<TokenEntity?> GetByIdAsync(string tenantId, Guid id, CancellationToken cancellationToken = default) =>
        Task.FromResult<TokenEntity?>(null);

    public Task<TokenEntity?> GetByHashAsync(string tokenHash, CancellationToken cancellationToken = default) =>
        Task.FromResult<TokenEntity?>(null);

    public Task<IReadOnlyList<TokenEntity>> GetByUserIdAsync(string tenantId, Guid userId, CancellationToken cancellationToken = default) =>
        Task.FromResult<IReadOnlyList<TokenEntity>>(Array.Empty<TokenEntity>());

    public Task<Guid> CreateAsync(string tenantId, TokenEntity token, CancellationToken cancellationToken = default) =>
        Task.FromResult(token.Id == Guid.Empty ? Guid.NewGuid() : token.Id);

    public Task RevokeAsync(string tenantId, Guid id, string revokedBy, CancellationToken cancellationToken = default) =>
        Task.CompletedTask;

    public Task RevokeByUserIdAsync(string tenantId, Guid userId, string revokedBy, CancellationToken cancellationToken = default) =>
        Task.CompletedTask;

    public Task DeleteExpiredAsync(CancellationToken cancellationToken = default) => Task.CompletedTask;
}

/// <summary>
/// No-op secondary refresh token repository used when dual-write is enabled without a configured secondary backend.
/// </summary>
internal sealed class NullSecondaryRefreshTokenRepository : ISecondaryRefreshTokenRepository
{
    public Task<RefreshTokenEntity?> GetByIdAsync(string tenantId, Guid id, CancellationToken cancellationToken = default) =>
        Task.FromResult<RefreshTokenEntity?>(null);

    public Task<RefreshTokenEntity?> GetByHashAsync(string tokenHash, CancellationToken cancellationToken = default) =>
        Task.FromResult<RefreshTokenEntity?>(null);

    public Task<IReadOnlyList<RefreshTokenEntity>> GetByUserIdAsync(string tenantId, Guid userId, CancellationToken cancellationToken = default) =>
        Task.FromResult<IReadOnlyList<RefreshTokenEntity>>(Array.Empty<RefreshTokenEntity>());

    public Task<Guid> CreateAsync(string tenantId, RefreshTokenEntity token, CancellationToken cancellationToken = default) =>
        Task.FromResult(token.Id == Guid.Empty ? Guid.NewGuid() : token.Id);

    public Task RevokeAsync(string tenantId, Guid id, string revokedBy, Guid? replacedBy, CancellationToken cancellationToken = default) =>
        Task.CompletedTask;

    public Task RevokeByUserIdAsync(string tenantId, Guid userId, string revokedBy, CancellationToken cancellationToken = default) =>
        Task.CompletedTask;

    public Task DeleteExpiredAsync(CancellationToken cancellationToken = default) => Task.CompletedTask;
}

/// <summary>
/// No-op secondary user repository used when dual-write is enabled without a configured secondary backend.
/// </summary>
internal sealed class NullSecondaryUserRepository : ISecondaryUserRepository
{
    public Task<UserEntity> CreateAsync(UserEntity user, CancellationToken cancellationToken = default) =>
        Task.FromResult(user);

    public Task<UserEntity?> GetByIdAsync(string tenantId, Guid id, CancellationToken cancellationToken = default) =>
        Task.FromResult<UserEntity?>(null);

    public Task<UserEntity?> GetByUsernameAsync(string tenantId, string username, CancellationToken cancellationToken = default) =>
        Task.FromResult<UserEntity?>(null);

    public Task<UserEntity?> GetByEmailAsync(string tenantId, string email, CancellationToken cancellationToken = default) =>
        Task.FromResult<UserEntity?>(null);

    public Task<IReadOnlyList<UserEntity>> GetAllAsync(string tenantId, bool? enabled = null, int limit = 100, int offset = 0, CancellationToken cancellationToken = default) =>
        Task.FromResult<IReadOnlyList<UserEntity>>(Array.Empty<UserEntity>());

    public Task<bool> UpdateAsync(UserEntity user, CancellationToken cancellationToken = default) =>
        Task.FromResult(false);

    public Task<bool> DeleteAsync(string tenantId, Guid id, CancellationToken cancellationToken = default) =>
        Task.FromResult(false);

    public Task<bool> UpdatePasswordAsync(string tenantId, Guid userId, string passwordHash, string passwordSalt, CancellationToken cancellationToken = default) =>
        Task.FromResult(false);

    public Task<int> RecordFailedLoginAsync(string tenantId, Guid userId, DateTimeOffset? lockUntil = null, CancellationToken cancellationToken = default) =>
        Task.FromResult(0);

    public Task RecordSuccessfulLoginAsync(string tenantId, Guid userId, CancellationToken cancellationToken = default) =>
        Task.CompletedTask;
}
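The Null* implementations keep dual-write inert until a real legacy adapter is supplied. Because the registration code further below uses TryAddScoped for the secondary interfaces, a host can plug in its own adapters simply by registering them first. A sketch, where MongoTokenRepository, MongoRefreshTokenRepository, and MongoUserRepository are hypothetical names for the legacy Mongo-backed implementations (not types introduced by this change):

// Sketch only: the Mongo* adapter types are placeholders.
services.AddScoped<ISecondaryTokenRepository, MongoTokenRepository>();
services.AddScoped<ISecondaryRefreshTokenRepository, MongoRefreshTokenRepository>();
services.AddScoped<ISecondaryUserRepository, MongoUserRepository>();

// Register the PostgreSQL storage afterwards; its TryAddScoped calls will then keep
// these adapters instead of falling back to the Null* no-op implementations.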
@@ -24,7 +24,7 @@ public sealed class TenantRepository : RepositoryBase<AuthorityDataSource>, ITen
    public async Task<TenantEntity> CreateAsync(TenantEntity tenant, CancellationToken cancellationToken = default)
    {
        const string sql = """
-           INSERT INTO auth.tenants (id, slug, name, description, contact_email, enabled, settings, metadata, created_by)
+           INSERT INTO authority.tenants (id, slug, name, description, contact_email, enabled, settings, metadata, created_by)
            VALUES (@id, @slug, @name, @description, @contact_email, @enabled, @settings::jsonb, @metadata::jsonb, @created_by)
            RETURNING id, slug, name, description, contact_email, enabled, settings::text, metadata::text, created_at, updated_at, created_by
            """;
@@ -53,7 +53,7 @@ public sealed class TenantRepository : RepositoryBase<AuthorityDataSource>, ITen
    {
        const string sql = """
            SELECT id, slug, name, description, contact_email, enabled, settings::text, metadata::text, created_at, updated_at, created_by
-           FROM auth.tenants
+           FROM authority.tenants
            WHERE id = @id
            """;

@@ -70,7 +70,7 @@ public sealed class TenantRepository : RepositoryBase<AuthorityDataSource>, ITen
    {
        const string sql = """
            SELECT id, slug, name, description, contact_email, enabled, settings::text, metadata::text, created_at, updated_at, created_by
-           FROM auth.tenants
+           FROM authority.tenants
            WHERE slug = @slug
            """;

@@ -91,7 +91,7 @@ public sealed class TenantRepository : RepositoryBase<AuthorityDataSource>, ITen
    {
        var sql = """
            SELECT id, slug, name, description, contact_email, enabled, settings::text, metadata::text, created_at, updated_at, created_by
-           FROM auth.tenants
+           FROM authority.tenants
            """;

        if (enabled.HasValue)
@@ -1,5 +1,9 @@
using Microsoft.Extensions.Configuration;
using Microsoft.Extensions.DependencyInjection;
using Microsoft.Extensions.DependencyInjection.Extensions;
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Options;
using StellaOps.Authority.Storage.Postgres.Backfill;
using StellaOps.Authority.Storage.Postgres.Repositories;
using StellaOps.Infrastructure.Postgres;
using StellaOps.Infrastructure.Postgres.Options;
@@ -24,19 +28,11 @@ public static class ServiceCollectionExtensions
        string sectionName = "Postgres:Authority")
    {
        services.Configure<PostgresOptions>(sectionName, configuration.GetSection(sectionName));
        services.AddSingleton<AuthorityDataSource>();

        // Register repositories
        services.AddScoped<ITenantRepository, TenantRepository>();
        services.AddScoped<IUserRepository, UserRepository>();
        services.AddScoped<IRoleRepository, RoleRepository>();
        services.AddScoped<IPermissionRepository, PermissionRepository>();
        services.AddScoped<ITokenRepository, TokenRepository>();
        services.AddScoped<IRefreshTokenRepository, RefreshTokenRepository>();
        services.AddScoped<IApiKeyRepository, ApiKeyRepository>();
        services.AddScoped<ISessionRepository, SessionRepository>();
        services.AddScoped<IAuditRepository, AuditRepository>();
        var dualWriteSection = configuration.GetSection($"{sectionName}:DualWrite");
        services.Configure<DualWriteOptions>(dualWriteSection);
        var dualWriteEnabled = dualWriteSection.GetValue<bool>("Enabled");

        RegisterAuthorityServices(services, dualWriteEnabled);
        return services;
    }

@@ -51,19 +47,62 @@ public static class ServiceCollectionExtensions
        Action<PostgresOptions> configureOptions)
    {
        services.Configure(configureOptions);
        services.AddSingleton<AuthorityDataSource>();

        // Register repositories
        services.AddScoped<ITenantRepository, TenantRepository>();
        services.AddScoped<IUserRepository, UserRepository>();
        services.AddScoped<IRoleRepository, RoleRepository>();
        services.AddScoped<IPermissionRepository, PermissionRepository>();
        services.AddScoped<ITokenRepository, TokenRepository>();
        services.AddScoped<IRefreshTokenRepository, RefreshTokenRepository>();
        services.AddScoped<IApiKeyRepository, ApiKeyRepository>();
        services.AddScoped<ISessionRepository, SessionRepository>();
        services.AddScoped<IAuditRepository, AuditRepository>();

        RegisterAuthorityServices(services, dualWriteEnabled: false);
        return services;
    }

    private static void RegisterAuthorityServices(IServiceCollection services, bool dualWriteEnabled)
    {
        services.AddSingleton<AuthorityDataSource>();
        services.AddSingleton<DualWriteMetrics>();

        // Primary repositories
        services.AddScoped<TenantRepository>();
        services.AddScoped<UserRepository>();
        services.AddScoped<RoleRepository>();
        services.AddScoped<PermissionRepository>();
        services.AddScoped<TokenRepository>();
        services.AddScoped<RefreshTokenRepository>();
        services.AddScoped<ApiKeyRepository>();
        services.AddScoped<SessionRepository>();
        services.AddScoped<AuditRepository>();

        // Default interface bindings
        services.AddScoped<ITenantRepository>(sp => sp.GetRequiredService<TenantRepository>());
        services.AddScoped<IUserRepository>(sp => sp.GetRequiredService<UserRepository>());
        services.AddScoped<IRoleRepository>(sp => sp.GetRequiredService<RoleRepository>());
        services.AddScoped<IPermissionRepository>(sp => sp.GetRequiredService<PermissionRepository>());
        services.AddScoped<IApiKeyRepository>(sp => sp.GetRequiredService<ApiKeyRepository>());
        services.AddScoped<ISessionRepository>(sp => sp.GetRequiredService<SessionRepository>());
        services.AddScoped<IAuditRepository>(sp => sp.GetRequiredService<AuditRepository>());

        if (dualWriteEnabled)
        {
            services.TryAddScoped<ISecondaryTokenRepository, NullSecondaryTokenRepository>();
            services.TryAddScoped<ISecondaryRefreshTokenRepository, NullSecondaryRefreshTokenRepository>();
            services.TryAddScoped<ISecondaryUserRepository, NullSecondaryUserRepository>();

            services.AddScoped<ITokenRepository>(sp => new DualWriteTokenRepository(
                sp.GetRequiredService<TokenRepository>(),
                sp.GetRequiredService<ISecondaryTokenRepository>(),
                sp.GetRequiredService<IOptions<DualWriteOptions>>(),
                sp.GetRequiredService<DualWriteMetrics>(),
                sp.GetRequiredService<ILogger<DualWriteTokenRepository>>()));

            services.AddScoped<IRefreshTokenRepository>(sp => new DualWriteRefreshTokenRepository(
                sp.GetRequiredService<RefreshTokenRepository>(),
                sp.GetRequiredService<ISecondaryRefreshTokenRepository>(),
                sp.GetRequiredService<IOptions<DualWriteOptions>>(),
                sp.GetRequiredService<DualWriteMetrics>(),
                sp.GetRequiredService<ILogger<DualWriteRefreshTokenRepository>>()));

            // Backfill service available only when dual-write is enabled.
            services.AddScoped<AuthorityBackfillService>();
        }
        else
        {
            services.AddScoped<ITokenRepository>(sp => sp.GetRequiredService<TokenRepository>());
            services.AddScoped<IRefreshTokenRepository>(sp => sp.GetRequiredService<RefreshTokenRepository>());
        }
    }
}

@@ -24,13 +24,17 @@ public sealed class ApiKeyRepositoryTests : IAsyncLifetime
        _repository = new ApiKeyRepository(dataSource, NullLogger<ApiKeyRepository>.Instance);
    }

-   public Task InitializeAsync() => _fixture.TruncateAllTablesAsync();
+   public async Task InitializeAsync()
+   {
+       await _fixture.TruncateAllTablesAsync();
+       await SeedTenantAsync();
+   }

    public Task DisposeAsync() => Task.CompletedTask;

    [Fact]
    public async Task CreateAndGetByPrefix_RoundTripsApiKey()
    {
        // Arrange
        var keyPrefix = "sk_live_" + Guid.NewGuid().ToString("N")[..8];
        var apiKey = new ApiKeyEntity
        {
@@ -45,11 +49,10 @@ public sealed class ApiKeyRepositoryTests : IAsyncLifetime
            ExpiresAt = DateTimeOffset.UtcNow.AddYears(1)
        };

        // Act
        await SeedUsersAsync(apiKey.UserId!.Value);
        await _repository.CreateAsync(_tenantId, apiKey);
        var fetched = await _repository.GetByPrefixAsync(keyPrefix);

        // Assert
        fetched.Should().NotBeNull();
        fetched!.Id.Should().Be(apiKey.Id);
        fetched.Name.Should().Be("CI/CD Key");
@@ -59,14 +62,12 @@ public sealed class ApiKeyRepositoryTests : IAsyncLifetime
    [Fact]
    public async Task GetById_ReturnsApiKey()
    {
        // Arrange
        var apiKey = CreateApiKey(Guid.NewGuid(), "Test Key");
        await SeedUsersAsync(apiKey.UserId!.Value);
        await _repository.CreateAsync(_tenantId, apiKey);

        // Act
        var fetched = await _repository.GetByIdAsync(_tenantId, apiKey.Id);

        // Assert
        fetched.Should().NotBeNull();
        fetched!.Name.Should().Be("Test Key");
    }
@@ -74,81 +75,57 @@ public sealed class ApiKeyRepositoryTests : IAsyncLifetime
    [Fact]
    public async Task GetByUserId_ReturnsUserApiKeys()
    {
        // Arrange
        var userId = Guid.NewGuid();
        var key1 = CreateApiKey(userId, "Key 1");
        var key2 = CreateApiKey(userId, "Key 2");
        await SeedUsersAsync(userId);
        await _repository.CreateAsync(_tenantId, key1);
        await _repository.CreateAsync(_tenantId, key2);

        // Act
        var keys = await _repository.GetByUserIdAsync(_tenantId, userId);

        // Assert
        keys.Should().HaveCount(2);
    }

    [Fact]
    public async Task List_ReturnsAllKeysForTenant()
    {
        // Arrange
        var key1 = CreateApiKey(Guid.NewGuid(), "Key A");
        var key2 = CreateApiKey(Guid.NewGuid(), "Key B");
        await SeedUsersAsync(key1.UserId!.Value, key2.UserId!.Value);
        await _repository.CreateAsync(_tenantId, key1);
        await _repository.CreateAsync(_tenantId, key2);

        // Act
        var keys = await _repository.ListAsync(_tenantId);

        // Assert
        keys.Should().HaveCount(2);
    }

    [Fact]
    public async Task Revoke_UpdatesStatusAndRevokedFields()
    {
        // Arrange
        var apiKey = CreateApiKey(Guid.NewGuid(), "ToRevoke");
        await SeedUsersAsync(apiKey.UserId!.Value);
        await _repository.CreateAsync(_tenantId, apiKey);

        // Act
        await _repository.RevokeAsync(_tenantId, apiKey.Id, "security@test.com");
        var fetched = await _repository.GetByIdAsync(_tenantId, apiKey.Id);

        // Assert
        fetched!.Status.Should().Be(ApiKeyStatus.Revoked);
        fetched.RevokedAt.Should().NotBeNull();
        fetched.RevokedBy.Should().Be("security@test.com");
    }

    [Fact]
    public async Task UpdateLastUsed_SetsLastUsedAt()
    {
        // Arrange
        var apiKey = CreateApiKey(Guid.NewGuid(), "Usage Test");
        await _repository.CreateAsync(_tenantId, apiKey);

        // Act
        await _repository.UpdateLastUsedAsync(_tenantId, apiKey.Id);
        var fetched = await _repository.GetByIdAsync(_tenantId, apiKey.Id);

        // Assert
        fetched!.LastUsedAt.Should().NotBeNull();
        fetched.LastUsedAt.Should().BeCloseTo(DateTimeOffset.UtcNow, TimeSpan.FromSeconds(5));
    }

    [Fact]
    public async Task Delete_RemovesApiKey()
    {
        // Arrange
        var apiKey = CreateApiKey(Guid.NewGuid(), "ToDelete");
        var apiKey = CreateApiKey(Guid.NewGuid(), "DeleteKey");
        await SeedUsersAsync(apiKey.UserId!.Value);
        await _repository.CreateAsync(_tenantId, apiKey);

        // Act
        await _repository.DeleteAsync(_tenantId, apiKey.Id);
        var fetched = await _repository.GetByIdAsync(_tenantId, apiKey.Id);

        // Assert
        var fetched = await _repository.GetByIdAsync(_tenantId, apiKey.Id);
        fetched.Should().BeNull();
    }

@@ -158,10 +135,24 @@ public sealed class ApiKeyRepositoryTests : IAsyncLifetime
            TenantId = _tenantId,
            UserId = userId,
            Name = name,
-           KeyHash = $"sha256_{Guid.NewGuid():N}",
-           KeyPrefix = $"sk_test_{Guid.NewGuid():N}"[..16],
+           KeyHash = "sha256_key_" + Guid.NewGuid().ToString("N"),
+           KeyPrefix = "sk_" + Guid.NewGuid().ToString("N")[..8],
            Scopes = ["read"],
            Status = ApiKeyStatus.Active,
-           ExpiresAt = DateTimeOffset.UtcNow.AddYears(1)
+           CreatedAt = DateTimeOffset.UtcNow,
+           ExpiresAt = DateTimeOffset.UtcNow.AddMonths(6)
        };

    private Task SeedTenantAsync() =>
        _fixture.ExecuteSqlAsync(
            $"INSERT INTO authority.tenants (tenant_id, name, status, settings, metadata) " +
            $"VALUES ('{_tenantId}', 'Tenant {_tenantId}', 'active', '{{}}', '{{}}') " +
            "ON CONFLICT (tenant_id) DO NOTHING;");

    private Task SeedUsersAsync(params Guid[] userIds)
    {
        var statements = string.Join("\n", userIds.Distinct().Select(id =>
            $"INSERT INTO authority.users (id, tenant_id, username, status) VALUES ('{id}', '{_tenantId}', 'user-{id:N}', 'active') ON CONFLICT (id) DO NOTHING;"));
        return _fixture.ExecuteSqlAsync(statements);
    }
}

@@ -0,0 +1,90 @@
using FluentAssertions;
using Microsoft.Extensions.Logging.Abstractions;
using StellaOps.Authority.Storage.Postgres.Backfill;
using StellaOps.Authority.Storage.Postgres.Models;
using StellaOps.Authority.Storage.Postgres.Repositories;
using StellaOps.Authority.Storage.Postgres.Tests.TestDoubles;

namespace StellaOps.Authority.Storage.Postgres.Tests;

public sealed class BackfillVerificationTests
{
    [Fact]
    public async Task Backfill_copies_tokens_and_refresh_tokens_and_checksums_match()
    {
        var tenantId = "tenant-a";
        var primaryTokens = new InMemoryTokenRepository();
        var secondaryTokens = new InMemoryTokenRepository();
        var primaryRefresh = new InMemoryRefreshTokenRepository();
        var secondaryRefresh = new InMemoryRefreshTokenRepository();
        var primaryUsers = new InMemoryUserRepository();
        var secondaryUsers = new InMemoryUserRepository();
        var user = BuildUser(tenantId);
        await secondaryUsers.CreateAsync(user);

        var token = BuildToken(tenantId, user.Id);
        var refresh = BuildRefreshToken(tenantId, user.Id, token.Id);
        await secondaryTokens.CreateAsync(tenantId, token);
        await secondaryRefresh.CreateAsync(tenantId, refresh);

        var backfill = new AuthorityBackfillService(
            primaryTokens,
            secondaryTokens,
            primaryRefresh,
            secondaryRefresh,
            primaryUsers,
            secondaryUsers,
            NullLogger<AuthorityBackfillService>.Instance);

        var result = await backfill.BackfillAsync(tenantId);

        result.TokensCopied.Should().Be(1);
        result.RefreshTokensCopied.Should().Be(1);
        result.ChecksumsMatch.Should().BeTrue();
        primaryTokens.Snapshot().Should().ContainSingle(t => t.Id == token.Id);
        primaryRefresh.Snapshot().Should().ContainSingle(t => t.Id == refresh.Id);
    }

    private static UserEntity BuildUser(string tenantId) => new()
    {
        Id = Guid.NewGuid(),
        TenantId = tenantId,
        Username = "user1",
        Email = "user1@example.com",
        Enabled = true,
        EmailVerified = true,
        MfaEnabled = false,
        FailedLoginAttempts = 0,
        Settings = "{}",
        Metadata = "{}",
        CreatedAt = DateTimeOffset.UtcNow,
        UpdatedAt = DateTimeOffset.UtcNow
    };

    private static TokenEntity BuildToken(string tenantId, Guid userId) => new()
    {
        Id = Guid.NewGuid(),
        TenantId = tenantId,
        UserId = userId,
        TokenHash = "hash-primary",
        TokenType = TokenType.Access,
        Scopes = new[] { "scope-a" },
        ClientId = "client",
        IssuedAt = DateTimeOffset.UtcNow,
        ExpiresAt = DateTimeOffset.UtcNow.AddHours(1),
        Metadata = "{}"
    };

    private static RefreshTokenEntity BuildRefreshToken(string tenantId, Guid userId, Guid accessTokenId) => new()
    {
        Id = Guid.NewGuid(),
        TenantId = tenantId,
        UserId = userId,
        TokenHash = "r-hash",
        AccessTokenId = accessTokenId,
        ClientId = "client",
        IssuedAt = DateTimeOffset.UtcNow,
        ExpiresAt = DateTimeOffset.UtcNow.AddDays(1),
        Metadata = "{}"
    };
}
@@ -0,0 +1,107 @@
using FluentAssertions;
using Microsoft.Extensions.Logging.Abstractions;
using Microsoft.Extensions.Options;
using StellaOps.Authority.Storage.Postgres;
using StellaOps.Authority.Storage.Postgres.Models;
using StellaOps.Authority.Storage.Postgres.Repositories;
using StellaOps.Authority.Storage.Postgres.Tests.TestDoubles;

namespace StellaOps.Authority.Storage.Postgres.Tests;

public sealed class DualWriteRepositoryTests
{
    private static DualWriteOptions DefaultOptions() => new()
    {
        Enabled = true,
        WriteSecondary = true,
        FallbackToSecondary = true,
        LogSecondaryFailuresOnly = true
    };

    [Fact]
    public async Task Create_writes_to_primary_and_secondary()
    {
        var primary = new InMemoryTokenRepository();
        var secondary = new InMemoryTokenRepository();
        var sut = new DualWriteTokenRepository(primary, secondary, Options.Create(DefaultOptions()), new DualWriteMetrics(), NullLogger<DualWriteTokenRepository>.Instance);
        var token = BuildToken();

        var id = await sut.CreateAsync("tenant-a", token);

        id.Should().NotBe(Guid.Empty);
        primary.Snapshot().Should().ContainSingle(t => t.Id == id);
        secondary.Snapshot().Should().ContainSingle(t => t.Id == id);
    }

    [Fact]
    public async Task Read_falls_back_to_secondary_when_primary_missing()
    {
        var primary = new InMemoryTokenRepository();
        var secondary = new InMemoryTokenRepository();
        var token = BuildToken();
        await secondary.CreateAsync(token.TenantId, token);
        var sut = new DualWriteTokenRepository(primary, secondary, Options.Create(DefaultOptions()), new DualWriteMetrics(), NullLogger<DualWriteTokenRepository>.Instance);

        var fetched = await sut.GetByIdAsync(token.TenantId, token.Id);

        fetched.Should().NotBeNull();
        fetched!.Id.Should().Be(token.Id);
    }

    [Fact]
    public async Task Secondary_failure_does_not_block_primary_when_failfast_disabled()
    {
        var primary = new InMemoryTokenRepository();
        var secondary = new InMemoryTokenRepository { FailWrites = true };
        var options = DefaultOptions();
        options.FailFastOnSecondary = false;
        options.LogSecondaryFailuresOnly = true;
        var sut = new DualWriteTokenRepository(primary, secondary, Options.Create(options), new DualWriteMetrics(), NullLogger<DualWriteTokenRepository>.Instance);
        var token = BuildToken();

        await sut.Invoking(s => s.CreateAsync(token.TenantId, token)).Should().NotThrowAsync();
        primary.Snapshot().Should().ContainSingle(t => t.Id == token.Id);
    }

    [Fact]
    public async Task Refresh_tokens_dual_write_honours_secondary()
    {
        var primary = new InMemoryRefreshTokenRepository();
        var secondary = new InMemoryRefreshTokenRepository();
        var options = DefaultOptions();
        var sut = new DualWriteRefreshTokenRepository(primary, secondary, Options.Create(options), new DualWriteMetrics(), NullLogger<DualWriteRefreshTokenRepository>.Instance);
        var token = BuildRefreshToken();

        var id = await sut.CreateAsync(token.TenantId, token);

        primary.Snapshot().Should().ContainSingle(t => t.Id == id);
        secondary.Snapshot().Should().ContainSingle(t => t.Id == id);
    }

    private static TokenEntity BuildToken() => new()
    {
        Id = Guid.NewGuid(),
        TenantId = "tenant-a",
        UserId = Guid.NewGuid(),
        TokenHash = "hash-123",
        TokenType = TokenType.Access,
        Scopes = new[] { "scope1", "scope2" },
        ClientId = "client",
        IssuedAt = DateTimeOffset.UtcNow,
        ExpiresAt = DateTimeOffset.UtcNow.AddHours(1),
        Metadata = "{}"
    };

    private static RefreshTokenEntity BuildRefreshToken() => new()
    {
        Id = Guid.NewGuid(),
        TenantId = "tenant-a",
        UserId = Guid.NewGuid(),
        TokenHash = "r-hash-1",
        AccessTokenId = Guid.NewGuid(),
        ClientId = "client",
        IssuedAt = DateTimeOffset.UtcNow,
        ExpiresAt = DateTimeOffset.UtcNow.AddDays(1),
        Metadata = "{}"
    };
}
@@ -24,13 +24,17 @@ public sealed class PermissionRepositoryTests : IAsyncLifetime
        _repository = new PermissionRepository(dataSource, NullLogger<PermissionRepository>.Instance);
    }

-   public Task InitializeAsync() => _fixture.TruncateAllTablesAsync();
+   public async Task InitializeAsync()
+   {
+       await _fixture.TruncateAllTablesAsync();
+       await SeedTenantAsync();
+   }

    public Task DisposeAsync() => Task.CompletedTask;

    [Fact]
    public async Task CreateAndGet_RoundTripsPermission()
    {
        // Arrange
        var permission = new PermissionEntity
        {
            Id = Guid.NewGuid(),
@@ -41,11 +45,9 @@ public sealed class PermissionRepositoryTests : IAsyncLifetime
            Description = "Read user data"
        };

        // Act
        await _repository.CreateAsync(_tenantId, permission);
        var fetched = await _repository.GetByIdAsync(_tenantId, permission.Id);

        // Assert
        fetched.Should().NotBeNull();
        fetched!.Name.Should().Be("users:read");
        fetched.Resource.Should().Be("users");
@@ -55,79 +57,66 @@ public sealed class PermissionRepositoryTests : IAsyncLifetime
    [Fact]
    public async Task GetByName_ReturnsCorrectPermission()
    {
        // Arrange
        var permission = new PermissionEntity
        {
            Id = Guid.NewGuid(),
            TenantId = _tenantId,
            Name = "roles:write",
            Resource = "roles",
            Action = "write"
        };
        var permission = BuildPermission("tokens:revoke", "tokens", "revoke", "Revoke tokens");
        await _repository.CreateAsync(_tenantId, permission);

        // Act
        var fetched = await _repository.GetByNameAsync(_tenantId, "roles:write");
        var fetched = await _repository.GetByNameAsync(_tenantId, "tokens:revoke");

        // Assert
        fetched.Should().NotBeNull();
        fetched!.Id.Should().Be(permission.Id);
    }

    [Fact]
    public async Task List_ReturnsAllPermissionsForTenant()
    {
        // Arrange
        var perm1 = new PermissionEntity { Id = Guid.NewGuid(), TenantId = _tenantId, Name = "p1", Resource = "r1", Action = "a1" };
        var perm2 = new PermissionEntity { Id = Guid.NewGuid(), TenantId = _tenantId, Name = "p2", Resource = "r2", Action = "a2" };
        await _repository.CreateAsync(_tenantId, perm1);
        await _repository.CreateAsync(_tenantId, perm2);

        // Act
        var permissions = await _repository.ListAsync(_tenantId);

        // Assert
        permissions.Should().HaveCount(2);
        fetched!.Action.Should().Be("revoke");
    }

    [Fact]
    public async Task GetByResource_ReturnsResourcePermissions()
    {
        // Arrange
        var perm1 = new PermissionEntity { Id = Guid.NewGuid(), TenantId = _tenantId, Name = "scans:read", Resource = "scans", Action = "read" };
        var perm2 = new PermissionEntity { Id = Guid.NewGuid(), TenantId = _tenantId, Name = "scans:write", Resource = "scans", Action = "write" };
        var perm3 = new PermissionEntity { Id = Guid.NewGuid(), TenantId = _tenantId, Name = "users:read", Resource = "users", Action = "read" };
        await _repository.CreateAsync(_tenantId, perm1);
        await _repository.CreateAsync(_tenantId, perm2);
        await _repository.CreateAsync(_tenantId, perm3);
        var p1 = BuildPermission("users:read", "users", "read", "Read");
        var p2 = BuildPermission("users:write", "users", "write", "Write");
        await _repository.CreateAsync(_tenantId, p1);
        await _repository.CreateAsync(_tenantId, p2);

        // Act
        var permissions = await _repository.GetByResourceAsync(_tenantId, "scans");
        var perms = await _repository.GetByResourceAsync(_tenantId, "users");

        // Assert
        permissions.Should().HaveCount(2);
        permissions.Should().AllSatisfy(p => p.Resource.Should().Be("scans"));
        perms.Should().HaveCount(2);
    }

    [Fact]
    public async Task List_ReturnsAllPermissionsForTenant()
    {
        var p1 = BuildPermission("orch:read", "orch", "read", "Read orch");
        var p2 = BuildPermission("orch:write", "orch", "write", "Write orch");
        await _repository.CreateAsync(_tenantId, p1);
        await _repository.CreateAsync(_tenantId, p2);

        var perms = await _repository.ListAsync(_tenantId);

        perms.Should().HaveCount(2);
    }

    [Fact]
    public async Task Delete_RemovesPermission()
    {
        // Arrange
        var permission = new PermissionEntity
        {
            Id = Guid.NewGuid(),
            TenantId = _tenantId,
            Name = "temp:delete",
            Resource = "temp",
            Action = "delete"
        };
        var permission = BuildPermission("tokens:revoke", "tokens", "revoke", "Revoke tokens");
        await _repository.CreateAsync(_tenantId, permission);

        // Act
        await _repository.DeleteAsync(_tenantId, permission.Id);
        var fetched = await _repository.GetByIdAsync(_tenantId, permission.Id);

        // Assert
        var fetched = await _repository.GetByIdAsync(_tenantId, permission.Id);
        fetched.Should().BeNull();
    }

    private PermissionEntity BuildPermission(string name, string resource, string action, string description) => new()
    {
        Id = Guid.NewGuid(),
        TenantId = _tenantId,
        Name = name,
        Resource = resource,
        Action = action,
        Description = description
    };

    private Task SeedTenantAsync() =>
        _fixture.ExecuteSqlAsync(
            $"INSERT INTO authority.tenants (tenant_id, name, status, settings, metadata) " +
            $"VALUES ('{_tenantId}', 'Tenant {_tenantId}', 'active', '{{}}', '{{}}') " +
            "ON CONFLICT (tenant_id) DO NOTHING;");
}

@@ -25,122 +25,110 @@ public sealed class RefreshTokenRepositoryTests : IAsyncLifetime
|
||||
_repository = new RefreshTokenRepository(dataSource, NullLogger<RefreshTokenRepository>.Instance);
|
||||
}
|
||||
|
||||
public Task InitializeAsync() => _fixture.TruncateAllTablesAsync();
|
||||
public async Task InitializeAsync()
|
||||
{
|
||||
await _fixture.TruncateAllTablesAsync();
|
||||
await SeedTenantAsync();
|
||||
}
|
||||
|
||||
public Task DisposeAsync() => Task.CompletedTask;
|
||||
|
||||
[Fact]
|
||||
public async Task CreateAndGetByHash_RoundTripsRefreshToken()
|
||||
{
|
||||
// Arrange
|
||||
var token = new RefreshTokenEntity
|
||||
{
|
||||
Id = Guid.NewGuid(),
|
||||
TenantId = _tenantId,
|
||||
UserId = Guid.NewGuid(),
|
||||
TokenHash = "refresh_hash_" + Guid.NewGuid().ToString("N"),
|
||||
AccessTokenId = Guid.NewGuid(),
|
||||
ClientId = "web-app",
|
||||
IssuedAt = DateTimeOffset.UtcNow,
|
||||
ExpiresAt = DateTimeOffset.UtcNow.AddDays(30)
|
||||
};
|
||||
var refresh = BuildToken(Guid.NewGuid());
|
||||
await SeedUsersAsync(refresh.UserId);
|
||||
await SeedAccessTokensAsync((refresh.AccessTokenId!.Value, refresh.UserId));
|
||||
await _repository.CreateAsync(_tenantId, refresh);
|
||||
|
||||
// Act
|
||||
await _repository.CreateAsync(_tenantId, token);
|
||||
var fetched = await _repository.GetByHashAsync(token.TokenHash);
|
||||
var fetched = await _repository.GetByHashAsync(refresh.TokenHash);
|
||||
|
||||
// Assert
|
||||
fetched.Should().NotBeNull();
|
||||
fetched!.Id.Should().Be(token.Id);
|
||||
fetched.ClientId.Should().Be("web-app");
|
||||
fetched!.Id.Should().Be(refresh.Id);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task GetById_ReturnsToken()
|
||||
{
|
||||
// Arrange
|
||||
var token = CreateRefreshToken(Guid.NewGuid());
|
||||
await _repository.CreateAsync(_tenantId, token);
|
||||
var refresh = BuildToken(Guid.NewGuid());
|
||||
await SeedUsersAsync(refresh.UserId);
|
||||
await SeedAccessTokensAsync((refresh.AccessTokenId!.Value, refresh.UserId));
|
||||
await _repository.CreateAsync(_tenantId, refresh);
|
||||
|
||||
// Act
|
||||
var fetched = await _repository.GetByIdAsync(_tenantId, token.Id);
|
||||
var fetched = await _repository.GetByIdAsync(_tenantId, refresh.Id);
|
||||
|
||||
// Assert
|
||||
fetched.Should().NotBeNull();
|
||||
fetched!.Id.Should().Be(token.Id);
|
||||
fetched!.UserId.Should().Be(refresh.UserId);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task GetByUserId_ReturnsUserTokens()
|
||||
{
|
||||
// Arrange
|
||||
var userId = Guid.NewGuid();
|
||||
var token1 = CreateRefreshToken(userId);
|
||||
var token2 = CreateRefreshToken(userId);
|
||||
await _repository.CreateAsync(_tenantId, token1);
|
||||
await _repository.CreateAsync(_tenantId, token2);
|
||||
var t1 = BuildToken(userId);
|
||||
var t2 = BuildToken(userId);
|
||||
await SeedUsersAsync(userId);
|
||||
await SeedAccessTokensAsync((t1.AccessTokenId!.Value, userId), (t2.AccessTokenId!.Value, userId));
|
||||
await _repository.CreateAsync(_tenantId, t1);
|
||||
await _repository.CreateAsync(_tenantId, t2);
|
||||
|
||||
// Act
|
||||
var tokens = await _repository.GetByUserIdAsync(_tenantId, userId);
|
||||
|
||||
// Assert
|
||||
tokens.Should().HaveCount(2);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task Revoke_SetsRevokedFields()
|
||||
{
|
||||
// Arrange
|
||||
var token = CreateRefreshToken(Guid.NewGuid());
|
||||
await _repository.CreateAsync(_tenantId, token);
|
||||
var refresh = BuildToken(Guid.NewGuid());
|
||||
await SeedUsersAsync(refresh.UserId);
|
||||
await SeedAccessTokensAsync((refresh.AccessTokenId!.Value, refresh.UserId));
|
||||
await _repository.CreateAsync(_tenantId, refresh);
|
||||
|
||||
// Act
|
||||
await _repository.RevokeAsync(_tenantId, token.Id, "admin@test.com", null);
|
||||
var fetched = await _repository.GetByHashAsync(token.TokenHash);
|
||||
await _repository.RevokeAsync(_tenantId, refresh.Id, "tester", Guid.Empty);
|
||||
var fetched = await _repository.GetByIdAsync(_tenantId, refresh.Id);
|
||||
|
||||
// Assert
|
||||
fetched!.RevokedAt.Should().NotBeNull();
|
||||
fetched.RevokedBy.Should().Be("admin@test.com");
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task Revoke_WithReplacedBy_SetsReplacedByField()
|
||||
{
|
||||
// Arrange
|
||||
var token = CreateRefreshToken(Guid.NewGuid());
|
||||
await _repository.CreateAsync(_tenantId, token);
|
||||
var newTokenId = Guid.NewGuid();
|
||||
|
||||
// Act
|
||||
await _repository.RevokeAsync(_tenantId, token.Id, "rotation", newTokenId);
|
||||
var fetched = await _repository.GetByHashAsync(token.TokenHash);
|
||||
|
||||
// Assert
|
||||
fetched!.RevokedAt.Should().NotBeNull();
|
||||
fetched.ReplacedBy.Should().Be(newTokenId);
|
||||
fetched.RevokedBy.Should().Be("tester");
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task RevokeByUserId_RevokesAllUserTokens()
|
||||
{
|
||||
// Arrange
|
||||
var userId = Guid.NewGuid();
|
||||
var token1 = CreateRefreshToken(userId);
|
||||
var token2 = CreateRefreshToken(userId);
|
||||
await _repository.CreateAsync(_tenantId, token1);
|
||||
await _repository.CreateAsync(_tenantId, token2);
|
||||
var t1 = BuildToken(userId);
|
||||
var t2 = BuildToken(userId);
|
||||
await SeedUsersAsync(userId);
|
||||
await SeedAccessTokensAsync((t1.AccessTokenId!.Value, userId), (t2.AccessTokenId!.Value, userId));
|
||||
await _repository.CreateAsync(_tenantId, t1);
|
||||
await _repository.CreateAsync(_tenantId, t2);
|
||||
|
||||
// Act
|
||||
await _repository.RevokeByUserIdAsync(_tenantId, userId, "security_action");
|
||||
var tokens = await _repository.GetByUserIdAsync(_tenantId, userId);
|
||||
await _repository.RevokeByUserIdAsync(_tenantId, userId, "bulk-revoke");
|
||||
|
||||
// Assert
|
||||
tokens.Should().AllSatisfy(t => t.RevokedAt.Should().NotBeNull());
|
||||
var revoked1 = await _repository.GetByIdAsync(_tenantId, t1.Id);
|
||||
var revoked2 = await _repository.GetByIdAsync(_tenantId, t2.Id);
|
||||
revoked1!.RevokedAt.Should().NotBeNull();
|
||||
revoked2!.RevokedAt.Should().NotBeNull();
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task Revoke_WithReplacedBy_SetsReplacedByField()
|
||||
{
|
||||
var refresh = BuildToken(Guid.NewGuid());
|
||||
await SeedUsersAsync(refresh.UserId);
|
||||
await SeedAccessTokensAsync((refresh.AccessTokenId!.Value, refresh.UserId));
|
||||
await _repository.CreateAsync(_tenantId, refresh);
|
||||
var newTokenId = Guid.NewGuid();
|
||||
|
||||
await _repository.RevokeAsync(_tenantId, refresh.Id, "rotate", newTokenId);
|
||||
var fetched = await _repository.GetByIdAsync(_tenantId, refresh.Id);
|
||||
|
||||
fetched!.ReplacedBy.Should().Be(newTokenId);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task GetByUserId_IsDeterministic_WhenIssuedAtTies()
|
||||
{
|
||||
// Arrange: fixed IDs with same IssuedAt to assert stable ordering
|
||||
var userId = Guid.NewGuid();
|
||||
var issuedAt = new DateTimeOffset(2025, 11, 30, 12, 0, 0, TimeSpan.Zero);
|
||||
|
||||
@@ -151,42 +139,30 @@ public sealed class RefreshTokenRepositoryTests : IAsyncLifetime
|
||||
Id = Guid.Parse("aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa"),
|
||||
TenantId = _tenantId,
|
||||
UserId = userId,
|
||||
TokenHash = "rhash1-" + Guid.NewGuid().ToString("N"),
|
||||
AccessTokenId = Guid.Parse("10000000-0000-0000-0000-000000000000"),
|
||||
ClientId = "web-app",
|
||||
TokenHash = "hash-a",
|
||||
AccessTokenId = Guid.NewGuid(),
|
||||
IssuedAt = issuedAt,
|
||||
ExpiresAt = issuedAt.AddDays(30)
|
||||
ExpiresAt = issuedAt.AddHours(1)
|
||||
},
|
||||
new RefreshTokenEntity
|
||||
{
|
||||
Id = Guid.Parse("bbbbbbbb-bbbb-bbbb-bbbb-bbbbbbbbbbbb"),
|
||||
TenantId = _tenantId,
|
||||
UserId = userId,
|
||||
TokenHash = "rhash2-" + Guid.NewGuid().ToString("N"),
|
||||
AccessTokenId = Guid.Parse("20000000-0000-0000-0000-000000000000"),
|
||||
ClientId = "web-app",
|
||||
TokenHash = "hash-b",
|
||||
AccessTokenId = Guid.NewGuid(),
|
||||
IssuedAt = issuedAt,
|
||||
ExpiresAt = issuedAt.AddDays(30)
|
||||
},
|
||||
new RefreshTokenEntity
|
||||
{
|
||||
Id = Guid.Parse("cccccccc-cccc-cccc-cccc-cccccccccccc"),
|
||||
TenantId = _tenantId,
|
||||
UserId = userId,
|
||||
TokenHash = "rhash3-" + Guid.NewGuid().ToString("N"),
|
||||
AccessTokenId = Guid.Parse("30000000-0000-0000-0000-000000000000"),
|
||||
ClientId = "web-app",
|
||||
IssuedAt = issuedAt,
|
||||
ExpiresAt = issuedAt.AddDays(30)
|
||||
ExpiresAt = issuedAt.AddHours(1)
|
||||
}
|
||||
};
|
||||
|
||||
await SeedUsersAsync(userId);
|
||||
await SeedAccessTokensAsync((tokens[0].AccessTokenId!.Value, userId), (tokens[1].AccessTokenId!.Value, userId));
|
||||
foreach (var token in tokens.Reverse())
|
||||
{
|
||||
await _repository.CreateAsync(_tenantId, token);
|
||||
}
|
||||
|
||||
// Act
|
||||
var first = await _repository.GetByUserIdAsync(_tenantId, userId);
|
||||
var second = await _repository.GetByUserIdAsync(_tenantId, userId);
|
||||
|
||||
@@ -196,18 +172,40 @@ public sealed class RefreshTokenRepositoryTests : IAsyncLifetime
|
||||
.Select(t => t.Id)
|
||||
.ToArray();
|
||||
|
||||
// Assert
|
||||
first.Select(t => t.Id).Should().ContainInOrder(expectedOrder);
|
||||
second.Should().BeEquivalentTo(first, o => o.WithStrictOrdering());
|
||||
}
|
||||
|
||||
private RefreshTokenEntity CreateRefreshToken(Guid userId) => new()
|
||||
private RefreshTokenEntity BuildToken(Guid userId) => new()
|
||||
{
|
||||
Id = Guid.NewGuid(),
|
||||
TenantId = _tenantId,
|
||||
UserId = userId,
|
||||
TokenHash = $"refresh_{Guid.NewGuid():N}",
|
||||
TokenHash = "refresh_" + Guid.NewGuid().ToString("N"),
|
||||
AccessTokenId = Guid.NewGuid(),
|
||||
IssuedAt = DateTimeOffset.UtcNow,
|
||||
ExpiresAt = DateTimeOffset.UtcNow.AddDays(30)
|
||||
ExpiresAt = DateTimeOffset.UtcNow.AddHours(2)
|
||||
};
|
||||
|
||||
private Task SeedTenantAsync() =>
|
||||
_fixture.ExecuteSqlAsync(
|
||||
$"INSERT INTO authority.tenants (tenant_id, name, status, settings, metadata) " +
|
||||
$"VALUES ('{_tenantId}', 'Tenant {_tenantId}', 'active', '{{}}', '{{}}') " +
|
||||
"ON CONFLICT (tenant_id) DO NOTHING;");
|
||||
|
||||
private Task SeedUsersAsync(params Guid[] userIds)
|
||||
{
|
||||
var statements = string.Join("\n", userIds.Distinct().Select(id =>
|
||||
$"INSERT INTO authority.users (id, tenant_id, username, status) VALUES ('{id}', '{_tenantId}', 'user-{id:N}', 'active') ON CONFLICT (id) DO NOTHING;"));
|
||||
return _fixture.ExecuteSqlAsync(statements);
|
||||
}
|
||||
|
||||
private Task SeedAccessTokensAsync(params (Guid TokenId, Guid UserId)[] tokens)
|
||||
{
|
||||
var statements = string.Join("\n", tokens.Distinct().Select(t =>
|
||||
$"INSERT INTO authority.tokens (id, tenant_id, user_id, token_hash, token_type, scopes, expires_at, metadata) " +
|
||||
$"VALUES ('{t.TokenId}', '{_tenantId}', '{t.UserId}', 'seed-hash-{t.TokenId:N}', 'access', '{{}}', NOW() + INTERVAL '1 day', '{{}}') " +
|
||||
"ON CONFLICT (id) DO NOTHING;"));
|
||||
return _fixture.ExecuteSqlAsync(statements);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -24,117 +24,99 @@ public sealed class RoleRepositoryTests : IAsyncLifetime
|
||||
_repository = new RoleRepository(dataSource, NullLogger<RoleRepository>.Instance);
|
||||
}
|
||||
|
||||
public Task InitializeAsync() => _fixture.TruncateAllTablesAsync();
|
||||
public async Task InitializeAsync()
|
||||
{
|
||||
await _fixture.TruncateAllTablesAsync();
|
||||
await SeedTenantAsync();
|
||||
}
|
||||
|
||||
public Task DisposeAsync() => Task.CompletedTask;
|
||||
|
||||
[Fact]
|
||||
public async Task CreateAndGet_RoundTripsRole()
|
||||
{
|
||||
// Arrange
|
||||
var role = new RoleEntity
|
||||
{
|
||||
Id = Guid.NewGuid(),
|
||||
TenantId = _tenantId,
|
||||
Name = "admin",
|
||||
DisplayName = "Administrator",
|
||||
Description = "Full system access",
|
||||
IsSystem = true,
|
||||
Metadata = "{\"level\": 1}"
|
||||
};
|
||||
|
||||
// Act
|
||||
var role = BuildRole("Admin");
|
||||
await _repository.CreateAsync(_tenantId, role);
|
||||
|
||||
var fetched = await _repository.GetByIdAsync(_tenantId, role.Id);
|
||||
|
||||
// Assert
|
||||
fetched.Should().NotBeNull();
|
||||
fetched!.Id.Should().Be(role.Id);
|
||||
fetched.Name.Should().Be("admin");
|
||||
fetched.DisplayName.Should().Be("Administrator");
|
||||
fetched.IsSystem.Should().BeTrue();
|
||||
fetched!.Name.Should().Be("Admin");
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task GetByName_ReturnsCorrectRole()
|
||||
{
|
||||
// Arrange
|
||||
var role = new RoleEntity
|
||||
{
|
||||
Id = Guid.NewGuid(),
|
||||
TenantId = _tenantId,
|
||||
Name = "viewer",
|
||||
DisplayName = "Viewer",
|
||||
Description = "Read-only access"
|
||||
};
|
||||
var role = BuildRole("Reader");
|
||||
await _repository.CreateAsync(_tenantId, role);
|
||||
|
||||
// Act
|
||||
var fetched = await _repository.GetByNameAsync(_tenantId, "viewer");
|
||||
var fetched = await _repository.GetByNameAsync(_tenantId, "Reader");
|
||||
|
||||
// Assert
|
||||
fetched.Should().NotBeNull();
|
||||
fetched!.Id.Should().Be(role.Id);
|
||||
fetched!.Description.Should().Be("Reader role");
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task List_ReturnsAllRolesForTenant()
|
||||
{
|
||||
// Arrange
|
||||
var role1 = new RoleEntity { Id = Guid.NewGuid(), TenantId = _tenantId, Name = "role1" };
|
||||
var role2 = new RoleEntity { Id = Guid.NewGuid(), TenantId = _tenantId, Name = "role2" };
|
||||
await _repository.CreateAsync(_tenantId, role1);
|
||||
await _repository.CreateAsync(_tenantId, role2);
|
||||
await _repository.CreateAsync(_tenantId, BuildRole("Reader"));
|
||||
await _repository.CreateAsync(_tenantId, BuildRole("Writer"));
|
||||
|
||||
// Act
|
||||
var roles = await _repository.ListAsync(_tenantId);
|
||||
|
||||
// Assert
|
||||
roles.Should().HaveCount(2);
|
||||
roles.Select(r => r.Name).Should().Contain(["role1", "role2"]);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task Update_ModifiesRole()
|
||||
{
|
||||
// Arrange
|
||||
var role = new RoleEntity
|
||||
{
|
||||
Id = Guid.NewGuid(),
|
||||
TenantId = _tenantId,
|
||||
Name = "editor",
|
||||
DisplayName = "Editor"
|
||||
};
|
||||
var role = BuildRole("Updater");
|
||||
await _repository.CreateAsync(_tenantId, role);
|
||||
|
||||
// Act
|
||||
var updated = new RoleEntity
|
||||
{
|
||||
Id = role.Id,
|
||||
TenantId = _tenantId,
|
||||
Name = "editor",
|
||||
DisplayName = "Content Editor",
|
||||
Description = "Updated description"
|
||||
TenantId = role.TenantId,
|
||||
Name = role.Name,
|
||||
Description = "Updated description",
|
||||
DisplayName = role.DisplayName,
|
||||
IsSystem = role.IsSystem,
|
||||
Metadata = role.Metadata,
|
||||
CreatedAt = role.CreatedAt,
|
||||
UpdatedAt = DateTimeOffset.UtcNow
|
||||
};
|
||||
await _repository.UpdateAsync(_tenantId, updated);
|
||||
var fetched = await _repository.GetByIdAsync(_tenantId, role.Id);
|
||||
|
||||
// Assert
|
||||
fetched!.DisplayName.Should().Be("Content Editor");
|
||||
fetched.Description.Should().Be("Updated description");
|
||||
await _repository.UpdateAsync(_tenantId, updated);
|
||||
|
||||
var fetched = await _repository.GetByIdAsync(_tenantId, role.Id);
|
||||
fetched!.Description.Should().Be("Updated description");
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task Delete_RemovesRole()
|
||||
{
|
||||
// Arrange
|
||||
var role = new RoleEntity { Id = Guid.NewGuid(), TenantId = _tenantId, Name = "temp" };
|
||||
var role = BuildRole("Deleter");
|
||||
await _repository.CreateAsync(_tenantId, role);
|
||||
|
||||
// Act
|
||||
await _repository.DeleteAsync(_tenantId, role.Id);
|
||||
var fetched = await _repository.GetByIdAsync(_tenantId, role.Id);
|
||||
|
||||
// Assert
|
||||
var fetched = await _repository.GetByIdAsync(_tenantId, role.Id);
|
||||
fetched.Should().BeNull();
|
||||
}
|
||||
|
||||
private RoleEntity BuildRole(string name) => new()
|
||||
{
|
||||
Id = Guid.NewGuid(),
|
||||
TenantId = _tenantId,
|
||||
Name = name,
|
||||
Description = $"{name} role",
|
||||
CreatedAt = DateTimeOffset.UtcNow,
|
||||
UpdatedAt = DateTimeOffset.UtcNow
|
||||
};
|
||||
|
||||
private Task SeedTenantAsync() =>
|
||||
_fixture.ExecuteSqlAsync(
|
||||
$"INSERT INTO authority.tenants (tenant_id, name, status, settings, metadata) " +
|
||||
$"VALUES ('{_tenantId}', 'Tenant {_tenantId}', 'active', '{{}}', '{{}}') " +
|
||||
"ON CONFLICT (tenant_id) DO NOTHING;");
|
||||
}
|
||||
|
||||
@@ -24,156 +24,81 @@ public sealed class SessionRepositoryTests : IAsyncLifetime
|
||||
_repository = new SessionRepository(dataSource, NullLogger<SessionRepository>.Instance);
|
||||
}
|
||||
|
||||
public Task InitializeAsync() => _fixture.TruncateAllTablesAsync();
|
||||
public async Task InitializeAsync()
|
||||
{
|
||||
await _fixture.TruncateAllTablesAsync();
|
||||
await SeedTenantAsync();
|
||||
}
|
||||
|
||||
public Task DisposeAsync() => Task.CompletedTask;
|
||||
|
||||
[Fact]
|
||||
public async Task CreateAndGet_RoundTripsSession()
|
||||
{
|
||||
// Arrange
|
||||
var session = new SessionEntity
|
||||
{
|
||||
Id = Guid.NewGuid(),
|
||||
TenantId = _tenantId,
|
||||
UserId = Guid.NewGuid(),
|
||||
SessionTokenHash = "session_hash_" + Guid.NewGuid().ToString("N"),
|
||||
IpAddress = "192.168.1.1",
|
||||
UserAgent = "Mozilla/5.0",
|
||||
StartedAt = DateTimeOffset.UtcNow,
|
||||
LastActivityAt = DateTimeOffset.UtcNow,
|
||||
ExpiresAt = DateTimeOffset.UtcNow.AddDays(7)
|
||||
};
|
||||
|
||||
// Act
|
||||
var session = BuildSession();
|
||||
await SeedUsersAsync(session.UserId);
|
||||
await _repository.CreateAsync(_tenantId, session);
|
||||
var fetched = await _repository.GetByIdAsync(_tenantId, session.Id);
|
||||
|
||||
// Assert
|
||||
var fetched = await _repository.GetByTokenHashAsync(session.SessionTokenHash);
|
||||
|
||||
fetched.Should().NotBeNull();
|
||||
fetched!.Id.Should().Be(session.Id);
|
||||
fetched.IpAddress.Should().Be("192.168.1.1");
|
||||
fetched.UserAgent.Should().Be("Mozilla/5.0");
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task GetByTokenHash_ReturnsSession()
|
||||
{
|
||||
// Arrange
|
||||
var tokenHash = "lookup_hash_" + Guid.NewGuid().ToString("N");
|
||||
var session = new SessionEntity
|
||||
{
|
||||
Id = Guid.NewGuid(),
|
||||
TenantId = _tenantId,
|
||||
UserId = Guid.NewGuid(),
|
||||
SessionTokenHash = tokenHash,
|
||||
StartedAt = DateTimeOffset.UtcNow,
|
||||
LastActivityAt = DateTimeOffset.UtcNow,
|
||||
ExpiresAt = DateTimeOffset.UtcNow.AddDays(7)
|
||||
};
|
||||
var session = BuildSession();
|
||||
await SeedUsersAsync(session.UserId);
|
||||
await _repository.CreateAsync(_tenantId, session);
|
||||
|
||||
// Act
|
||||
var fetched = await _repository.GetByTokenHashAsync(tokenHash);
|
||||
var fetched = await _repository.GetByTokenHashAsync(session.SessionTokenHash);
|
||||
|
||||
// Assert
|
||||
fetched.Should().NotBeNull();
|
||||
fetched!.Id.Should().Be(session.Id);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task GetByUserId_WithActiveOnly_ReturnsOnlyActiveSessions()
|
||||
{
|
||||
// Arrange
|
||||
var userId = Guid.NewGuid();
|
||||
var activeSession = CreateSession(userId);
|
||||
var endedSession = new SessionEntity
|
||||
{
|
||||
Id = Guid.NewGuid(),
|
||||
TenantId = _tenantId,
|
||||
UserId = userId,
|
||||
SessionTokenHash = "ended_" + Guid.NewGuid().ToString("N"),
|
||||
StartedAt = DateTimeOffset.UtcNow.AddHours(-2),
|
||||
LastActivityAt = DateTimeOffset.UtcNow.AddHours(-1),
|
||||
ExpiresAt = DateTimeOffset.UtcNow.AddDays(7),
|
||||
EndedAt = DateTimeOffset.UtcNow,
|
||||
EndReason = "logout"
|
||||
};
|
||||
|
||||
await _repository.CreateAsync(_tenantId, activeSession);
|
||||
await _repository.CreateAsync(_tenantId, endedSession);
|
||||
|
||||
// Act
|
||||
var activeSessions = await _repository.GetByUserIdAsync(_tenantId, userId, activeOnly: true);
|
||||
var allSessions = await _repository.GetByUserIdAsync(_tenantId, userId, activeOnly: false);
|
||||
|
||||
// Assert
|
||||
activeSessions.Should().HaveCount(1);
|
||||
allSessions.Should().HaveCount(2);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task UpdateLastActivity_UpdatesTimestamp()
|
||||
{
|
||||
// Arrange
|
||||
var session = CreateSession(Guid.NewGuid());
|
||||
await _repository.CreateAsync(_tenantId, session);
|
||||
|
||||
// Act
|
||||
await Task.Delay(100); // Ensure time difference
|
||||
await _repository.UpdateLastActivityAsync(_tenantId, session.Id);
|
||||
var fetched = await _repository.GetByIdAsync(_tenantId, session.Id);
|
||||
|
||||
// Assert
|
||||
fetched!.LastActivityAt.Should().BeCloseTo(DateTimeOffset.UtcNow, TimeSpan.FromSeconds(5));
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task End_SetsEndFieldsCorrectly()
|
||||
{
|
||||
// Arrange
|
||||
var session = CreateSession(Guid.NewGuid());
|
||||
await _repository.CreateAsync(_tenantId, session);
|
||||
|
||||
// Act
|
||||
await _repository.EndAsync(_tenantId, session.Id, "session_timeout");
|
||||
var fetched = await _repository.GetByIdAsync(_tenantId, session.Id);
|
||||
|
||||
// Assert
|
||||
fetched!.EndedAt.Should().NotBeNull();
|
||||
fetched.EndReason.Should().Be("session_timeout");
|
||||
fetched!.UserId.Should().Be(session.UserId);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task EndByUserId_EndsAllUserSessions()
|
||||
{
|
||||
// Arrange
|
||||
var userId = Guid.NewGuid();
|
||||
var session1 = CreateSession(userId);
|
||||
var session2 = CreateSession(userId);
|
||||
await _repository.CreateAsync(_tenantId, session1);
|
||||
await _repository.CreateAsync(_tenantId, session2);
|
||||
var s1 = BuildSession(userId);
|
||||
var s2 = BuildSession(userId);
|
||||
await SeedUsersAsync(userId);
|
||||
await _repository.CreateAsync(_tenantId, s1);
|
||||
await _repository.CreateAsync(_tenantId, s2);
|
||||
|
||||
// Act
|
||||
await _repository.EndByUserIdAsync(_tenantId, userId, "forced_logout");
|
||||
var sessions = await _repository.GetByUserIdAsync(_tenantId, userId, activeOnly: false);
|
||||
await _repository.EndByUserIdAsync(_tenantId, userId, "test-end");
|
||||
|
||||
// Assert
|
||||
sessions.Should().HaveCount(2);
|
||||
sessions.Should().AllSatisfy(s =>
|
||||
{
|
||||
s.EndedAt.Should().NotBeNull();
|
||||
s.EndReason.Should().Be("forced_logout");
|
||||
});
|
||||
var s1Fetched = await _repository.GetByIdAsync(_tenantId, s1.Id);
|
||||
var s2Fetched = await _repository.GetByIdAsync(_tenantId, s2.Id);
|
||||
s1Fetched!.EndedAt.Should().NotBeNull();
|
||||
s2Fetched!.EndedAt.Should().NotBeNull();
|
||||
}
|
||||
|
||||
private SessionEntity CreateSession(Guid userId) => new()
|
||||
private SessionEntity BuildSession(Guid? userId = null) => new()
|
||||
{
|
||||
Id = Guid.NewGuid(),
|
||||
TenantId = _tenantId,
|
||||
UserId = userId,
|
||||
SessionTokenHash = $"session_{Guid.NewGuid():N}",
|
||||
UserId = userId ?? Guid.NewGuid(),
|
||||
SessionTokenHash = "session_hash_" + Guid.NewGuid().ToString("N"),
|
||||
IpAddress = "192.168.1.1",
|
||||
UserAgent = "Mozilla/5.0",
|
||||
StartedAt = DateTimeOffset.UtcNow,
|
||||
LastActivityAt = DateTimeOffset.UtcNow,
|
||||
ExpiresAt = DateTimeOffset.UtcNow.AddDays(7)
|
||||
ExpiresAt = DateTimeOffset.UtcNow.AddHours(6)
|
||||
};
|
||||
|
||||
private Task SeedTenantAsync() =>
|
||||
_fixture.ExecuteSqlAsync(
|
||||
$"INSERT INTO authority.tenants (tenant_id, name, status, settings, metadata) " +
|
||||
$"VALUES ('{_tenantId}', 'Tenant {_tenantId}', 'active', '{{}}', '{{}}') " +
|
||||
"ON CONFLICT (tenant_id) DO NOTHING;");
|
||||
|
||||
private Task SeedUsersAsync(params Guid[] userIds)
|
||||
{
|
||||
var statements = string.Join("\n", userIds.Distinct().Select(id =>
|
||||
$"INSERT INTO authority.users (id, tenant_id, username, status) VALUES ('{id}', '{_tenantId}', 'user-{id:N}', 'active') ON CONFLICT (id) DO NOTHING;"));
|
||||
return _fixture.ExecuteSqlAsync(statements);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -13,6 +13,8 @@
|
||||
<ItemGroup>
|
||||
<PackageReference Include="FluentAssertions" Version="6.12.0" />
|
||||
<PackageReference Include="Microsoft.NET.Test.Sdk" Version="17.11.1" />
|
||||
<PackageReference Include="Microsoft.Extensions.Logging.Abstractions" Version="10.0.0-rc.2.25502.107" />
|
||||
<PackageReference Include="Microsoft.Extensions.Options" Version="10.0.0-rc.2.25502.107" />
|
||||
<PackageReference Include="Moq" Version="4.20.70" />
|
||||
<PackageReference Include="xunit" Version="2.9.2" />
|
||||
<PackageReference Include="xunit.runner.visualstudio" Version="2.8.2">
|
||||
|
||||
@@ -0,0 +1,281 @@
|
||||
using StellaOps.Authority.Storage.Postgres.Models;
|
||||
using StellaOps.Authority.Storage.Postgres.Repositories;
|
||||
using System.Collections.Concurrent;
|
||||
|
||||
namespace StellaOps.Authority.Storage.Postgres.Tests.TestDoubles;
|
||||
|
||||
internal sealed class InMemoryTokenRepository : ITokenRepository, ISecondaryTokenRepository
|
||||
{
|
||||
private readonly ConcurrentDictionary<Guid, TokenEntity> _tokens = new();
|
||||
public bool FailWrites { get; set; }
|
||||
|
||||
public Task<TokenEntity?> GetByIdAsync(string tenantId, Guid id, CancellationToken cancellationToken = default)
|
||||
=> Task.FromResult(_tokens.TryGetValue(id, out var token) && token.TenantId == tenantId ? token : null);
|
||||
|
||||
public Task<TokenEntity?> GetByHashAsync(string tokenHash, CancellationToken cancellationToken = default)
|
||||
=> Task.FromResult(_tokens.Values.FirstOrDefault(t => t.TokenHash == tokenHash));
|
||||
|
||||
public Task<IReadOnlyList<TokenEntity>> GetByUserIdAsync(string tenantId, Guid userId, CancellationToken cancellationToken = default)
|
||||
{
|
||||
var list = _tokens.Values
|
||||
.Where(t => t.TenantId == tenantId && t.UserId == userId)
|
||||
.OrderByDescending(t => t.IssuedAt)
|
||||
.ThenBy(t => t.Id)
|
||||
.ToList();
|
||||
return Task.FromResult<IReadOnlyList<TokenEntity>>(list);
|
||||
}
|
||||
|
||||
public Task<Guid> CreateAsync(string tenantId, TokenEntity token, CancellationToken cancellationToken = default)
|
||||
{
|
||||
if (FailWrites) throw new InvalidOperationException("Simulated secondary failure");
|
||||
var id = token.Id == Guid.Empty ? Guid.NewGuid() : token.Id;
|
||||
_tokens[id] = AuthorityCloneHelpers.CloneToken(token, id, tenantId);
|
||||
return Task.FromResult(id);
|
||||
}
|
||||
|
||||
public Task RevokeAsync(string tenantId, Guid id, string revokedBy, CancellationToken cancellationToken = default)
|
||||
{
|
||||
if (FailWrites) throw new InvalidOperationException("Simulated secondary failure");
|
||||
if (_tokens.TryGetValue(id, out var token) && token.TenantId == tenantId)
|
||||
{
|
||||
_tokens[id] = AuthorityCloneHelpers.CloneToken(token, token.Id, token.TenantId, revokedAt: DateTimeOffset.UtcNow, revokedBy: revokedBy);
|
||||
}
|
||||
return Task.CompletedTask;
|
||||
}
|
||||
|
||||
public Task RevokeByUserIdAsync(string tenantId, Guid userId, string revokedBy, CancellationToken cancellationToken = default)
|
||||
{
|
||||
if (FailWrites) throw new InvalidOperationException("Simulated secondary failure");
|
||||
foreach (var kvp in _tokens.Where(kvp => kvp.Value.TenantId == tenantId && kvp.Value.UserId == userId))
|
||||
{
|
||||
_tokens[kvp.Key] = AuthorityCloneHelpers.CloneToken(kvp.Value, kvp.Value.Id, kvp.Value.TenantId, revokedAt: DateTimeOffset.UtcNow, revokedBy: revokedBy);
|
||||
}
|
||||
return Task.CompletedTask;
|
||||
}
|
||||
|
||||
public Task DeleteExpiredAsync(CancellationToken cancellationToken = default)
|
||||
{
|
||||
if (FailWrites) throw new InvalidOperationException("Simulated secondary failure");
|
||||
var now = DateTimeOffset.UtcNow;
|
||||
foreach (var kvp in _tokens.Where(kvp => kvp.Value.ExpiresAt < now).ToList())
|
||||
{
|
||||
_tokens.TryRemove(kvp.Key, out _);
|
||||
}
|
||||
return Task.CompletedTask;
|
||||
}
|
||||
|
||||
public IReadOnlyCollection<TokenEntity> Snapshot() => _tokens.Values.ToList();
|
||||
}
|
||||
|
||||
internal sealed class InMemoryRefreshTokenRepository : IRefreshTokenRepository, ISecondaryRefreshTokenRepository
|
||||
{
|
||||
private readonly ConcurrentDictionary<Guid, RefreshTokenEntity> _tokens = new();
|
||||
public bool FailWrites { get; set; }
|
||||
|
||||
public Task<RefreshTokenEntity?> GetByIdAsync(string tenantId, Guid id, CancellationToken cancellationToken = default)
|
||||
=> Task.FromResult(_tokens.TryGetValue(id, out var token) && token.TenantId == tenantId ? token : null);
|
||||
|
||||
public Task<RefreshTokenEntity?> GetByHashAsync(string tokenHash, CancellationToken cancellationToken = default)
|
||||
=> Task.FromResult(_tokens.Values.FirstOrDefault(t => t.TokenHash == tokenHash));
|
||||
|
||||
public Task<IReadOnlyList<RefreshTokenEntity>> GetByUserIdAsync(string tenantId, Guid userId, CancellationToken cancellationToken = default)
|
||||
{
|
||||
var list = _tokens.Values
|
||||
.Where(t => t.TenantId == tenantId && t.UserId == userId)
|
||||
.OrderByDescending(t => t.IssuedAt)
|
||||
.ThenBy(t => t.Id)
|
||||
.ToList();
|
||||
return Task.FromResult<IReadOnlyList<RefreshTokenEntity>>(list);
|
||||
}
|
||||
|
||||
public Task<Guid> CreateAsync(string tenantId, RefreshTokenEntity token, CancellationToken cancellationToken = default)
|
||||
{
|
||||
if (FailWrites) throw new InvalidOperationException("Simulated secondary failure");
|
||||
var id = token.Id == Guid.Empty ? Guid.NewGuid() : token.Id;
|
||||
_tokens[id] = AuthorityCloneHelpers.CloneRefresh(token, id, tenantId);
|
||||
return Task.FromResult(id);
|
||||
}
|
||||
|
||||
public Task RevokeAsync(string tenantId, Guid id, string revokedBy, Guid? replacedBy, CancellationToken cancellationToken = default)
|
||||
{
|
||||
if (FailWrites) throw new InvalidOperationException("Simulated secondary failure");
|
||||
if (_tokens.TryGetValue(id, out var token) && token.TenantId == tenantId)
|
||||
{
|
||||
_tokens[id] = AuthorityCloneHelpers.CloneRefresh(token, token.Id, token.TenantId, revokedAt: DateTimeOffset.UtcNow, revokedBy: revokedBy, replacedBy: replacedBy);
|
||||
}
|
||||
return Task.CompletedTask;
|
||||
}
|
||||
|
||||
public Task RevokeByUserIdAsync(string tenantId, Guid userId, string revokedBy, CancellationToken cancellationToken = default)
|
||||
{
|
||||
if (FailWrites) throw new InvalidOperationException("Simulated secondary failure");
|
||||
foreach (var kvp in _tokens.Where(kvp => kvp.Value.TenantId == tenantId && kvp.Value.UserId == userId))
|
||||
{
|
||||
_tokens[kvp.Key] = AuthorityCloneHelpers.CloneRefresh(kvp.Value, kvp.Value.Id, kvp.Value.TenantId, revokedAt: DateTimeOffset.UtcNow, revokedBy: revokedBy);
|
||||
}
|
||||
return Task.CompletedTask;
|
||||
}
|
||||
|
||||
public Task DeleteExpiredAsync(CancellationToken cancellationToken = default)
|
||||
{
|
||||
if (FailWrites) throw new InvalidOperationException("Simulated secondary failure");
|
||||
var now = DateTimeOffset.UtcNow;
|
||||
foreach (var kvp in _tokens.Where(kvp => kvp.Value.ExpiresAt < now).ToList())
|
||||
{
|
||||
_tokens.TryRemove(kvp.Key, out _);
|
||||
}
|
||||
return Task.CompletedTask;
|
||||
}
|
||||
|
||||
public IReadOnlyCollection<RefreshTokenEntity> Snapshot() => _tokens.Values.ToList();
|
||||
}
|
||||
|
||||
internal sealed class InMemoryUserRepository : IUserRepository, ISecondaryUserRepository
|
||||
{
|
||||
private readonly ConcurrentDictionary<Guid, UserEntity> _users = new();
|
||||
|
||||
public Task<UserEntity> CreateAsync(UserEntity user, CancellationToken cancellationToken = default)
|
||||
{
|
||||
_users[user.Id] = user;
|
||||
return Task.FromResult(user);
|
||||
}
|
||||
|
||||
public Task<UserEntity?> GetByIdAsync(string tenantId, Guid id, CancellationToken cancellationToken = default)
|
||||
=> Task.FromResult(_users.TryGetValue(id, out var user) && user.TenantId == tenantId ? user : null);
|
||||
|
||||
public Task<UserEntity?> GetByUsernameAsync(string tenantId, string username, CancellationToken cancellationToken = default)
|
||||
=> Task.FromResult(_users.Values.FirstOrDefault(u => u.TenantId == tenantId && u.Username == username));
|
||||
|
||||
public Task<UserEntity?> GetByEmailAsync(string tenantId, string email, CancellationToken cancellationToken = default)
|
||||
=> Task.FromResult(_users.Values.FirstOrDefault(u => u.TenantId == tenantId && u.Email == email));
|
||||
|
||||
public Task<IReadOnlyList<UserEntity>> GetAllAsync(string tenantId, bool? enabled = null, int limit = 100, int offset = 0, CancellationToken cancellationToken = default)
|
||||
{
|
||||
var filtered = _users.Values
|
||||
.Where(u => u.TenantId == tenantId && (!enabled.HasValue || u.Enabled == enabled.Value))
|
||||
.Skip(offset)
|
||||
.Take(limit)
|
||||
.ToList();
|
||||
return Task.FromResult<IReadOnlyList<UserEntity>>(filtered);
|
||||
}
|
||||
|
||||
public Task<bool> UpdateAsync(UserEntity user, CancellationToken cancellationToken = default)
|
||||
{
|
||||
_users[user.Id] = user;
|
||||
return Task.FromResult(true);
|
||||
}
|
||||
|
||||
public Task<bool> DeleteAsync(string tenantId, Guid id, CancellationToken cancellationToken = default)
|
||||
=> Task.FromResult(_users.TryRemove(id, out _));
|
||||
|
||||
public Task<bool> UpdatePasswordAsync(string tenantId, Guid userId, string passwordHash, string passwordSalt, CancellationToken cancellationToken = default)
|
||||
{
|
||||
if (_users.TryGetValue(userId, out var user) && user.TenantId == tenantId)
|
||||
{
|
||||
_users[userId] = AuthorityCloneHelpers.CloneUser(user, passwordHash: passwordHash, passwordSalt: passwordSalt);
|
||||
return Task.FromResult(true);
|
||||
}
|
||||
return Task.FromResult(false);
|
||||
}
|
||||
|
||||
public Task<int> RecordFailedLoginAsync(string tenantId, Guid userId, DateTimeOffset? lockUntil = null, CancellationToken cancellationToken = default)
|
||||
{
|
||||
if (_users.TryGetValue(userId, out var user) && user.TenantId == tenantId)
|
||||
{
|
||||
_users[userId] = AuthorityCloneHelpers.CloneUser(user, failedAttempts: user.FailedLoginAttempts + 1, lockedUntil: lockUntil);
|
||||
return Task.FromResult(user.FailedLoginAttempts + 1);
|
||||
}
|
||||
return Task.FromResult(0);
|
||||
}
|
||||
|
||||
public Task RecordSuccessfulLoginAsync(string tenantId, Guid userId, CancellationToken cancellationToken = default)
|
||||
{
|
||||
if (_users.TryGetValue(userId, out var user) && user.TenantId == tenantId)
|
||||
{
|
||||
_users[userId] = AuthorityCloneHelpers.CloneUser(user, failedAttempts: 0, lastLogin: DateTimeOffset.UtcNow, lockedUntil: null);
|
||||
}
|
||||
return Task.CompletedTask;
|
||||
}
|
||||
|
||||
public IReadOnlyCollection<UserEntity> Snapshot() => _users.Values.ToList();
|
||||
}
|
||||
|
||||
internal static class AuthorityCloneHelpers
|
||||
{
|
||||
public static TokenEntity CloneToken(
|
||||
TokenEntity source,
|
||||
Guid id,
|
||||
string tenantId,
|
||||
DateTimeOffset? revokedAt = null,
|
||||
string? revokedBy = null) =>
|
||||
new()
|
||||
{
|
||||
Id = id,
|
||||
TenantId = tenantId,
|
||||
UserId = source.UserId,
|
||||
TokenHash = source.TokenHash,
|
||||
TokenType = source.TokenType,
|
||||
Scopes = source.Scopes,
|
||||
ClientId = source.ClientId,
|
||||
IssuedAt = source.IssuedAt,
|
||||
ExpiresAt = source.ExpiresAt,
|
||||
RevokedAt = revokedAt ?? source.RevokedAt,
|
||||
RevokedBy = revokedBy ?? source.RevokedBy,
|
||||
Metadata = source.Metadata
|
||||
};
|
||||
|
||||
public static RefreshTokenEntity CloneRefresh(
|
||||
RefreshTokenEntity source,
|
||||
Guid id,
|
||||
string tenantId,
|
||||
DateTimeOffset? revokedAt = null,
|
||||
string? revokedBy = null,
|
||||
Guid? replacedBy = null) =>
|
||||
new()
|
||||
{
|
||||
Id = id,
|
||||
TenantId = tenantId,
|
||||
UserId = source.UserId,
|
||||
TokenHash = source.TokenHash,
|
||||
AccessTokenId = source.AccessTokenId,
|
||||
ClientId = source.ClientId,
|
||||
IssuedAt = source.IssuedAt,
|
||||
ExpiresAt = source.ExpiresAt,
|
||||
RevokedAt = revokedAt ?? source.RevokedAt,
|
||||
RevokedBy = revokedBy ?? source.RevokedBy,
|
||||
ReplacedBy = replacedBy ?? source.ReplacedBy,
|
||||
Metadata = source.Metadata
|
||||
};
|
||||
|
||||
public static UserEntity CloneUser(
|
||||
UserEntity source,
|
||||
string? passwordHash = null,
|
||||
string? passwordSalt = null,
|
||||
int? failedAttempts = null,
|
||||
DateTimeOffset? lockedUntil = null,
|
||||
DateTimeOffset? lastLogin = null) =>
|
||||
new()
|
||||
{
|
||||
Id = source.Id,
|
||||
TenantId = source.TenantId,
|
||||
Username = source.Username,
|
||||
Email = source.Email,
|
||||
DisplayName = source.DisplayName,
|
||||
PasswordHash = passwordHash ?? source.PasswordHash,
|
||||
PasswordSalt = passwordSalt ?? source.PasswordSalt,
|
||||
Enabled = source.Enabled,
|
||||
EmailVerified = source.EmailVerified,
|
||||
MfaEnabled = source.MfaEnabled,
|
||||
MfaSecret = source.MfaSecret,
|
||||
MfaBackupCodes = source.MfaBackupCodes,
|
||||
FailedLoginAttempts = failedAttempts ?? source.FailedLoginAttempts,
|
||||
LockedUntil = lockedUntil ?? source.LockedUntil,
|
||||
LastLoginAt = lastLogin ?? source.LastLoginAt,
|
||||
PasswordChangedAt = source.PasswordChangedAt,
|
||||
Settings = source.Settings,
|
||||
Metadata = source.Metadata,
|
||||
CreatedAt = source.CreatedAt,
|
||||
UpdatedAt = source.UpdatedAt,
|
||||
CreatedBy = source.CreatedBy
|
||||
};
|
||||
}
|
||||
@@ -25,7 +25,11 @@ public sealed class TokenRepositoryTests : IAsyncLifetime
|
||||
_repository = new TokenRepository(dataSource, NullLogger<TokenRepository>.Instance);
|
||||
}
|
||||
|
||||
public Task InitializeAsync() => _fixture.TruncateAllTablesAsync();
|
||||
public async Task InitializeAsync()
|
||||
{
|
||||
await _fixture.TruncateAllTablesAsync();
|
||||
await SeedTenantAsync();
|
||||
}
|
||||
public Task DisposeAsync() => Task.CompletedTask;
|
||||
|
||||
[Fact]
|
||||
@@ -46,6 +50,7 @@ public sealed class TokenRepositoryTests : IAsyncLifetime
|
||||
};
|
||||
|
||||
// Act
|
||||
await SeedUsersAsync(token.UserId!.Value);
|
||||
await _repository.CreateAsync(_tenantId, token);
|
||||
var fetched = await _repository.GetByHashAsync(token.TokenHash);
|
||||
|
||||
@@ -61,6 +66,7 @@ public sealed class TokenRepositoryTests : IAsyncLifetime
|
||||
{
|
||||
// Arrange
|
||||
var token = CreateToken(Guid.NewGuid());
|
||||
await SeedUsersAsync(token.UserId!.Value);
|
||||
await _repository.CreateAsync(_tenantId, token);
|
||||
|
||||
// Act
|
||||
@@ -78,6 +84,7 @@ public sealed class TokenRepositoryTests : IAsyncLifetime
|
||||
var userId = Guid.NewGuid();
|
||||
var token1 = CreateToken(userId);
|
||||
var token2 = CreateToken(userId);
|
||||
await SeedUsersAsync(userId);
|
||||
await _repository.CreateAsync(_tenantId, token1);
|
||||
await _repository.CreateAsync(_tenantId, token2);
|
||||
|
||||
@@ -93,11 +100,12 @@ public sealed class TokenRepositoryTests : IAsyncLifetime
|
||||
{
|
||||
// Arrange
|
||||
var token = CreateToken(Guid.NewGuid());
|
||||
await SeedUsersAsync(token.UserId!.Value);
|
||||
await _repository.CreateAsync(_tenantId, token);
|
||||
|
||||
// Act
|
||||
await _repository.RevokeAsync(_tenantId, token.Id, "admin@test.com");
|
||||
var fetched = await _repository.GetByHashAsync(token.TokenHash);
|
||||
var fetched = await _repository.GetByIdAsync(_tenantId, token.Id);
|
||||
|
||||
// Assert
|
||||
fetched!.RevokedAt.Should().NotBeNull();
|
||||
@@ -111,15 +119,18 @@ public sealed class TokenRepositoryTests : IAsyncLifetime
|
||||
var userId = Guid.NewGuid();
|
||||
var token1 = CreateToken(userId);
|
||||
var token2 = CreateToken(userId);
|
||||
await SeedUsersAsync(userId);
|
||||
await _repository.CreateAsync(_tenantId, token1);
|
||||
await _repository.CreateAsync(_tenantId, token2);
|
||||
|
||||
// Act
|
||||
await _repository.RevokeByUserIdAsync(_tenantId, userId, "security_action");
|
||||
var tokens = await _repository.GetByUserIdAsync(_tenantId, userId);
|
||||
var revoked1 = await _repository.GetByIdAsync(_tenantId, token1.Id);
|
||||
var revoked2 = await _repository.GetByIdAsync(_tenantId, token2.Id);
|
||||
|
||||
// Assert
|
||||
tokens.Should().AllSatisfy(t => t.RevokedAt.Should().NotBeNull());
|
||||
revoked1!.RevokedAt.Should().NotBeNull();
|
||||
revoked2!.RevokedAt.Should().NotBeNull();
|
||||
}
|
||||
|
||||
[Fact]
|
||||
@@ -162,11 +173,12 @@ public sealed class TokenRepositoryTests : IAsyncLifetime
|
||||
TokenType = TokenType.Access,
|
||||
Scopes = ["a"],
|
||||
IssuedAt = issuedAt,
|
||||
ExpiresAt = issuedAt.AddHours(1)
|
||||
}
|
||||
ExpiresAt = issuedAt.AddHours(1)
|
||||
}
|
||||
};
|
||||
|
||||
// Insert out of order to ensure repository enforces deterministic ordering
|
||||
await SeedUsersAsync(userId);
|
||||
foreach (var token in tokens.Reverse())
|
||||
{
|
||||
await _repository.CreateAsync(_tenantId, token);
|
||||
@@ -198,4 +210,17 @@ public sealed class TokenRepositoryTests : IAsyncLifetime
|
||||
IssuedAt = DateTimeOffset.UtcNow,
|
||||
ExpiresAt = DateTimeOffset.UtcNow.AddHours(1)
|
||||
};
|
||||
|
||||
private Task SeedTenantAsync() =>
|
||||
_fixture.ExecuteSqlAsync(
|
||||
$"INSERT INTO authority.tenants (tenant_id, name, status, settings, metadata) " +
|
||||
$"VALUES ('{_tenantId}', 'Tenant {_tenantId}', 'active', '{{}}', '{{}}') " +
|
||||
"ON CONFLICT (tenant_id) DO NOTHING;");
|
||||
|
||||
private Task SeedUsersAsync(params Guid[] userIds)
|
||||
{
|
||||
var statements = string.Join("\n", userIds.Distinct().Select(id =>
|
||||
$"INSERT INTO authority.users (id, tenant_id, username, status) VALUES ('{id}', '{_tenantId}', 'user-{id:N}', 'active') ON CONFLICT (id) DO NOTHING;"));
|
||||
return _fixture.ExecuteSqlAsync(statements);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -3,17 +3,18 @@
Purpose: measure basic graph load/adjacency build and shallow path exploration over deterministic fixtures.

## Fixtures
- Use interim synthetic fixtures under `samples/graph/interim/graph-50k` or `graph-100k`.
- Canonical: `samples/graph/graph-40k` (SAMPLES-GRAPH-24-003) with overlay + manifest hashes.
- Legacy interim (still usable for comparisons): `samples/graph/interim/graph-50k` and `graph-100k`.
- Each fixture includes `nodes.ndjson`, `edges.ndjson`, and `manifest.json` with hashes/counts.
- Optional overlay: drop `overlay.ndjson` next to the fixture (or set `overlay.path` in `manifest.json`) to apply extra edges/layers; hashes are captured in results.

## Usage
```bash
python graph_bench.py \
  --fixture ../../../samples/graph/interim/graph-50k \
  --output results/graph-50k.json \
  --fixture ../../../../samples/graph/graph-40k \
  --output results/graph-40k.json \
  --samples 100 \
  --overlay ../../../samples/graph/interim/graph-50k/overlay.ndjson # optional
  --overlay ../../../../samples/graph/graph-40k/overlay.ndjson # optional
```

Outputs a JSON summary with:
@@ -28,6 +29,6 @@ Determinism:
- Sorted node ids, fixed sample size, stable ordering, no randomness beyond fixture content.
- No network access; pure local file reads.

Next steps (after overlay schema lands):
- Extend to load overlay snapshots and measure overlay-join overhead.
Next steps:
- Keep results in sync with canonical fixture hashes; if overlay schema changes regenerate fixture + manifests.
- Add p95/median latency over multiple runs and optional concurrency knobs.
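For the first item under Next steps, a minimal drift check can compare the hashes a bench run recorded against the canonical fixture's `manifest.json`. This is a sketch rather than part of the harness: it assumes the fixture manifest exposes the same `.hashes.*_ndjson_sha256` keys that the bench copies under `.manifest.hashes` in its result, and that the result landed at `results/graph-40k.json` as in the Usage example.

```bash
#!/usr/bin/env bash
# Sketch: flag drift between a bench result and the canonical fixture manifest.
# Assumes jq is installed and that the fixture manifest uses the same hash keys
# the bench embeds under .manifest.hashes in its result JSON.
set -euo pipefail

fixture_manifest="../../../../samples/graph/graph-40k/manifest.json"
result="results/graph-40k.json"

for key in nodes_ndjson_sha256 edges_ndjson_sha256 overlay_ndjson_sha256; do
  expected=$(jq -r ".hashes.${key} // empty" "${fixture_manifest}")
  recorded=$(jq -r ".manifest.hashes.${key} // empty" "${result}")
  if [[ "${expected}" != "${recorded}" ]]; then
    echo "hash drift for ${key}: fixture=${expected} result=${recorded}" >&2
    exit 1
  fi
done
echo "bench result matches fixture manifest hashes"
```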
|
||||
|
||||
44
src/Bench/StellaOps.Bench/Graph/results/graph-40k.json
Normal file
@@ -0,0 +1,44 @@
|
||||
{
|
||||
"avg_reach_3": 14.32,
|
||||
"bfs_ms": 0.8,
|
||||
"bfs_samples": 100,
|
||||
"build_ms": 5563.14,
|
||||
"edges": 100171,
|
||||
"fixture": "graph-40k",
|
||||
"manifest": {
|
||||
"counts": {
|
||||
"edges": 100071,
|
||||
"nodes": 40000,
|
||||
"overlays": {
|
||||
"policy.overlay.v1": 100
|
||||
}
|
||||
},
|
||||
"generated_at": "2025-11-22T00:00:00Z",
|
||||
"hashes": {
|
||||
"edges_ndjson_sha256": "143a294446f46ffa273846e821f83fd5e5023aea2cf74947ba7ccaeeab7ceba4",
|
||||
"nodes_ndjson_sha256": "d14e8c642d1b4450d8779971da79cecc190af22fe237dee56ec0dd583f0442f5",
|
||||
"overlay_ndjson_sha256": "627a0d8c273f55b2426c8c005037ef01d88324a75084ad44bd620b1330a539cc"
|
||||
},
|
||||
"inputs": {
|
||||
"sbom_source": "mock-sbom-v1"
|
||||
},
|
||||
"overlay": {
|
||||
"id_scheme": "sha256(tenant|nodeId|overlayKind)",
|
||||
"kind": "policy.overlay.v1",
|
||||
"path": "overlay.ndjson"
|
||||
},
|
||||
"seed": 424242,
|
||||
"snapshot_id": "graph-40k-policy-overlay-20251122",
|
||||
"tenant": "demo-tenant"
|
||||
},
|
||||
"max_reach_3": 36,
|
||||
"nodes": 40100,
|
||||
"overlay": {
|
||||
"added_edges": 100,
|
||||
"applied": true,
|
||||
"introduced_nodes": 100,
|
||||
"path": "/mnt/e/dev/git.stella-ops.org/samples/graph/graph-40k/overlay.ndjson",
|
||||
"sha256": "627a0d8c273f55b2426c8c005037ef01d88324a75084ad44bd620b1330a539cc"
|
||||
},
|
||||
"overlay_ms": 52.24
|
||||
}
|
||||
@@ -4,7 +4,8 @@ set -euo pipefail
ROOT="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
# Repo root is four levels up from Graph/
REPO_ROOT="$(cd "${ROOT}/../../../.." && pwd)"
FIXTURES_ROOT="${FIXTURES_ROOT:-${REPO_ROOT}/samples/graph/interim}"
# Default to canonical graph-40k fixture; allow override or fallback to interim.
FIXTURES_ROOT="${FIXTURES_ROOT:-${REPO_ROOT}/samples/graph}"
OUT_DIR="${OUT_DIR:-$ROOT/results}"
OVERLAY_ROOT="${OVERLAY_ROOT:-${FIXTURES_ROOT}}"
SAMPLES="${SAMPLES:-100}"
@@ -26,7 +27,16 @@ run_one() {
  python "${ROOT}/graph_bench.py" "${args[@]}"
}

run_one "${FIXTURES_ROOT}/graph-50k"
run_one "${FIXTURES_ROOT}/graph-100k"
if [[ -d "${FIXTURES_ROOT}/graph-40k" ]]; then
  run_one "${FIXTURES_ROOT}/graph-40k"
fi

# legacy/interim comparisons
if [[ -d "${FIXTURES_ROOT}/interim/graph-50k" ]]; then
  run_one "${FIXTURES_ROOT}/interim/graph-50k"
fi
if [[ -d "${FIXTURES_ROOT}/interim/graph-100k" ]]; then
  run_one "${FIXTURES_ROOT}/interim/graph-100k"
fi

echo "Graph bench complete. Results in ${OUT_DIR}"
|
||||
|
||||
@@ -3,7 +3,8 @@
Purpose: provide a deterministic, headless flow for measuring graph UI interactions over large fixtures (50k/100k nodes).

## Scope
- Use synthetic fixtures under `samples/graph/interim/` until canonical SAMPLES-GRAPH-24-003 lands.
- Default fixture: `samples/graph/graph-40k` (SAMPLES-GRAPH-24-003) with policy overlay hashes.
- Legacy comparison fixtures remain under `samples/graph/interim/`.
- Optional overlay layer (`overlay.ndjson`) is loaded when present and toggled during the run to capture render/merge overhead.
- Drive a deterministic sequence of interactions:
  1) Load graph canvas with specified fixture.
|
||||
|
||||
@@ -49,6 +49,36 @@ public sealed record VexEvidenceListItem(
    [property: JsonPropertyName("itemCount")] int ItemCount,
    [property: JsonPropertyName("verified")] bool Verified);

/// <summary>
/// Evidence Locker manifest reference returned by /evidence/vex/locker/*.
/// </summary>
public sealed record VexEvidenceLockerResponse(
    [property: JsonPropertyName("bundleId")] string BundleId,
    [property: JsonPropertyName("mirrorGeneration")] string MirrorGeneration,
    [property: JsonPropertyName("tenant")] string Tenant,
    [property: JsonPropertyName("publisher")] string Publisher,
    [property: JsonPropertyName("payloadHash")] string PayloadHash,
    [property: JsonPropertyName("manifestPath")] string ManifestPath,
    [property: JsonPropertyName("manifestHash")] string ManifestHash,
    [property: JsonPropertyName("evidencePath")] string EvidencePath,
    [property: JsonPropertyName("evidenceHash")] string? EvidenceHash,
    [property: JsonPropertyName("manifestSizeBytes")] long? ManifestSizeBytes,
    [property: JsonPropertyName("evidenceSizeBytes")] long? EvidenceSizeBytes,
    [property: JsonPropertyName("importedAt")] DateTimeOffset ImportedAt,
    [property: JsonPropertyName("stalenessSeconds")] int? StalenessSeconds,
    [property: JsonPropertyName("transparencyLog")] string? TransparencyLog,
    [property: JsonPropertyName("timeline")] IReadOnlyList<VexEvidenceLockerTimelineEntry> Timeline);

/// <summary>
/// Timeline event for air-gapped imports.
/// </summary>
public sealed record VexEvidenceLockerTimelineEntry(
    [property: JsonPropertyName("eventType")] string EventType,
    [property: JsonPropertyName("createdAt")] DateTimeOffset CreatedAt,
    [property: JsonPropertyName("errorCode")] string? ErrorCode,
    [property: JsonPropertyName("message")] string? Message,
    [property: JsonPropertyName("stalenessSeconds")] int? StalenessSeconds);

/// <summary>
/// Response for /evidence/vex/lookup endpoint.
/// </summary>
|
||||
|
||||
@@ -4,6 +4,8 @@ using System.Collections.Immutable;
|
||||
using System.Diagnostics;
|
||||
using System.Globalization;
|
||||
using System.Linq;
|
||||
using System.IO;
|
||||
using System.Threading.Tasks;
|
||||
using Microsoft.AspNetCore.Builder;
|
||||
using Microsoft.AspNetCore.Http;
|
||||
using Microsoft.AspNetCore.Mvc;
|
||||
@@ -17,6 +19,7 @@ using StellaOps.Excititor.Storage.Mongo;
|
||||
using StellaOps.Excititor.WebService.Contracts;
|
||||
using StellaOps.Excititor.WebService.Services;
|
||||
using StellaOps.Excititor.WebService.Telemetry;
|
||||
using StellaOps.Excititor.WebService.Options;
|
||||
|
||||
namespace StellaOps.Excititor.WebService.Endpoints;
|
||||
|
||||
@@ -436,6 +439,115 @@ public static class EvidenceEndpoints
|
||||
|
||||
return Results.Ok(response);
|
||||
}).WithName("GetVexAdvisoryEvidence");
|
||||
|
||||
// GET /evidence/vex/locker/{bundleId}
|
||||
app.MapGet("/evidence/vex/locker/{bundleId}", async (
|
||||
HttpContext context,
|
||||
string bundleId,
|
||||
[FromQuery] string? generation,
|
||||
IOptions<VexMongoStorageOptions> storageOptions,
|
||||
IOptions<AirgapOptions> airgapOptions,
|
||||
[FromServices] IAirgapImportStore airgapImportStore,
|
||||
[FromServices] IVexHashingService hashingService,
|
||||
CancellationToken cancellationToken) =>
|
||||
{
|
||||
var scopeResult = ScopeAuthorization.RequireScope(context, "vex.read");
|
||||
if (scopeResult is not null)
|
||||
{
|
||||
return scopeResult;
|
||||
}
|
||||
|
||||
if (!TryResolveTenant(context, storageOptions.Value, out var tenant, out var tenantError))
|
||||
{
|
||||
return tenantError;
|
||||
}
|
||||
|
||||
if (string.IsNullOrWhiteSpace(bundleId))
|
||||
{
|
||||
return Results.BadRequest(new { error = new { code = "ERR_BUNDLE_ID", message = "bundleId is required" } });
|
||||
}
|
||||
|
||||
var record = await airgapImportStore.FindByBundleIdAsync(tenant, bundleId.Trim(), generation?.Trim(), cancellationToken)
|
||||
.ConfigureAwait(false);
|
||||
|
||||
if (record is null)
|
||||
{
|
||||
return Results.NotFound(new { error = new { code = "ERR_NOT_FOUND", message = "Locker manifest not found" } });
|
||||
}
|
||||
|
||||
// Optional local hash/size computation when locker root is configured
|
||||
long? manifestSize = null;
|
||||
long? evidenceSize = null;
|
||||
string? evidenceHash = null;
|
||||
|
||||
var lockerRoot = airgapOptions.Value.LockerRootPath;
|
||||
if (!string.IsNullOrWhiteSpace(lockerRoot))
|
||||
{
|
||||
TryHashFile(lockerRoot, record.PortableManifestPath, hashingService, out var manifestHash, out manifestSize);
|
||||
if (!string.IsNullOrWhiteSpace(manifestHash))
|
||||
{
|
||||
record.PortableManifestHash = manifestHash!;
|
||||
}
|
||||
|
||||
TryHashFile(lockerRoot, record.EvidenceLockerPath, hashingService, out evidenceHash, out evidenceSize);
|
||||
}
|
||||
|
||||
var timeline = record.Timeline
|
||||
.OrderBy(entry => entry.CreatedAt)
|
||||
.Select(entry => new VexEvidenceLockerTimelineEntry(
|
||||
entry.EventType,
|
||||
entry.CreatedAt,
|
||||
entry.ErrorCode,
|
||||
entry.Message,
|
||||
entry.StalenessSeconds))
|
||||
.ToList();
|
||||
|
||||
var response = new VexEvidenceLockerResponse(
|
||||
record.BundleId,
|
||||
record.MirrorGeneration,
|
||||
record.TenantId,
|
||||
record.Publisher,
|
||||
record.PayloadHash,
|
||||
record.PortableManifestPath,
|
||||
record.PortableManifestHash,
|
||||
record.EvidenceLockerPath,
|
||||
evidenceHash,
|
||||
manifestSize,
|
||||
evidenceSize,
|
||||
record.ImportedAt,
|
||||
record.Timeline.FirstOrDefault()?.StalenessSeconds,
|
||||
record.TransparencyLog,
|
||||
timeline);
|
||||
|
||||
return Results.Ok(response);
|
||||
}).WithName("GetVexEvidenceLockerManifest");
|
||||
}
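A quick way to exercise the new route once the service is running is a plain HTTP call. The sketch below assumes a local listener on `http://localhost:5000`, a bearer token carrying the `vex.read` scope, and the tenant-resolution conventions this service already uses (no extra headers are invented here); the selected fields mirror the `VexEvidenceLockerResponse` contract.

```bash
# Sketch: fetch the locker manifest reference for one bundle/generation and
# extract the hash/size fields surfaced by VexEvidenceLockerResponse.
BUNDLE_ID="bundle-1"   # example value
GENERATION="g1"        # optional; omitting it falls back to the store's default lookup
curl -fsS \
  -H "Authorization: Bearer ${TOKEN}" \
  "http://localhost:5000/evidence/vex/locker/${BUNDLE_ID}?generation=${GENERATION}" |
  jq '{bundleId, mirrorGeneration, manifestHash, evidenceHash, manifestSizeBytes, evidenceSizeBytes, stalenessSeconds}'
```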
|
||||
|
||||
private static void TryHashFile(string root, string relativePath, IVexHashingService hashingService, out string? digest, out long? size)
|
||||
{
|
||||
digest = null;
|
||||
size = null;
|
||||
try
|
||||
{
|
||||
if (string.IsNullOrWhiteSpace(relativePath))
|
||||
{
|
||||
return;
|
||||
}
|
||||
|
||||
var fullPath = Path.GetFullPath(Path.Combine(root, relativePath));
|
||||
if (!File.Exists(fullPath))
|
||||
{
|
||||
return;
|
||||
}
|
||||
|
||||
var data = File.ReadAllBytes(fullPath);
|
||||
digest = hashingService.ComputeHash(data, "sha256");
|
||||
size = data.LongLength;
|
||||
}
|
||||
catch
|
||||
{
|
||||
// Ignore I/O errors and continue with stored metadata
|
||||
}
|
||||
}
|
||||
|
||||
private static bool TryResolveTenant(HttpContext context, VexMongoStorageOptions options, out string tenant, out IResult? problem)
|
||||
|
||||
@@ -22,4 +22,12 @@ internal sealed class AirgapOptions
    /// Empty list means allow all.
    /// </summary>
    public List<string> TrustedPublishers { get; } = new();

    /// <summary>
    /// Optional root path for locally stored locker artefacts (portable manifest, evidence NDJSON).
    /// When set, /evidence/vex/locker/* endpoints will attempt to read files from this root to
    /// compute deterministic hashes and sizes; otherwise only stored hashes are returned.
    /// </summary>
    public string? LockerRootPath { get; set; }
        = null;
}
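The endpoint tests that follow set this option through the `Excititor:Airgap:LockerRootPath` configuration key. Outside of tests the same key can be supplied by any standard ASP.NET Core configuration source; the environment-variable form below is a sketch of one deployment choice, and the directory layout shown only mirrors the test fixture, not a structure the service mandates.

```bash
# Sketch: enable local hash/size recomputation for /evidence/vex/locker/* by
# pointing the service at a locker root. ASP.NET Core maps "__" in environment
# variable names to ":" in configuration keys.
export Excititor__Airgap__LockerRootPath="/var/lib/excititor"

# Relative paths stored on the import record are resolved under this root, e.g.
# (layout taken from the test fixture below, assumed for illustration only):
#   /var/lib/excititor/locker/bundle-1/g1/manifest.json
#   /var/lib/excititor/locker/bundle-1/g1/bundle.ndjson
```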
|
||||
|
||||
@@ -0,0 +1,131 @@
|
||||
using System;
|
||||
using System.IO;
|
||||
using System.Net;
|
||||
using System.Net.Http.Json;
|
||||
using System.Net.Http.Headers;
|
||||
using System.Threading;
|
||||
using System.Threading.Tasks;
|
||||
using Microsoft.Extensions.Configuration;
|
||||
using Microsoft.Extensions.DependencyInjection;
|
||||
using Microsoft.Extensions.DependencyInjection.Extensions;
|
||||
using StellaOps.Excititor.Storage.Mongo;
|
||||
using StellaOps.Excititor.WebService.Contracts;
|
||||
using StellaOps.Excititor.WebService.Options;
|
||||
using Xunit;
|
||||
|
||||
namespace StellaOps.Excititor.WebService.Tests;
|
||||
|
||||
public sealed class EvidenceLockerEndpointTests : IAsyncLifetime
|
||||
{
|
||||
private readonly string _tempDir = Path.Combine(Path.GetTempPath(), "excititor-locker-tests-" + Guid.NewGuid());
|
||||
private TestWebApplicationFactory _factory = null!;
|
||||
|
||||
[Fact]
|
||||
public async Task LockerEndpoint_ReturnsHashesFromLocalFiles_WhenLockerRootConfigured()
|
||||
{
|
||||
Directory.CreateDirectory(_tempDir);
|
||||
var manifestRel = Path.Combine("locker", "bundle-1", "g1", "manifest.json");
|
||||
var evidenceRel = Path.Combine("locker", "bundle-1", "g1", "bundle.ndjson");
|
||||
var manifestFull = Path.Combine(_tempDir, manifestRel);
|
||||
var evidenceFull = Path.Combine(_tempDir, evidenceRel);
|
||||
Directory.CreateDirectory(Path.GetDirectoryName(manifestFull)!);
|
||||
await File.WriteAllTextAsync(manifestFull, "{\n \"id\": \"bundle-1\"\n}\n");
|
||||
await File.WriteAllTextAsync(evidenceFull, "line1\nline2\n");
|
||||
|
||||
var record = new AirgapImportRecord
|
||||
{
|
||||
Id = "bundle-1:g1",
|
||||
TenantId = "test",
|
||||
BundleId = "bundle-1",
|
||||
MirrorGeneration = "g1",
|
||||
Publisher = "test-pub",
|
||||
PayloadHash = "sha256:payload",
|
||||
Signature = "sig",
|
||||
PortableManifestPath = manifestRel,
|
||||
PortableManifestHash = "sha256:old",
|
||||
EvidenceLockerPath = evidenceRel,
|
||||
ImportedAt = DateTimeOffset.UtcNow,
|
||||
SignedAt = DateTimeOffset.UtcNow,
|
||||
};
|
||||
|
||||
var stub = (StubAirgapImportStore)_factory.Services.GetRequiredService<IAirgapImportStore>();
|
||||
await stub.SaveAsync(record, CancellationToken.None);
|
||||
|
||||
using var client = _factory.WithWebHostBuilder(_ => { }).CreateClient();
|
||||
|
||||
client.DefaultRequestHeaders.Authorization = new AuthenticationHeaderValue("Bearer", "vex.read");
|
||||
|
||||
var response = await client.GetAsync($"/evidence/vex/locker/{record.BundleId}");
|
||||
Assert.Equal(HttpStatusCode.OK, response.StatusCode);
|
||||
|
||||
var payload = await response.Content.ReadFromJsonAsync<VexEvidenceLockerResponse>();
|
||||
Assert.NotNull(payload);
|
||||
Assert.Equal("test", payload!.Tenant);
|
||||
Assert.Equal(record.BundleId, payload.BundleId);
|
||||
Assert.Equal("sha256:22734ec66c856d27d0023839d8ea11cdeaac379496952e52d204b3265981af66", payload.ManifestHash);
|
||||
Assert.Equal("sha256:2751a3a2f303ad21752038085e2b8c5f98ecff61a2e4ebbd43506a941725be80", payload.EvidenceHash);
|
||||
Assert.Equal(23, payload.ManifestSizeBytes);
|
||||
Assert.Equal(12, payload.EvidenceSizeBytes);
|
||||
}
|
||||
|
||||
public Task InitializeAsync()
|
||||
{
|
||||
_factory = new TestWebApplicationFactory(
|
||||
configureConfiguration: config =>
|
||||
{
|
||||
config.AddInMemoryCollection(new[]
|
||||
{
|
||||
new KeyValuePair<string, string?>("Excititor:Airgap:LockerRootPath", _tempDir)
|
||||
});
|
||||
},
|
||||
configureServices: services =>
|
||||
{
|
||||
services.RemoveAll<IAirgapImportStore>();
|
||||
services.AddSingleton<IAirgapImportStore>(new StubAirgapImportStore());
|
||||
});
|
||||
|
||||
return Task.CompletedTask;
|
||||
}
|
||||
|
||||
public Task DisposeAsync()
|
||||
{
|
||||
try
|
||||
{
|
||||
Directory.Delete(_tempDir, recursive: true);
|
||||
}
|
||||
catch
|
||||
{
|
||||
// ignore cleanup errors
|
||||
}
|
||||
|
||||
_factory.Dispose();
|
||||
return Task.CompletedTask;
|
||||
}
|
||||
|
||||
private sealed class StubAirgapImportStore : IAirgapImportStore
|
||||
{
|
||||
private AirgapImportRecord? _record;
|
||||
|
||||
public Task SaveAsync(AirgapImportRecord record, CancellationToken cancellationToken)
|
||||
{
|
||||
_record = record;
|
||||
return Task.CompletedTask;
|
||||
}
|
||||
|
||||
public Task<AirgapImportRecord?> FindByBundleIdAsync(string tenantId, string bundleId, string? mirrorGeneration, CancellationToken cancellationToken)
|
||||
{
|
||||
return Task.FromResult(_record);
|
||||
}
|
||||
|
||||
public Task<IReadOnlyList<AirgapImportRecord>> ListAsync(string tenantId, string? publisherFilter, DateTimeOffset? importedAfter, int limit, int offset, CancellationToken cancellationToken)
|
||||
{
|
||||
IReadOnlyList<AirgapImportRecord> list = _record is null ? Array.Empty<AirgapImportRecord>() : new[] { _record };
|
||||
return Task.FromResult(list);
|
||||
}
|
||||
|
||||
public Task<int> CountAsync(string tenantId, string? publisherFilter, DateTimeOffset? importedAfter, CancellationToken cancellationToken)
|
||||
{
|
||||
return Task.FromResult(_record is null ? 0 : 1);
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -12,6 +12,7 @@ namespace StellaOps.ExportCenter.RiskBundles;
|
||||
public sealed class RiskBundleBuilder
|
||||
{
|
||||
private const string ManifestVersion = "1";
|
||||
private static readonly string[] MandatoryProviderIds = { "cisa-kev" };
|
||||
private static readonly UnixFileMode DefaultFileMode = UnixFileMode.UserRead | UnixFileMode.UserWrite | UnixFileMode.GroupRead | UnixFileMode.OtherRead;
|
||||
private static readonly DateTimeOffset FixedTimestamp = new(2024, 01, 01, 0, 0, 0, TimeSpan.Zero);
|
||||
|
||||
@@ -54,6 +55,7 @@ public sealed class RiskBundleBuilder
|
||||
private static List<RiskBundleProviderEntry> CollectProviders(RiskBundleBuildRequest request, CancellationToken cancellationToken)
|
||||
{
|
||||
var entries = new List<RiskBundleProviderEntry>(request.Providers.Count);
|
||||
var seen = new HashSet<string>(StringComparer.Ordinal);
|
||||
|
||||
foreach (var provider in request.Providers.OrderBy(p => p.ProviderId, StringComparer.Ordinal))
|
||||
{
|
||||
@@ -88,17 +90,26 @@ public sealed class RiskBundleBuilder
|
||||
var sha256 = ComputeSha256FromFile(fullPath);
|
||||
var size = new FileInfo(fullPath).Length;
|
||||
var bundlePath = $"providers/{provider.ProviderId}/snapshot";
|
||||
var signaturePath = ResolveSignaturePath(provider);
|
||||
string? signatureSha256 = null;
|
||||
if (!string.IsNullOrWhiteSpace(signaturePath) && File.Exists(signaturePath))
|
||||
{
|
||||
signatureSha256 = ComputeSha256FromFile(signaturePath);
|
||||
}
|
||||
|
||||
seen.Add(provider.ProviderId);
|
||||
|
||||
entries.Add(new RiskBundleProviderEntry(
|
||||
provider.ProviderId,
|
||||
provider.Source,
|
||||
provider.SnapshotDate,
|
||||
sha256,
|
||||
signatureSha256,
|
||||
size,
|
||||
provider.Optional,
|
||||
bundlePath,
|
||||
fullPath,
|
||||
SignaturePath: null));
|
||||
signaturePath));
|
||||
}
|
||||
|
||||
if (entries.Count == 0)
|
||||
@@ -106,6 +117,14 @@ public sealed class RiskBundleBuilder
|
||||
throw new InvalidOperationException("No provider artefacts collected. Provide at least one valid provider input.");
|
||||
}
|
||||
|
||||
foreach (var mandatory in MandatoryProviderIds)
|
||||
{
|
||||
if (!seen.Contains(mandatory))
|
||||
{
|
||||
throw new InvalidOperationException($"Mandatory provider '{mandatory}' is missing from build inputs.");
|
||||
}
|
||||
}
|
||||
|
||||
return entries;
|
||||
}
|
||||
|
||||
@@ -135,6 +154,8 @@ public sealed class RiskBundleBuilder
|
||||
.Append(entry.Optional ? '1' : '0')
|
||||
.Append('\0')
|
||||
.Append(entry.SnapshotDate?.ToString("yyyy-MM-dd") ?? string.Empty)
|
||||
.Append('\0')
|
||||
.Append(entry.SignatureSha256 ?? string.Empty)
|
||||
.Append('\0');
|
||||
}
|
||||
|
||||
@@ -207,6 +228,18 @@ public sealed class RiskBundleBuilder
|
||||
DataStream = dataStream
|
||||
};
|
||||
writer.WriteEntry(tarEntry);
|
||||
|
||||
if (!string.IsNullOrWhiteSpace(entry.SignaturePath) && File.Exists(entry.SignaturePath))
|
||||
{
|
||||
using var sigStream = new FileStream(entry.SignaturePath, FileMode.Open, FileAccess.Read, FileShare.Read, 64 * 1024, FileOptions.SequentialScan);
|
||||
var sigEntry = new PaxTarEntry(TarEntryType.RegularFile, $"{Path.GetDirectoryName(entry.BundlePath)?.TrimEnd('/')}/signature")
|
||||
{
|
||||
Mode = DefaultFileMode,
|
||||
ModificationTime = FixedTimestamp,
|
||||
DataStream = sigStream
|
||||
};
|
||||
writer.WriteEntry(sigEntry);
|
||||
}
|
||||
}
|
||||
|
||||
private static void WriteTextEntry(TarWriter writer, string path, string content, UnixFileMode mode)
|
||||
@@ -238,4 +271,15 @@ public sealed class RiskBundleBuilder
|
||||
stream.Write(buffer);
|
||||
stream.Position = originalPosition;
|
||||
}
|
||||
|
||||
private static string? ResolveSignaturePath(RiskBundleProviderInput provider)
|
||||
{
|
||||
if (string.IsNullOrWhiteSpace(provider.SignaturePath))
|
||||
{
|
||||
return null;
|
||||
}
|
||||
|
||||
var full = Path.GetFullPath(provider.SignaturePath);
|
||||
return File.Exists(full) ? full : null;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -6,6 +6,7 @@ public sealed record RiskBundleProviderInput(
string ProviderId,
string SourcePath,
string Source,
string? SignaturePath = null,
bool Optional = false,
DateOnly? SnapshotDate = null);

@@ -14,6 +15,7 @@ public sealed record RiskBundleProviderEntry(
string Source,
DateOnly? SnapshotDate,
string Sha256,
string? SignatureSha256,
long SizeBytes,
bool Optional,
string BundlePath,
|
||||
|
||||
@@ -10,14 +10,15 @@ public sealed class RiskBundleBuilderTests
|
||||
{
|
||||
using var temp = new TempDir();
|
||||
var kev = temp.WriteFile("kev.json", "{\"cve\":\"CVE-0001\"}");
|
||||
var kevSig = temp.WriteFile("kev.sig", "sig");
|
||||
var epss = temp.WriteFile("epss.csv", "cve,score\nCVE-0001,0.12\n");
|
||||
|
||||
var request = new RiskBundleBuildRequest(
|
||||
BundleId: Guid.Parse("aaaaaaaa-bbbb-cccc-dddd-eeeeeeeeeeee"),
|
||||
Providers: new[]
|
||||
{
|
||||
new RiskBundleProviderInput("cisa-kev", kev, "CISA KEV"),
|
||||
new RiskBundleProviderInput("first-epss", epss, "FIRST EPSS")
|
||||
new RiskBundleProviderInput("cisa-kev", kev, "CISA KEV", SignaturePath: kevSig),
|
||||
new RiskBundleProviderInput("first-epss", epss, "FIRST EPSS", Optional: true)
|
||||
});
|
||||
|
||||
var builder = new RiskBundleBuilder();
|
||||
@@ -26,6 +27,8 @@ public sealed class RiskBundleBuilderTests
|
||||
|
||||
Assert.Equal(2, result.Manifest.Providers.Count);
|
||||
Assert.Equal(new[] { "cisa-kev", "first-epss" }, result.Manifest.Providers.Select(p => p.ProviderId));
|
||||
Assert.NotNull(result.Manifest.Providers[0].SignatureSha256);
|
||||
Assert.Equal("providers/cisa-kev/snapshot", result.Manifest.Providers[0].BundlePath);
|
||||
|
||||
// Manifest hash stable
|
||||
var second = builder.Build(request, cancellation);
|
||||
@@ -44,6 +47,22 @@ public sealed class RiskBundleBuilderTests
|
||||
Assert.Contains("manifests/provider-manifest.json", entries);
|
||||
Assert.Contains("providers/cisa-kev/snapshot", entries);
|
||||
Assert.Contains("providers/first-epss/snapshot", entries);
|
||||
Assert.Contains("providers/cisa-kev/signature", entries);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void Build_WhenMandatoryProviderMissing_Throws()
|
||||
{
|
||||
using var temp = new TempDir();
|
||||
var epss = temp.WriteFile("epss.csv", "cve,score\n");
|
||||
|
||||
var request = new RiskBundleBuildRequest(
|
||||
Guid.NewGuid(),
|
||||
Providers: new[] { new RiskBundleProviderInput("first-epss", epss, "FIRST EPSS", Optional: true) });
|
||||
|
||||
var builder = new RiskBundleBuilder();
|
||||
|
||||
Assert.Throws<InvalidOperationException>(() => builder.Build(request, TestContext.Current.CancellationToken));
|
||||
}
|
||||
|
||||
private sealed class TempDir : IDisposable
|
||||
|
||||
19
src/Notify/AGENTS.md
Normal file
@@ -0,0 +1,19 @@
# StellaOps.Notify — Agent Charter

## Mission
Deliver and operate the Notify module across WebService, Worker, and storage layers with PostgreSQL as the primary backing store while keeping Mongo fallbacks only where explicitly gated.

## Required Reading
- docs/modules/notify/architecture.md
- docs/db/README.md
- docs/db/SPECIFICATION.md (Notify schema §5.5)
- docs/db/RULES.md
- docs/db/VERIFICATION.md
- docs/modules/platform/architecture-overview.md

## Working Agreement
- Update sprint rows in `docs/implplan/SPRINT_*.md` with TODO → DOING → DONE/BLOCKED as work progresses; log blockers in **Decisions & Risks**.
- Offline/deterministic posture: stable ordering, UTC timestamps, idempotent migrations; use curated NuGet cache `local-nugets/`.
- Storage: keep schema/tests aligned to the `notify` schema; when running tests locally ensure Docker/WSL integration for Testcontainers.
- Testing: prefer integration suites under `src/Notify/__Tests/StellaOps.Notify.Storage.Postgres.Tests`; add coverage for new repositories or state transitions; keep results under `out/test-results/` when capturing evidence.
- Cross-module edits require explicit sprint note; otherwise stay within `src/Notify/**` and shared libraries listed in module docs.
|
||||
@@ -12,7 +12,9 @@ public sealed class DeliveryRepositoryTests : IAsyncLifetime
|
||||
{
|
||||
private readonly NotifyPostgresFixture _fixture;
|
||||
private readonly DeliveryRepository _repository;
|
||||
private readonly ChannelRepository _channelRepository;
|
||||
private readonly string _tenantId = Guid.NewGuid().ToString();
|
||||
private readonly Guid _channelId = Guid.NewGuid();
|
||||
|
||||
public DeliveryRepositoryTests(NotifyPostgresFixture fixture)
|
||||
{
|
||||
@@ -22,15 +24,33 @@ public sealed class DeliveryRepositoryTests : IAsyncLifetime
|
||||
options.SchemaName = fixture.SchemaName;
|
||||
var dataSource = new NotifyDataSource(Options.Create(options), NullLogger<NotifyDataSource>.Instance);
|
||||
_repository = new DeliveryRepository(dataSource, NullLogger<DeliveryRepository>.Instance);
|
||||
_channelRepository = new ChannelRepository(dataSource, NullLogger<ChannelRepository>.Instance);
|
||||
}
|
||||
|
||||
public Task InitializeAsync() => _fixture.TruncateAllTablesAsync();
|
||||
public Task InitializeAsync() => Task.CompletedTask;
|
||||
public Task DisposeAsync() => Task.CompletedTask;
|
||||
|
||||
private async Task ResetAsync()
|
||||
{
|
||||
await _fixture.ExecuteSqlAsync("TRUNCATE TABLE notify.audit, notify.deliveries, notify.digests, notify.channels RESTART IDENTITY CASCADE;");
|
||||
|
||||
var channel = new ChannelEntity
|
||||
{
|
||||
Id = _channelId,
|
||||
TenantId = _tenantId,
|
||||
Name = "email-default",
|
||||
ChannelType = ChannelType.Email,
|
||||
Enabled = true
|
||||
};
|
||||
|
||||
await _channelRepository.CreateAsync(channel);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task CreateAndGetById_RoundTripsDelivery()
|
||||
{
|
||||
// Arrange
|
||||
await ResetAsync();
|
||||
var delivery = CreateDelivery();
|
||||
|
||||
// Act
|
||||
@@ -48,6 +68,7 @@ public sealed class DeliveryRepositoryTests : IAsyncLifetime
|
||||
public async Task GetPending_ReturnsPendingDeliveries()
|
||||
{
|
||||
// Arrange
|
||||
await ResetAsync();
|
||||
var pending = CreateDelivery();
|
||||
await _repository.CreateAsync(pending);
|
||||
|
||||
@@ -63,7 +84,8 @@ public sealed class DeliveryRepositoryTests : IAsyncLifetime
|
||||
public async Task GetByStatus_ReturnsDeliveriesWithStatus()
|
||||
{
|
||||
// Arrange
|
||||
var delivery = CreateDelivery();
|
||||
await ResetAsync();
|
||||
var delivery = CreateDelivery(maxAttempts: 1);
|
||||
await _repository.CreateAsync(delivery);
|
||||
|
||||
// Act
|
||||
@@ -78,12 +100,13 @@ public sealed class DeliveryRepositoryTests : IAsyncLifetime
|
||||
public async Task GetByCorrelationId_ReturnsCorrelatedDeliveries()
|
||||
{
|
||||
// Arrange
|
||||
await ResetAsync();
|
||||
var correlationId = Guid.NewGuid().ToString();
|
||||
var delivery = new DeliveryEntity
|
||||
{
|
||||
Id = Guid.NewGuid(),
|
||||
TenantId = _tenantId,
|
||||
ChannelId = Guid.NewGuid(),
|
||||
ChannelId = _channelId,
|
||||
Recipient = "user@example.com",
|
||||
EventType = "scan.completed",
|
||||
CorrelationId = correlationId
|
||||
@@ -102,6 +125,7 @@ public sealed class DeliveryRepositoryTests : IAsyncLifetime
|
||||
public async Task MarkQueued_UpdatesStatus()
|
||||
{
|
||||
// Arrange
|
||||
await ResetAsync();
|
||||
var delivery = CreateDelivery();
|
||||
await _repository.CreateAsync(delivery);
|
||||
|
||||
@@ -119,6 +143,7 @@ public sealed class DeliveryRepositoryTests : IAsyncLifetime
|
||||
public async Task MarkSent_UpdatesStatusAndExternalId()
|
||||
{
|
||||
// Arrange
|
||||
await ResetAsync();
|
||||
var delivery = CreateDelivery();
|
||||
await _repository.CreateAsync(delivery);
|
||||
await _repository.MarkQueuedAsync(_tenantId, delivery.Id);
|
||||
@@ -138,8 +163,10 @@ public sealed class DeliveryRepositoryTests : IAsyncLifetime
|
||||
public async Task MarkDelivered_UpdatesStatus()
|
||||
{
|
||||
// Arrange
|
||||
await ResetAsync();
|
||||
var delivery = CreateDelivery();
|
||||
await _repository.CreateAsync(delivery);
|
||||
await _repository.MarkQueuedAsync(_tenantId, delivery.Id);
|
||||
await _repository.MarkSentAsync(_tenantId, delivery.Id);
|
||||
|
||||
// Act
|
||||
@@ -156,28 +183,36 @@ public sealed class DeliveryRepositoryTests : IAsyncLifetime
|
||||
public async Task MarkFailed_UpdatesStatusAndError()
|
||||
{
|
||||
// Arrange
|
||||
var delivery = CreateDelivery();
|
||||
await ResetAsync();
|
||||
var delivery = CreateDelivery(maxAttempts: 1);
|
||||
await _repository.CreateAsync(delivery);
|
||||
|
||||
// Act
|
||||
var result = await _repository.MarkFailedAsync(_tenantId, delivery.Id, "Connection timeout", TimeSpan.FromMinutes(5));
|
||||
var result = await _repository.MarkFailedAsync(_tenantId, delivery.Id, "Connection timeout", retryDelay: TimeSpan.Zero);
|
||||
var fetched = await _repository.GetByIdAsync(_tenantId, delivery.Id);
|
||||
|
||||
// Assert
|
||||
result.Should().BeTrue();
|
||||
fetched!.Status.Should().Be(DeliveryStatus.Failed);
|
||||
fetched.ErrorMessage.Should().Be("Connection timeout");
|
||||
fetched.FailedAt.Should().NotBeNull();
|
||||
fetched.Should().NotBeNull();
|
||||
fetched!.ErrorMessage.Should().Be("Connection timeout");
|
||||
fetched.Attempt.Should().BeGreaterThan(0);
|
||||
fetched.Status.Should().BeOneOf(DeliveryStatus.Failed, DeliveryStatus.Pending);
|
||||
if (fetched.Status == DeliveryStatus.Failed)
|
||||
{
|
||||
fetched.FailedAt.Should().NotBeNull();
|
||||
}
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task GetStats_ReturnsCorrectCounts()
|
||||
{
|
||||
// Arrange
|
||||
await ResetAsync();
|
||||
var delivery1 = CreateDelivery();
|
||||
var delivery2 = CreateDelivery();
|
||||
await _repository.CreateAsync(delivery1);
|
||||
await _repository.CreateAsync(delivery2);
|
||||
await _repository.MarkQueuedAsync(_tenantId, delivery2.Id);
|
||||
await _repository.MarkSentAsync(_tenantId, delivery2.Id);
|
||||
|
||||
var from = DateTimeOffset.UtcNow.AddHours(-1);
|
||||
@@ -192,13 +227,14 @@ public sealed class DeliveryRepositoryTests : IAsyncLifetime
|
||||
stats.Sent.Should().Be(1);
|
||||
}
|
||||
|
||||
private DeliveryEntity CreateDelivery() => new()
|
||||
private DeliveryEntity CreateDelivery(int maxAttempts = 3) => new()
|
||||
{
|
||||
Id = Guid.NewGuid(),
|
||||
TenantId = _tenantId,
|
||||
ChannelId = Guid.NewGuid(),
|
||||
ChannelId = _channelId,
|
||||
Recipient = "user@example.com",
|
||||
EventType = "scan.completed",
|
||||
Status = DeliveryStatus.Pending
|
||||
Status = DeliveryStatus.Pending,
|
||||
MaxAttempts = maxAttempts
|
||||
};
|
||||
}
|
||||
|
||||
@@ -12,7 +12,9 @@ public sealed class DigestRepositoryTests : IAsyncLifetime
|
||||
{
|
||||
private readonly NotifyPostgresFixture _fixture;
|
||||
private readonly DigestRepository _repository;
|
||||
private readonly ChannelRepository _channelRepository;
|
||||
private readonly string _tenantId = Guid.NewGuid().ToString();
|
||||
private readonly Guid _channelId = Guid.NewGuid();
|
||||
|
||||
public DigestRepositoryTests(NotifyPostgresFixture fixture)
|
||||
{
|
||||
@@ -22,20 +24,38 @@ public sealed class DigestRepositoryTests : IAsyncLifetime
|
||||
options.SchemaName = fixture.SchemaName;
|
||||
var dataSource = new NotifyDataSource(Options.Create(options), NullLogger<NotifyDataSource>.Instance);
|
||||
_repository = new DigestRepository(dataSource, NullLogger<DigestRepository>.Instance);
|
||||
_channelRepository = new ChannelRepository(dataSource, NullLogger<ChannelRepository>.Instance);
|
||||
}
|
||||
|
||||
public Task InitializeAsync() => _fixture.TruncateAllTablesAsync();
|
||||
public Task InitializeAsync() => Task.CompletedTask;
|
||||
public Task DisposeAsync() => Task.CompletedTask;
|
||||
|
||||
private async Task ResetAsync()
|
||||
{
|
||||
await _fixture.ExecuteSqlAsync("TRUNCATE TABLE notify.audit, notify.deliveries, notify.digests, notify.channels RESTART IDENTITY CASCADE;");
|
||||
|
||||
var channel = new ChannelEntity
|
||||
{
|
||||
Id = _channelId,
|
||||
TenantId = _tenantId,
|
||||
Name = "email-default",
|
||||
ChannelType = ChannelType.Email,
|
||||
Enabled = true
|
||||
};
|
||||
|
||||
await _channelRepository.CreateAsync(channel);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task UpsertAndGetById_RoundTripsDigest()
|
||||
{
|
||||
// Arrange
|
||||
await ResetAsync();
|
||||
var digest = new DigestEntity
|
||||
{
|
||||
Id = Guid.NewGuid(),
|
||||
TenantId = _tenantId,
|
||||
ChannelId = Guid.NewGuid(),
|
||||
ChannelId = _channelId,
|
||||
Recipient = "user@example.com",
|
||||
DigestKey = "daily-summary",
|
||||
EventCount = 0,
|
||||
@@ -58,12 +78,12 @@ public sealed class DigestRepositoryTests : IAsyncLifetime
|
||||
public async Task GetByKey_ReturnsCorrectDigest()
|
||||
{
|
||||
// Arrange
|
||||
var channelId = Guid.NewGuid();
|
||||
await ResetAsync();
|
||||
var digest = new DigestEntity
|
||||
{
|
||||
Id = Guid.NewGuid(),
|
||||
TenantId = _tenantId,
|
||||
ChannelId = channelId,
|
||||
ChannelId = _channelId,
|
||||
Recipient = "user@example.com",
|
||||
DigestKey = "weekly-report",
|
||||
CollectUntil = DateTimeOffset.UtcNow.AddDays(7)
|
||||
@@ -71,7 +91,7 @@ public sealed class DigestRepositoryTests : IAsyncLifetime
|
||||
await _repository.UpsertAsync(digest);
|
||||
|
||||
// Act
|
||||
var fetched = await _repository.GetByKeyAsync(_tenantId, channelId, "user@example.com", "weekly-report");
|
||||
var fetched = await _repository.GetByKeyAsync(_tenantId, _channelId, "user@example.com", "weekly-report");
|
||||
|
||||
// Assert
|
||||
fetched.Should().NotBeNull();
|
||||
@@ -82,6 +102,7 @@ public sealed class DigestRepositoryTests : IAsyncLifetime
|
||||
public async Task AddEvent_IncrementsEventCount()
|
||||
{
|
||||
// Arrange
|
||||
await ResetAsync();
|
||||
var digest = CreateDigest("event-test");
|
||||
await _repository.UpsertAsync(digest);
|
||||
|
||||
@@ -98,11 +119,12 @@ public sealed class DigestRepositoryTests : IAsyncLifetime
|
||||
public async Task GetReadyToSend_ReturnsDigestsReadyToSend()
|
||||
{
|
||||
// Arrange - One ready digest (past CollectUntil), one not ready
|
||||
await ResetAsync();
|
||||
var readyDigest = new DigestEntity
|
||||
{
|
||||
Id = Guid.NewGuid(),
|
||||
TenantId = _tenantId,
|
||||
ChannelId = Guid.NewGuid(),
|
||||
ChannelId = _channelId,
|
||||
Recipient = "ready@example.com",
|
||||
DigestKey = "ready",
|
||||
Status = DigestStatus.Collecting,
|
||||
@@ -112,7 +134,7 @@ public sealed class DigestRepositoryTests : IAsyncLifetime
|
||||
{
|
||||
Id = Guid.NewGuid(),
|
||||
TenantId = _tenantId,
|
||||
ChannelId = Guid.NewGuid(),
|
||||
ChannelId = _channelId,
|
||||
Recipient = "notready@example.com",
|
||||
DigestKey = "notready",
|
||||
Status = DigestStatus.Collecting,
|
||||
@@ -133,6 +155,7 @@ public sealed class DigestRepositoryTests : IAsyncLifetime
|
||||
public async Task MarkSending_UpdatesStatus()
|
||||
{
|
||||
// Arrange
|
||||
await ResetAsync();
|
||||
var digest = CreateDigest("sending-test");
|
||||
await _repository.UpsertAsync(digest);
|
||||
|
||||
@@ -149,6 +172,7 @@ public sealed class DigestRepositoryTests : IAsyncLifetime
|
||||
public async Task MarkSent_UpdatesStatusAndSentAt()
|
||||
{
|
||||
// Arrange
|
||||
await ResetAsync();
|
||||
var digest = CreateDigest("sent-test");
|
||||
await _repository.UpsertAsync(digest);
|
||||
await _repository.MarkSendingAsync(_tenantId, digest.Id);
|
||||
@@ -167,8 +191,11 @@ public sealed class DigestRepositoryTests : IAsyncLifetime
|
||||
public async Task DeleteOld_RemovesOldDigests()
|
||||
{
|
||||
// Arrange
|
||||
await ResetAsync();
|
||||
var digest = CreateDigest("old-digest");
|
||||
await _repository.UpsertAsync(digest);
|
||||
await _repository.MarkSendingAsync(_tenantId, digest.Id);
|
||||
await _repository.MarkSentAsync(_tenantId, digest.Id);
|
||||
|
||||
// Act - Delete digests older than future date
|
||||
var cutoff = DateTimeOffset.UtcNow.AddMinutes(1);
|
||||
@@ -182,7 +209,7 @@ public sealed class DigestRepositoryTests : IAsyncLifetime
|
||||
{
|
||||
Id = Guid.NewGuid(),
|
||||
TenantId = _tenantId,
|
||||
ChannelId = Guid.NewGuid(),
|
||||
ChannelId = _channelId,
|
||||
Recipient = "user@example.com",
|
||||
DigestKey = key,
|
||||
Status = DigestStatus.Collecting,
|
||||
|
||||
@@ -24,13 +24,16 @@ public sealed class NotifyAuditRepositoryTests : IAsyncLifetime
|
||||
_repository = new NotifyAuditRepository(dataSource, NullLogger<NotifyAuditRepository>.Instance);
|
||||
}
|
||||
|
||||
public Task InitializeAsync() => _fixture.TruncateAllTablesAsync();
|
||||
public Task InitializeAsync() => Task.CompletedTask;
|
||||
public Task DisposeAsync() => Task.CompletedTask;
|
||||
|
||||
private Task ResetAsync() => _fixture.ExecuteSqlAsync("TRUNCATE TABLE notify.audit, notify.deliveries, notify.digests, notify.channels RESTART IDENTITY CASCADE;");
|
||||
|
||||
[Fact]
|
||||
public async Task Create_ReturnsGeneratedId()
|
||||
{
|
||||
// Arrange
|
||||
await ResetAsync();
|
||||
var audit = new NotifyAuditEntity
|
||||
{
|
||||
TenantId = _tenantId,
|
||||
@@ -51,6 +54,7 @@ public sealed class NotifyAuditRepositoryTests : IAsyncLifetime
|
||||
public async Task List_ReturnsAuditEntriesOrderedByCreatedAtDesc()
|
||||
{
|
||||
// Arrange
|
||||
await ResetAsync();
|
||||
var audit1 = CreateAudit("action1");
|
||||
var audit2 = CreateAudit("action2");
|
||||
await _repository.CreateAsync(audit1);
|
||||
@@ -69,6 +73,7 @@ public sealed class NotifyAuditRepositoryTests : IAsyncLifetime
|
||||
public async Task GetByResource_ReturnsResourceAudits()
|
||||
{
|
||||
// Arrange
|
||||
await ResetAsync();
|
||||
var resourceId = Guid.NewGuid().ToString();
|
||||
var audit = new NotifyAuditEntity
|
||||
{
|
||||
@@ -91,6 +96,7 @@ public sealed class NotifyAuditRepositoryTests : IAsyncLifetime
|
||||
public async Task GetByResource_WithoutResourceId_ReturnsAllOfType()
|
||||
{
|
||||
// Arrange
|
||||
await ResetAsync();
|
||||
await _repository.CreateAsync(new NotifyAuditEntity
|
||||
{
|
||||
TenantId = _tenantId,
|
||||
@@ -117,6 +123,7 @@ public sealed class NotifyAuditRepositoryTests : IAsyncLifetime
|
||||
public async Task GetByCorrelationId_ReturnsCorrelatedAudits()
|
||||
{
|
||||
// Arrange
|
||||
await ResetAsync();
|
||||
var correlationId = Guid.NewGuid().ToString();
|
||||
var audit1 = new NotifyAuditEntity
|
||||
{
|
||||
@@ -147,6 +154,7 @@ public sealed class NotifyAuditRepositoryTests : IAsyncLifetime
|
||||
public async Task DeleteOld_RemovesOldAudits()
|
||||
{
|
||||
// Arrange
|
||||
await ResetAsync();
|
||||
await _repository.CreateAsync(CreateAudit("old-action"));
|
||||
|
||||
// Act - Delete audits older than future date
|
||||
|
||||
@@ -8,12 +8,13 @@
|
||||
<LangVersion>preview</LangVersion>
|
||||
<IsPackable>false</IsPackable>
|
||||
<IsTestProject>true</IsTestProject>
|
||||
<UseConcelierTestInfra>false</UseConcelierTestInfra>
|
||||
</PropertyGroup>
|
||||
|
||||
<ItemGroup>
|
||||
<PackageReference Include="FluentAssertions" Version="6.12.0" />
|
||||
<PackageReference Include="Microsoft.NET.Test.Sdk" Version="17.11.1" />
|
||||
<PackageReference Include="Moq" Version="4.20.70" />
|
||||
<PackageReference Include="Microsoft.NET.Test.Sdk" Version="17.14.0" />
|
||||
<PackageReference Include="xunit" Version="2.9.2" />
|
||||
<PackageReference Include="xunit.runner.visualstudio" Version="2.8.2">
|
||||
<IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
|
||||
|
||||
@@ -26,7 +26,10 @@ internal sealed record BatchEvaluationItemDto(
|
||||
SbomDto Sbom,
|
||||
ExceptionsDto Exceptions,
|
||||
ReachabilityDto Reachability,
|
||||
DateTimeOffset? EvaluationTimestamp,
|
||||
string? EntropyLayerSummary = null,
|
||||
string? EntropyReport = null,
|
||||
bool? ProvenanceAttested = null,
|
||||
DateTimeOffset? EvaluationTimestamp = null,
|
||||
bool BypassCache = false);
|
||||
|
||||
internal sealed record EvaluationSeverityDto(string Normalized, decimal? Score = null);
|
||||
@@ -207,6 +210,9 @@ internal static class BatchEvaluationMapper
|
||||
Sbom: sbom,
|
||||
Exceptions: exceptions,
|
||||
Reachability: reachability,
|
||||
EntropyLayerSummary: item.EntropyLayerSummary,
|
||||
EntropyReport: item.EntropyReport,
|
||||
ProvenanceAttested: item.ProvenanceAttested,
|
||||
EvaluationTimestamp: item.EvaluationTimestamp,
|
||||
BypassCache: item.BypassCache);
|
||||
}
|
||||
|
||||
@@ -0,0 +1,61 @@
|
||||
using System.Collections.Immutable;
|
||||
using System.Text.Json.Serialization;
|
||||
|
||||
namespace StellaOps.Policy.Engine.ConsoleSurface;
|
||||
|
||||
internal sealed record ConsoleSimulationDiffRequest(
|
||||
[property: JsonPropertyName("baselinePolicyVersion")] string BaselinePolicyVersion,
|
||||
[property: JsonPropertyName("candidatePolicyVersion")] string CandidatePolicyVersion,
|
||||
[property: JsonPropertyName("artifactScope")] IReadOnlyList<ConsoleArtifactScope>? ArtifactScope,
|
||||
[property: JsonPropertyName("filters")] ConsoleSimulationFilters? Filters,
|
||||
[property: JsonPropertyName("budget")] ConsoleSimulationBudget? Budget,
|
||||
[property: JsonPropertyName("evaluationTimestamp")] DateTimeOffset EvaluationTimestamp);
|
||||
|
||||
internal sealed record ConsoleArtifactScope(
|
||||
[property: JsonPropertyName("artifactDigest")] string ArtifactDigest,
|
||||
[property: JsonPropertyName("purl")] string? Purl = null,
|
||||
[property: JsonPropertyName("advisoryId")] string? AdvisoryId = null);
|
||||
|
||||
internal sealed record ConsoleSimulationFilters(
|
||||
[property: JsonPropertyName("severityBand")] IReadOnlyList<string>? SeverityBand = null,
|
||||
[property: JsonPropertyName("ruleId")] IReadOnlyList<string>? RuleId = null);
|
||||
|
||||
internal sealed record ConsoleSimulationBudget(
|
||||
[property: JsonPropertyName("maxFindings")] int? MaxFindings = null,
|
||||
[property: JsonPropertyName("maxExplainSamples")] int? MaxExplainSamples = null);
|
||||
|
||||
internal sealed record ConsoleSimulationDiffResponse(
|
||||
[property: JsonPropertyName("schemaVersion")] string SchemaVersion,
|
||||
[property: JsonPropertyName("summary")] ConsoleDiffSummary Summary,
|
||||
[property: JsonPropertyName("ruleImpact")] IReadOnlyList<ConsoleRuleImpact> RuleImpact,
|
||||
[property: JsonPropertyName("samples")] ConsoleDiffSamples Samples,
|
||||
[property: JsonPropertyName("provenance")] ConsoleDiffProvenance Provenance);
|
||||
|
||||
internal sealed record ConsoleDiffSummary(
|
||||
[property: JsonPropertyName("before")] ConsoleSeverityBreakdown Before,
|
||||
[property: JsonPropertyName("after")] ConsoleSeverityBreakdown After,
|
||||
[property: JsonPropertyName("delta")] ConsoleDiffDelta Delta);
|
||||
|
||||
internal sealed record ConsoleSeverityBreakdown(
|
||||
[property: JsonPropertyName("total")] int Total,
|
||||
[property: JsonPropertyName("severity")] ImmutableDictionary<string, int> Severity);
|
||||
|
||||
internal sealed record ConsoleDiffDelta(
|
||||
[property: JsonPropertyName("added")] int Added,
|
||||
[property: JsonPropertyName("removed")] int Removed,
|
||||
[property: JsonPropertyName("regressed")] int Regressed);
|
||||
|
||||
internal sealed record ConsoleRuleImpact(
|
||||
[property: JsonPropertyName("ruleId")] string RuleId,
|
||||
[property: JsonPropertyName("added")] int Added,
|
||||
[property: JsonPropertyName("removed")] int Removed,
|
||||
[property: JsonPropertyName("severityShift")] ImmutableDictionary<string, int> SeverityShift);
|
||||
|
||||
internal sealed record ConsoleDiffSamples(
|
||||
[property: JsonPropertyName("explain")] ImmutableArray<string> Explain,
|
||||
[property: JsonPropertyName("findings")] ImmutableArray<string> Findings);
|
||||
|
||||
internal sealed record ConsoleDiffProvenance(
|
||||
[property: JsonPropertyName("baselinePolicyVersion")] string BaselinePolicyVersion,
|
||||
[property: JsonPropertyName("candidatePolicyVersion")] string CandidatePolicyVersion,
|
||||
[property: JsonPropertyName("evaluationTimestamp")] DateTimeOffset EvaluationTimestamp);
|
||||
@@ -0,0 +1,237 @@
|
||||
using System.Collections.Immutable;
|
||||
using System.Security.Cryptography;
|
||||
using System.Text;
|
||||
using StellaOps.Policy.Engine.Simulation;
|
||||
|
||||
namespace StellaOps.Policy.Engine.ConsoleSurface;
|
||||
|
||||
/// <summary>
|
||||
/// Deterministic simulation diff metadata for Console surfaces (POLICY-CONSOLE-23-002).
|
||||
/// Generates stable before/after counts, rule impact, and samples without relying on
|
||||
/// wall-clock or external data. Intended as a contract-aligned shim until the
|
||||
/// Console surface is wired to live evaluation outputs.
|
||||
/// </summary>
|
||||
internal sealed class ConsoleSimulationDiffService
|
||||
{
|
||||
private static readonly string[] SeverityOrder = { "critical", "high", "medium", "low", "unknown" };
|
||||
private static readonly string[] Outcomes = { "deny", "block", "warn", "allow" };
|
||||
private static readonly string SchemaVersion = "console-policy-23-001";
|
||||
|
||||
private readonly SimulationAnalyticsService _analytics;
|
||||
|
||||
public ConsoleSimulationDiffService(SimulationAnalyticsService analytics)
|
||||
{
|
||||
_analytics = analytics ?? throw new ArgumentNullException(nameof(analytics));
|
||||
}
|
||||
|
||||
public ConsoleSimulationDiffResponse Compute(ConsoleSimulationDiffRequest request)
|
||||
{
|
||||
ArgumentNullException.ThrowIfNull(request);
|
||||
|
||||
var artifacts = (request.ArtifactScope?.Count ?? 0) == 0
|
||||
? new[] { new ConsoleArtifactScope("sha256:default") }
|
||||
: request.ArtifactScope!;
|
||||
|
||||
// Respect budget caps if provided
|
||||
var maxFindings = Math.Clamp(request.Budget?.MaxFindings ?? 5, 1, 50_000);
|
||||
var maxSamples = Math.Clamp(request.Budget?.MaxExplainSamples ?? 20, 0, 200);
|
||||
|
||||
var baselineFindings = BuildFindings(request.BaselinePolicyVersion, artifacts, maxFindings, seed: 1);
|
||||
var candidateFindings = BuildFindings(request.CandidatePolicyVersion, artifacts, maxFindings, seed: 2);
|
||||
|
||||
// Delta summary for regressions/added/removed
|
||||
var delta = _analytics.ComputeDeltaSummary(
|
||||
request.BaselinePolicyVersion,
|
||||
request.CandidatePolicyVersion,
|
||||
baselineFindings,
|
||||
candidateFindings);
|
||||
|
||||
var beforeBreakdown = BuildSeverityBreakdown(baselineFindings);
|
||||
var afterBreakdown = BuildSeverityBreakdown(candidateFindings);
|
||||
|
||||
var added = candidateFindings.Count(c => baselineFindings.All(b => b.FindingId != c.FindingId));
|
||||
var removed = baselineFindings.Count(b => candidateFindings.All(c => c.FindingId != b.FindingId));
|
||||
var regressed = delta.SeverityChanges.Escalated;
|
||||
|
||||
var ruleImpact = BuildRuleImpact(baselineFindings, candidateFindings);
|
||||
var samples = BuildSamples(candidateFindings, maxSamples);
|
||||
|
||||
var response = new ConsoleSimulationDiffResponse(
|
||||
SchemaVersion,
|
||||
new ConsoleDiffSummary(
|
||||
Before: beforeBreakdown,
|
||||
After: afterBreakdown,
|
||||
Delta: new ConsoleDiffDelta(added, removed, regressed)),
|
||||
ruleImpact,
|
||||
samples,
|
||||
new ConsoleDiffProvenance(
|
||||
request.BaselinePolicyVersion,
|
||||
request.CandidatePolicyVersion,
|
||||
request.EvaluationTimestamp));
|
||||
|
||||
return response;
|
||||
}
|
||||
|
||||
private static IReadOnlyList<SimulationFindingResult> BuildFindings(
|
||||
string policyVersion,
|
||||
IReadOnlyList<ConsoleArtifactScope> artifacts,
|
||||
int maxFindings,
|
||||
int seed)
|
||||
{
|
||||
var results = new List<SimulationFindingResult>();
|
||||
|
||||
foreach (var artifact in artifacts.OrderBy(a => a.ArtifactDigest, StringComparer.Ordinal))
|
||||
{
|
||||
var baseSeed = HashToBytes($"{policyVersion}:{artifact.ArtifactDigest}:{seed}");
|
||||
var include = baseSeed[0] % 7 != 0; // occasionally drop to simulate removal
|
||||
if (!include)
|
||||
{
|
||||
continue;
|
||||
}
|
||||
|
||||
var findingId = CreateDeterministicId("fid", policyVersion, artifact.ArtifactDigest, seed.ToString());
|
||||
var severity = SeverityOrder[baseSeed[1] % SeverityOrder.Length];
|
||||
var outcome = Outcomes[baseSeed[2] % Outcomes.Length];
|
||||
var ruleId = $"RULE-{(baseSeed[3] % 9000) + 1000}";
|
||||
|
||||
results.Add(new SimulationFindingResult(
|
||||
FindingId: findingId,
|
||||
ComponentPurl: artifact.Purl ?? "pkg:generic/unknown@0.0.0",
|
||||
AdvisoryId: artifact.AdvisoryId ?? "unknown",
|
||||
Outcome: outcome,
|
||||
Severity: severity,
|
||||
FiredRules: new[] { ruleId }));
|
||||
|
||||
// Add a secondary finding for variability if budget allows
|
||||
if (results.Count < maxFindings && baseSeed[4] % 5 == 0)
|
||||
{
|
||||
var secondaryId = CreateDeterministicId("fid", policyVersion, artifact.ArtifactDigest, seed + "-b");
|
||||
var secondaryRule = $"RULE-{(baseSeed[5] % 9000) + 1000}";
|
||||
var secondarySeverity = SeverityOrder[(baseSeed[6] + seed) % SeverityOrder.Length];
|
||||
|
||||
results.Add(new SimulationFindingResult(
|
||||
FindingId: secondaryId,
|
||||
ComponentPurl: artifact.Purl ?? "pkg:generic/unknown@0.0.0",
|
||||
AdvisoryId: artifact.AdvisoryId ?? "unknown",
|
||||
Outcome: Outcomes[(baseSeed[7] + seed) % Outcomes.Length],
|
||||
Severity: secondarySeverity,
|
||||
FiredRules: new[] { secondaryRule }));
|
||||
}
|
||||
|
||||
if (results.Count >= maxFindings)
|
||||
{
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
return results;
|
||||
}
|
||||
|
||||
private static ConsoleSeverityBreakdown BuildSeverityBreakdown(IReadOnlyList<SimulationFindingResult> findings)
|
||||
{
|
||||
var counts = SeverityOrder.ToDictionary(s => s, _ => 0, StringComparer.OrdinalIgnoreCase);
|
||||
|
||||
foreach (var finding in findings)
|
||||
{
|
||||
var severity = finding.Severity ?? "unknown";
|
||||
counts.TryGetValue(severity, out var current);
|
||||
counts[severity] = current + 1;
|
||||
}
|
||||
|
||||
return new ConsoleSeverityBreakdown(
|
||||
Total: findings.Count,
|
||||
Severity: counts.ToImmutableDictionary(StringComparer.OrdinalIgnoreCase));
|
||||
}
|
||||
|
||||
private static IReadOnlyList<ConsoleRuleImpact> BuildRuleImpact(
|
||||
IReadOnlyList<SimulationFindingResult> baseline,
|
||||
IReadOnlyList<SimulationFindingResult> candidate)
|
||||
{
|
||||
var ruleImpact = new Dictionary<string, (int added, int removed, Dictionary<string, int> shifts)>(StringComparer.Ordinal);
|
||||
|
||||
var baseMap = baseline.ToDictionary(f => f.FindingId, f => f, StringComparer.Ordinal);
|
||||
|
||||
foreach (var result in candidate)
|
||||
{
|
||||
var ruleId = result.FiredRules?.FirstOrDefault() ?? "RULE-0000";
|
||||
if (!ruleImpact.TryGetValue(ruleId, out var entry))
|
||||
{
|
||||
entry = (0, 0, new Dictionary<string, int>(StringComparer.OrdinalIgnoreCase));
|
||||
}
|
||||
|
||||
if (!baseMap.TryGetValue(result.FindingId, out var baseResult))
|
||||
{
|
||||
entry.added += 1;
|
||||
}
|
||||
else if (!string.Equals(baseResult.Severity, result.Severity, StringComparison.OrdinalIgnoreCase))
|
||||
{
|
||||
var key = $"{baseResult.Severity}->{result.Severity}";
|
||||
entry.shifts.TryGetValue(key, out var count);
|
||||
entry.shifts[key] = count + 1;
|
||||
}
|
||||
|
||||
ruleImpact[ruleId] = entry;
|
||||
}
|
||||
|
||||
foreach (var result in baseline)
|
||||
{
|
||||
var ruleId = result.FiredRules?.FirstOrDefault() ?? "RULE-0000";
|
||||
if (!candidate.Any(c => c.FindingId == result.FindingId))
|
||||
{
|
||||
if (!ruleImpact.TryGetValue(ruleId, out var entry))
|
||||
{
|
||||
entry = (0, 0, new Dictionary<string, int>(StringComparer.OrdinalIgnoreCase));
|
||||
}
|
||||
|
||||
entry.removed += 1;
|
||||
ruleImpact[ruleId] = entry;
|
||||
}
|
||||
}
|
||||
|
||||
return ruleImpact
|
||||
.OrderBy(kvp => kvp.Key, StringComparer.Ordinal)
|
||||
.Select(kvp => new ConsoleRuleImpact(
|
||||
kvp.Key,
|
||||
kvp.Value.added,
|
||||
kvp.Value.removed,
|
||||
kvp.Value.shifts.ToImmutableDictionary(StringComparer.OrdinalIgnoreCase)))
|
||||
.ToList();
|
||||
}
|
||||
|
||||
private static ConsoleDiffSamples BuildSamples(IReadOnlyList<SimulationFindingResult> candidate, int maxSamples)
|
||||
{
|
||||
var ordered = candidate
|
||||
.OrderBy(f => f.FindingId, StringComparer.Ordinal)
|
||||
.Take(maxSamples)
|
||||
.ToList();
|
||||
|
||||
var explain = ordered
|
||||
.Select(f => CreateDeterministicId("trace", f.FindingId))
|
||||
.ToImmutableArray();
|
||||
|
||||
var findings = ordered
|
||||
.Select(f => f.FindingId)
|
||||
.ToImmutableArray();
|
||||
|
||||
return new ConsoleDiffSamples(explain, findings);
|
||||
}
|
||||
|
||||
private static string CreateDeterministicId(params string[] parts)
|
||||
{
|
||||
var input = string.Join("|", parts);
|
||||
var hash = HashToBytes(input);
|
||||
var sb = new StringBuilder("ulid-");
|
||||
for (var i = 0; i < 8; i++)
|
||||
{
|
||||
sb.Append(hash[i].ToString("x2"));
|
||||
}
|
||||
|
||||
return sb.ToString();
|
||||
}
|
||||
|
||||
private static byte[] HashToBytes(string input)
|
||||
{
|
||||
var bytes = Encoding.UTF8.GetBytes(input);
|
||||
return SHA256.HashData(bytes);
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,49 @@
|
||||
using Microsoft.AspNetCore.Mvc;
|
||||
using StellaOps.Policy.Engine.ConsoleSurface;
|
||||
|
||||
namespace StellaOps.Policy.Engine.Endpoints;
|
||||
|
||||
internal static class ConsoleSimulationEndpoint
|
||||
{
|
||||
public static IEndpointRouteBuilder MapConsoleSimulationDiff(this IEndpointRouteBuilder routes)
|
||||
{
|
||||
routes.MapPost("/policy/console/simulations/diff", HandleAsync)
|
||||
.WithName("PolicyEngine.ConsoleSimulationDiff")
|
||||
.Produces<ConsoleSimulationDiffResponse>(StatusCodes.Status200OK)
|
||||
.ProducesValidationProblem();
|
||||
|
||||
return routes;
|
||||
}
|
||||
|
||||
private static IResult HandleAsync(
|
||||
[FromBody] ConsoleSimulationDiffRequest request,
|
||||
ConsoleSimulationDiffService service)
|
||||
{
|
||||
if (request is null)
|
||||
{
|
||||
return Results.ValidationProblem(new Dictionary<string, string[]>
|
||||
{
|
||||
["request"] = new[] { "Request body is required." }
|
||||
});
|
||||
}
|
||||
|
||||
if (request.EvaluationTimestamp == default)
|
||||
{
|
||||
return Results.ValidationProblem(new Dictionary<string, string[]>
|
||||
{
|
||||
["evaluationTimestamp"] = new[] { "evaluationTimestamp is required." }
|
||||
});
|
||||
}
|
||||
|
||||
if (string.IsNullOrWhiteSpace(request.BaselinePolicyVersion) || string.IsNullOrWhiteSpace(request.CandidatePolicyVersion))
|
||||
{
|
||||
return Results.ValidationProblem(new Dictionary<string, string[]>
|
||||
{
|
||||
["policyVersion"] = new[] { "baselinePolicyVersion and candidatePolicyVersion are required." }
|
||||
});
|
||||
}
|
||||
|
||||
var response = service.Compute(request);
|
||||
return Results.Json(response);
|
||||
}
|
||||
}
|
||||
@@ -11,15 +11,16 @@ internal sealed record PolicyEvaluationRequest(
|
||||
PolicyIrDocument Document,
|
||||
PolicyEvaluationContext Context);
|
||||
|
||||
internal sealed record PolicyEvaluationContext(
|
||||
PolicyEvaluationSeverity Severity,
|
||||
PolicyEvaluationEnvironment Environment,
|
||||
PolicyEvaluationAdvisory Advisory,
|
||||
PolicyEvaluationVexEvidence Vex,
|
||||
PolicyEvaluationSbom Sbom,
|
||||
PolicyEvaluationExceptions Exceptions,
|
||||
PolicyEvaluationReachability Reachability,
|
||||
DateTimeOffset? EvaluationTimestamp = null)
|
||||
internal sealed record PolicyEvaluationContext(
|
||||
PolicyEvaluationSeverity Severity,
|
||||
PolicyEvaluationEnvironment Environment,
|
||||
PolicyEvaluationAdvisory Advisory,
|
||||
PolicyEvaluationVexEvidence Vex,
|
||||
PolicyEvaluationSbom Sbom,
|
||||
PolicyEvaluationExceptions Exceptions,
|
||||
PolicyEvaluationReachability Reachability,
|
||||
PolicyEvaluationEntropy Entropy,
|
||||
DateTimeOffset? EvaluationTimestamp = null)
|
||||
{
|
||||
/// <summary>
|
||||
/// Gets the evaluation timestamp for deterministic time-based operations.
|
||||
@@ -36,12 +37,12 @@ internal sealed record PolicyEvaluationContext(
|
||||
PolicyEvaluationEnvironment environment,
|
||||
PolicyEvaluationAdvisory advisory,
|
||||
PolicyEvaluationVexEvidence vex,
|
||||
PolicyEvaluationSbom sbom,
|
||||
PolicyEvaluationExceptions exceptions,
|
||||
DateTimeOffset? evaluationTimestamp = null)
|
||||
: this(severity, environment, advisory, vex, sbom, exceptions, PolicyEvaluationReachability.Unknown, evaluationTimestamp)
|
||||
{
|
||||
}
|
||||
PolicyEvaluationSbom sbom,
|
||||
PolicyEvaluationExceptions exceptions,
|
||||
DateTimeOffset? evaluationTimestamp = null)
|
||||
: this(severity, environment, advisory, vex, sbom, exceptions, PolicyEvaluationReachability.Unknown, PolicyEvaluationEntropy.Unknown, evaluationTimestamp)
|
||||
{
|
||||
}
|
||||
}
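Usage note (illustrative): call sites that still use the shorter constructor compile unchanged, because reachability and entropy both fall back to their Unknown defaults. The variable names below are placeholders, not values from this commit:

// Illustrative only: severity, environment, advisory, vex, sbom, exceptions are placeholders.
var context = new PolicyEvaluationContext(severity, environment, advisory, vex, sbom, exceptions);
// context.Reachability is PolicyEvaluationReachability.Unknown
// context.Entropy is PolicyEvaluationEntropy.Unknown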
|
||||
|
||||
internal sealed record PolicyEvaluationSeverity(string Normalized, decimal? Score = null);
|
||||
@@ -187,15 +188,15 @@ internal sealed record PolicyExceptionApplication(
|
||||
/// <summary>
|
||||
/// Reachability evidence for policy evaluation.
|
||||
/// </summary>
|
||||
internal sealed record PolicyEvaluationReachability(
|
||||
string State,
|
||||
decimal Confidence,
|
||||
decimal Score,
|
||||
bool HasRuntimeEvidence,
|
||||
string? Source,
|
||||
string? Method,
|
||||
string? EvidenceRef)
|
||||
{
|
||||
internal sealed record PolicyEvaluationReachability(
|
||||
string State,
|
||||
decimal Confidence,
|
||||
decimal Score,
|
||||
bool HasRuntimeEvidence,
|
||||
string? Source,
|
||||
string? Method,
|
||||
string? EvidenceRef)
|
||||
{
|
||||
/// <summary>
|
||||
/// Default unknown reachability state.
|
||||
/// </summary>
|
||||
@@ -275,4 +276,26 @@ internal sealed record PolicyEvaluationReachability(
|
||||
/// Whether this reachability data has low confidence (< 0.5).
|
||||
/// </summary>
|
||||
public bool IsLowConfidence => Confidence < 0.5m;
|
||||
}
|
||||
}
|
||||
|
||||
/// <summary>
/// Entropy evidence for policy evaluation.
/// </summary>
internal sealed record PolicyEvaluationEntropy(
decimal Penalty,
decimal ImageOpaqueRatio,
bool Blocked,
bool Warned,
bool Capped,
decimal? TopFileOpaqueRatio)
{
public static PolicyEvaluationEntropy Unknown { get; } = new(
Penalty: 0m,
ImageOpaqueRatio: 0m,
Blocked: false,
Warned: false,
Capped: false,
TopFileOpaqueRatio: null);

public bool HasData => Penalty != 0m || ImageOpaqueRatio != 0m || Warned || Blocked;
}
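A short sketch of how HasData behaves (values invented for illustration):

// Illustrative values only.
var noSignal = PolicyEvaluationEntropy.Unknown;   // HasData == false
var flagged = new PolicyEvaluationEntropy(
    Penalty: 0.15m,
    ImageOpaqueRatio: 0.42m,
    Blocked: false,
    Warned: true,
    Capped: false,
    TopFileOpaqueRatio: 0.9m);                    // HasData == true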
|
||||
|
||||
@@ -63,12 +63,13 @@ internal sealed class PolicyExpressionEvaluator
|
||||
"vex" => new EvaluationValue(new VexScope(this, context.Vex)),
|
||||
"advisory" => new EvaluationValue(new AdvisoryScope(context.Advisory)),
|
||||
"sbom" => new EvaluationValue(new SbomScope(context.Sbom)),
|
||||
"reachability" => new EvaluationValue(new ReachabilityScope(context.Reachability)),
|
||||
"now" => new EvaluationValue(context.Now),
|
||||
"true" => EvaluationValue.True,
|
||||
"false" => EvaluationValue.False,
|
||||
_ => EvaluationValue.Null,
|
||||
};
|
||||
"reachability" => new EvaluationValue(new ReachabilityScope(context.Reachability)),
|
||||
"entropy" => new EvaluationValue(new EntropyScope(context.Entropy)),
|
||||
"now" => new EvaluationValue(context.Now),
|
||||
"true" => EvaluationValue.True,
|
||||
"false" => EvaluationValue.False,
|
||||
_ => EvaluationValue.Null,
|
||||
};
|
||||
}
|
||||
|
||||
private EvaluationValue EvaluateMember(PolicyMemberAccessExpression member, EvaluationScope scope)
|
||||
@@ -100,15 +101,20 @@ internal sealed class PolicyExpressionEvaluator
|
||||
return sbom.Get(member.Member);
|
||||
}
|
||||
|
||||
if (raw is ReachabilityScope reachability)
|
||||
{
|
||||
return reachability.Get(member.Member);
|
||||
}
|
||||
|
||||
if (raw is ComponentScope componentScope)
|
||||
{
|
||||
return componentScope.Get(member.Member);
|
||||
}
|
||||
if (raw is ReachabilityScope reachability)
|
||||
{
|
||||
return reachability.Get(member.Member);
|
||||
}
|
||||
|
||||
if (raw is EntropyScope entropy)
|
||||
{
|
||||
return entropy.Get(member.Member);
|
||||
}
|
||||
|
||||
if (raw is ComponentScope componentScope)
|
||||
{
|
||||
return componentScope.Get(member.Member);
|
||||
}
|
||||
|
||||
if (raw is RubyComponentScope rubyScope)
|
||||
{
|
||||
@@ -856,12 +862,12 @@ internal sealed class PolicyExpressionEvaluator
|
||||
/// - reachability.method == "static"
|
||||
/// </example>
|
||||
private sealed class ReachabilityScope
|
||||
{
|
||||
private readonly PolicyEvaluationReachability reachability;
|
||||
|
||||
public ReachabilityScope(PolicyEvaluationReachability reachability)
|
||||
{
|
||||
this.reachability = reachability;
|
||||
{
|
||||
private readonly PolicyEvaluationReachability reachability;
|
||||
|
||||
public ReachabilityScope(PolicyEvaluationReachability reachability)
|
||||
{
|
||||
this.reachability = reachability;
|
||||
}
|
||||
|
||||
public EvaluationValue Get(string member) => member.ToLowerInvariant() switch
|
||||
@@ -879,10 +885,35 @@ internal sealed class PolicyExpressionEvaluator
|
||||
"is_under_investigation" or "isunderinvestigation" => new EvaluationValue(reachability.IsUnderInvestigation),
|
||||
"is_high_confidence" or "ishighconfidence" => new EvaluationValue(reachability.IsHighConfidence),
|
||||
"is_medium_confidence" or "ismediumconfidence" => new EvaluationValue(reachability.IsMediumConfidence),
|
||||
"is_low_confidence" or "islowconfidence" => new EvaluationValue(reachability.IsLowConfidence),
|
||||
_ => EvaluationValue.Null,
|
||||
};
|
||||
}
|
||||
"is_low_confidence" or "islowconfidence" => new EvaluationValue(reachability.IsLowConfidence),
|
||||
_ => EvaluationValue.Null,
|
||||
};
|
||||
}
|
||||
|
||||
/// <summary>
/// SPL scope for entropy predicates.
/// </summary>
private sealed class EntropyScope
{
private readonly PolicyEvaluationEntropy entropy;

public EntropyScope(PolicyEvaluationEntropy entropy)
{
this.entropy = entropy;
}

public EvaluationValue Get(string member) => member.ToLowerInvariant() switch
{
"penalty" => new EvaluationValue(entropy.Penalty),
"image_opaque_ratio" or "imageopaqueratio" => new EvaluationValue(entropy.ImageOpaqueRatio),
"blocked" => new EvaluationValue(entropy.Blocked),
"warned" => new EvaluationValue(entropy.Warned),
"capped" => new EvaluationValue(entropy.Capped),
"top_file_opaque_ratio" or "topfileopaqueratio" => new EvaluationValue(entropy.TopFileOpaqueRatio),
"has_data" or "hasdata" => new EvaluationValue(entropy.HasData),
_ => EvaluationValue.Null,
};
}
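With this scope wired in, policy expressions can reference the members by the aliases above, mirroring the reachability examples earlier in this file. Illustrative predicates only; exact operator support follows the existing expression grammar:

// - entropy.blocked == true
// - entropy.warned == true
// - entropy.has_data == false
// - entropy.image_opaque_ratio >= 0.5   (assuming numeric comparison is supported, as for reachability.score)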
|
||||
|
||||
/// <summary>
|
||||
/// SPL scope for macOS component predicates.
|
||||
|
||||
@@ -16,6 +16,7 @@ using StellaOps.Policy.Engine.Services;
using StellaOps.Policy.Engine.Workers;
using StellaOps.Policy.Engine.Streaming;
using StellaOps.Policy.Engine.Telemetry;
using StellaOps.Policy.Engine.ConsoleSurface;
using StellaOps.AirGap.Policy;
using StellaOps.Policy.Engine.Orchestration;
using StellaOps.Policy.Engine.ReachabilityFacts;
@@ -138,6 +139,8 @@ builder.Services.AddSingleton<ExceptionLifecycleService>();
builder.Services.AddHostedService<ExceptionLifecycleWorker>();
builder.Services.AddHostedService<IncidentModeExpirationWorker>();
builder.Services.AddHostedService<PolicyEngineBootstrapWorker>();
builder.Services.AddSingleton<StellaOps.Policy.Engine.Simulation.SimulationAnalyticsService>();
builder.Services.AddSingleton<ConsoleSimulationDiffService>();
builder.Services.AddSingleton<StellaOps.PolicyDsl.PolicyCompiler>();
builder.Services.AddSingleton<PolicyCompilationService>();
builder.Services.AddSingleton<StellaOps.Policy.Engine.Services.PathScopeMetrics>();
@@ -223,9 +226,10 @@ app.MapPathScopeSimulation();
app.MapOverlaySimulation();
app.MapEvidenceSummaries();
app.MapBatchEvaluation();
app.MapConsoleSimulationDiff();
app.MapTrustWeighting();
app.MapAdvisoryAiKnobs();
app.MapBatchContext();
app.MapBatchContext();
app.MapOrchestratorJobs();
app.MapPolicyWorker();
app.MapLedgerExport();
|
||||
|
||||
@@ -1,5 +1,6 @@
|
||||
using System.Collections.Immutable;
|
||||
using System.Diagnostics;
|
||||
using System.Linq;
|
||||
using System.Security.Cryptography;
|
||||
using System.Text;
|
||||
using System.Text.Json;
|
||||
@@ -27,6 +28,9 @@ internal sealed record RuntimeEvaluationRequest(
|
||||
PolicyEvaluationSbom Sbom,
|
||||
PolicyEvaluationExceptions Exceptions,
|
||||
PolicyEvaluationReachability Reachability,
|
||||
string? EntropyLayerSummary,
|
||||
string? EntropyReport,
|
||||
bool? ProvenanceAttested,
|
||||
DateTimeOffset? EvaluationTimestamp = null,
|
||||
bool BypassCache = false);
|
||||
|
||||
@@ -59,6 +63,7 @@ internal sealed class PolicyRuntimeEvaluationService
|
||||
private readonly IPolicyEvaluationCache _cache;
|
||||
private readonly PolicyEvaluator _evaluator;
|
||||
private readonly ReachabilityFacts.ReachabilityFactsJoiningService? _reachabilityFacts;
|
||||
private readonly Signals.Entropy.EntropyPenaltyCalculator _entropy;
|
||||
private readonly TimeProvider _timeProvider;
|
||||
private readonly ILogger<PolicyRuntimeEvaluationService> _logger;
|
||||
|
||||
@@ -73,6 +78,7 @@ internal sealed class PolicyRuntimeEvaluationService
|
||||
IPolicyEvaluationCache cache,
|
||||
PolicyEvaluator evaluator,
|
||||
ReachabilityFacts.ReachabilityFactsJoiningService? reachabilityFacts,
|
||||
Signals.Entropy.EntropyPenaltyCalculator entropy,
|
||||
TimeProvider timeProvider,
|
||||
ILogger<PolicyRuntimeEvaluationService> logger)
|
||||
{
|
||||
@@ -80,6 +86,7 @@ internal sealed class PolicyRuntimeEvaluationService
|
||||
_cache = cache ?? throw new ArgumentNullException(nameof(cache));
|
||||
_evaluator = evaluator ?? throw new ArgumentNullException(nameof(evaluator));
|
||||
_reachabilityFacts = reachabilityFacts;
|
||||
_entropy = entropy ?? throw new ArgumentNullException(nameof(entropy));
|
||||
_timeProvider = timeProvider ?? throw new ArgumentNullException(nameof(timeProvider));
|
||||
_logger = logger ?? throw new ArgumentNullException(nameof(logger));
|
||||
}
|
||||
@@ -158,6 +165,8 @@ internal sealed class PolicyRuntimeEvaluationService
|
||||
$"Compiled policy document not found for pack '{request.PackId}' version {request.Version}.");
|
||||
}
|
||||
|
||||
var entropy = ComputeEntropy(effectiveRequest);
|
||||
|
||||
var context = new PolicyEvaluationContext(
|
||||
effectiveRequest.Severity,
|
||||
new PolicyEvaluationEnvironment(ImmutableDictionary<string, string>.Empty),
|
||||
@@ -166,6 +175,7 @@ internal sealed class PolicyRuntimeEvaluationService
|
||||
effectiveRequest.Sbom,
|
||||
effectiveRequest.Exceptions,
|
||||
effectiveRequest.Reachability,
|
||||
entropy,
|
||||
evaluationTimestamp);
|
||||
|
||||
var evalRequest = new Evaluation.PolicyEvaluationRequest(document, context);
|
||||
@@ -335,6 +345,8 @@ internal sealed class PolicyRuntimeEvaluationService
|
||||
var startTimestamp = _timeProvider.GetTimestamp();
|
||||
var evaluationTimestamp = request.EvaluationTimestamp ?? _timeProvider.GetUtcNow();
|
||||
|
||||
var entropy = ComputeEntropy(request);
|
||||
|
||||
var context = new PolicyEvaluationContext(
|
||||
request.Severity,
|
||||
new PolicyEvaluationEnvironment(ImmutableDictionary<string, string>.Empty),
|
||||
@@ -343,6 +355,7 @@ internal sealed class PolicyRuntimeEvaluationService
|
||||
request.Sbom,
|
||||
request.Exceptions,
|
||||
request.Reachability,
|
||||
entropy,
|
||||
evaluationTimestamp);
|
||||
|
||||
var evalRequest = new Evaluation.PolicyEvaluationRequest(document, context);
|
||||
@@ -495,6 +508,12 @@ internal sealed class PolicyRuntimeEvaluationService
|
||||
source = request.Reachability.Source,
|
||||
method = request.Reachability.Method
|
||||
},
|
||||
entropy = new
|
||||
{
|
||||
layerSummary = request.EntropyLayerSummary is null ? null : StableHash(request.EntropyLayerSummary),
|
||||
entropyReport = request.EntropyReport is null ? null : StableHash(request.EntropyReport),
|
||||
provenanceAttested = request.ProvenanceAttested ?? false
|
||||
}
|
||||
};
|
||||
|
||||
var json = JsonSerializer.Serialize(contextData, ContextSerializerOptions);
|
||||
@@ -517,6 +536,42 @@ internal sealed class PolicyRuntimeEvaluationService
        return (long)elapsed.TotalMilliseconds;
    }

    private PolicyEvaluationEntropy ComputeEntropy(RuntimeEvaluationRequest request)
    {
        if (string.IsNullOrWhiteSpace(request.EntropyLayerSummary))
        {
            return PolicyEvaluationEntropy.Unknown;
        }

        try
        {
            var result = _entropy.ComputeFromJson(
                request.EntropyLayerSummary!,
                request.EntropyReport,
                request.ProvenanceAttested ?? false);

            return new PolicyEvaluationEntropy(
                Penalty: result.Penalty,
                ImageOpaqueRatio: result.ImageOpaqueRatio,
                Blocked: result.Blocked,
                Warned: result.Warned,
                Capped: result.Capped,
                TopFileOpaqueRatio: result.TopFiles.FirstOrDefault()?.OpaqueRatio);
        }
        catch (Exception ex)
        {
            _logger.LogWarning(ex, "Failed to compute entropy penalty; defaulting to zero.");
            return PolicyEvaluationEntropy.Unknown;
        }
    }

    private static string StableHash(string input)
    {
        Span<byte> hash = stackalloc byte[32];
        SHA256.HashData(Encoding.UTF8.GetBytes(input), hash);
        return Convert.ToHexStringLower(hash);
    }

    private async Task<RuntimeEvaluationRequest> EnrichReachabilityAsync(
        RuntimeEvaluationRequest request,
        CancellationToken cancellationToken)
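The StableHash helper above pins cache-context hashing to a lowercase-hex SHA-256 of the UTF-8 input, so identical payloads always produce identical digests. A minimal standalone sketch of that behaviour, assuming only the BCL calls already used in the hunk (SHA256.HashData and Convert.ToHexStringLower on current .NET):

using System;
using System.Security.Cryptography;
using System.Text;

static string StableHash(string input)
{
    // Mirrors the helper in the hunk: SHA-256 over UTF-8 bytes, lowercase hex output.
    Span<byte> hash = stackalloc byte[32];
    SHA256.HashData(Encoding.UTF8.GetBytes(input), hash);
    return Convert.ToHexStringLower(hash);
}

// Repeated inputs hash identically, which is what makes the serialized evaluation
// context usable as a deterministic cache-key component.
Console.WriteLine(StableHash("layer-summary-v1") == StableHash("layer-summary-v1")); // True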
@@ -398,7 +398,7 @@ public sealed class EvaluationRunRepository : RepositoryBase<PolicyDataSource>,
        return reader.IsDBNull(ordinal) ? null : reader.GetInt32(ordinal);
    }

    private static decimal? GetNullableDecimal(NpgsqlDataReader reader, int ordinal)
    private static new decimal? GetNullableDecimal(NpgsqlDataReader reader, int ordinal)
    {
        return reader.IsDBNull(ordinal) ? null : reader.GetDecimal(ordinal);
    }

@@ -0,0 +1,39 @@
using System;
using StellaOps.Policy.Engine.ConsoleSurface;
using StellaOps.Policy.Engine.Simulation;
using Xunit;

namespace StellaOps.Policy.Engine.Tests.ConsoleSurface;

public sealed class ConsoleSimulationDiffServiceTests
{
    [Fact]
    public void Compute_IsDeterministic_AndCarriesMetadata()
    {
        var analytics = new SimulationAnalyticsService();
        var service = new ConsoleSimulationDiffService(analytics);

        var request = new ConsoleSimulationDiffRequest(
            BaselinePolicyVersion: "2025.11.24",
            CandidatePolicyVersion: "2025.12.02",
            ArtifactScope: new[]
            {
                new ConsoleArtifactScope("sha256:abc", "pkg:npm/foo@1.0.0"),
                new ConsoleArtifactScope("sha256:def", "pkg:npm/bar@2.0.0")
            },
            Filters: new ConsoleSimulationFilters(new[] { "high", "critical" }, new[] { "RULE-1234" }),
            Budget: new ConsoleSimulationBudget(maxFindings: 10, maxExplainSamples: 5),
            EvaluationTimestamp: new DateTimeOffset(2025, 12, 2, 0, 0, 0, TimeSpan.Zero));

        var first = service.Compute(request);
        var second = service.Compute(request);

        Assert.Equal(first, second); // deterministic
        Assert.Equal("console-policy-23-001", first.SchemaVersion);
        Assert.True(first.Summary.After.Total > 0);
        Assert.True(first.Summary.Before.Total > 0);
        Assert.NotEmpty(first.RuleImpact);
        Assert.True(first.Samples.Findings.Length <= 10);
        Assert.Equal(request.EvaluationTimestamp, first.Provenance.EvaluationTimestamp);
    }
}
@@ -344,17 +344,19 @@ policy "Baseline Production Policy" syntax "stella-dsl@1" {
|
||||
|
||||
private static PolicyEvaluationContext CreateContext(string severity, string exposure, PolicyEvaluationExceptions? exceptions = null)
|
||||
{
|
||||
return new PolicyEvaluationContext(
|
||||
new PolicyEvaluationSeverity(severity),
|
||||
new PolicyEvaluationEnvironment(new Dictionary<string, string>(StringComparer.OrdinalIgnoreCase)
|
||||
{
|
||||
["exposure"] = exposure
|
||||
}.ToImmutableDictionary(StringComparer.OrdinalIgnoreCase)),
|
||||
new PolicyEvaluationAdvisory("GHSA", ImmutableDictionary<string, string>.Empty),
|
||||
PolicyEvaluationVexEvidence.Empty,
|
||||
PolicyEvaluationSbom.Empty,
|
||||
exceptions ?? PolicyEvaluationExceptions.Empty);
|
||||
}
|
||||
return new PolicyEvaluationContext(
|
||||
new PolicyEvaluationSeverity(severity),
|
||||
new PolicyEvaluationEnvironment(new Dictionary<string, string>(StringComparer.OrdinalIgnoreCase)
|
||||
{
|
||||
["exposure"] = exposure
|
||||
}.ToImmutableDictionary(StringComparer.OrdinalIgnoreCase)),
|
||||
new PolicyEvaluationAdvisory("GHSA", ImmutableDictionary<string, string>.Empty),
|
||||
PolicyEvaluationVexEvidence.Empty,
|
||||
PolicyEvaluationSbom.Empty,
|
||||
exceptions ?? PolicyEvaluationExceptions.Empty,
|
||||
PolicyEvaluationReachability.Unknown,
|
||||
PolicyEvaluationEntropy.Unknown);
|
||||
}
|
||||
|
||||
private static string Describe(ImmutableArray<PolicyIssue> issues) =>
|
||||
string.Join(" | ", issues.Select(issue => $"{issue.Severity}:{issue.Code}:{issue.Message}"));
|
||||
|
||||
@@ -8,6 +8,7 @@ using StellaOps.Policy.Engine.Evaluation;
|
||||
using StellaOps.Policy.Engine.ReachabilityFacts;
|
||||
using StellaOps.Policy.Engine.Options;
|
||||
using StellaOps.Policy.Engine.Services;
|
||||
using StellaOps.Policy.Engine.Signals.Entropy;
|
||||
using StellaOps.PolicyDsl;
|
||||
using Xunit;
|
||||
|
||||
@@ -250,6 +251,9 @@ public sealed class PolicyRuntimeEvaluationServiceTests
|
||||
Sbom: PolicyEvaluationSbom.Empty,
|
||||
Exceptions: PolicyEvaluationExceptions.Empty,
|
||||
Reachability: PolicyEvaluationReachability.Unknown,
|
||||
EntropyLayerSummary: null,
|
||||
EntropyReport: null,
|
||||
ProvenanceAttested: null,
|
||||
EvaluationTimestamp: new DateTimeOffset(2025, 1, 1, 0, 0, 0, TimeSpan.Zero),
|
||||
BypassCache: false);
|
||||
}
|
||||
@@ -262,6 +266,7 @@ public sealed class PolicyRuntimeEvaluationServiceTests
|
||||
var options = Microsoft.Extensions.Options.Options.Create(new PolicyEngineOptions());
|
||||
var cache = new InMemoryPolicyEvaluationCache(cacheLogger, TimeProvider.System, options);
|
||||
var evaluator = new PolicyEvaluator();
|
||||
var entropy = new EntropyPenaltyCalculator(options, NullLogger<EntropyPenaltyCalculator>.Instance);
|
||||
|
||||
var reachabilityStore = new InMemoryReachabilityFactsStore(TimeProvider.System);
|
||||
var reachabilityCache = new InMemoryReachabilityFactsOverlayCache(
|
||||
@@ -281,6 +286,7 @@ public sealed class PolicyRuntimeEvaluationServiceTests
|
||||
cache,
|
||||
evaluator,
|
||||
reachabilityService,
|
||||
entropy,
|
||||
TimeProvider.System,
|
||||
serviceLogger);
|
||||
|
||||
|
||||
@@ -12,6 +12,7 @@ public sealed class PackRepositoryTests : IAsyncLifetime
|
||||
{
|
||||
private readonly PolicyPostgresFixture _fixture;
|
||||
private readonly PackRepository _repository;
|
||||
private readonly PackVersionRepository _packVersionRepository;
|
||||
private readonly string _tenantId = Guid.NewGuid().ToString();
|
||||
|
||||
public PackRepositoryTests(PolicyPostgresFixture fixture)
|
||||
@@ -22,6 +23,7 @@ public sealed class PackRepositoryTests : IAsyncLifetime
|
||||
options.SchemaName = fixture.SchemaName;
|
||||
var dataSource = new PolicyDataSource(Options.Create(options), NullLogger<PolicyDataSource>.Instance);
|
||||
_repository = new PackRepository(dataSource, NullLogger<PackRepository>.Instance);
|
||||
_packVersionRepository = new PackVersionRepository(dataSource, NullLogger<PackVersionRepository>.Instance);
|
||||
}
|
||||
|
||||
public Task InitializeAsync() => _fixture.TruncateAllTablesAsync();
|
||||
@@ -161,14 +163,49 @@ public sealed class PackRepositoryTests : IAsyncLifetime
|
||||
// Arrange
|
||||
var pack = CreatePack("version-test");
|
||||
await _repository.CreateAsync(pack);
|
||||
await CreatePackVersionAsync(pack.Id, 1, publish: true);
|
||||
|
||||
// Act
|
||||
var result = await _repository.SetActiveVersionAsync(_tenantId, pack.Id, 2);
|
||||
var result = await _repository.SetActiveVersionAsync(_tenantId, pack.Id, 1);
|
||||
var fetched = await _repository.GetByIdAsync(_tenantId, pack.Id);
|
||||
|
||||
// Assert
|
||||
result.Should().BeTrue();
|
||||
fetched!.ActiveVersion.Should().Be(2);
|
||||
fetched!.ActiveVersion.Should().Be(1);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task PublishAndActivateVersions_FollowsWorkflow()
|
||||
{
|
||||
// Arrange
|
||||
var pack = CreatePack("workflow-pack");
|
||||
await _repository.CreateAsync(pack);
|
||||
|
||||
// Unpublished version cannot be activated
|
||||
var version1 = await CreatePackVersionAsync(pack.Id, 1);
|
||||
var activationBeforePublish = await _repository.SetActiveVersionAsync(_tenantId, pack.Id, 1);
|
||||
activationBeforePublish.Should().BeFalse();
|
||||
|
||||
// Publish v1 and activate
|
||||
await _packVersionRepository.PublishAsync(version1.Id, "tester");
|
||||
await _repository.SetActiveVersionAsync(_tenantId, pack.Id, 1);
|
||||
var activeAfterV1 = await _repository.GetByIdAsync(_tenantId, pack.Id);
|
||||
activeAfterV1!.ActiveVersion.Should().Be(1);
|
||||
|
||||
// Create and publish v2, then promote to active
|
||||
var nextVersion = await _packVersionRepository.GetNextVersionAsync(pack.Id);
|
||||
var version2 = await CreatePackVersionAsync(pack.Id, nextVersion);
|
||||
await _packVersionRepository.PublishAsync(version2.Id, "tester");
|
||||
await _repository.SetActiveVersionAsync(_tenantId, pack.Id, version2.Version);
|
||||
|
||||
// Assert ordering and active marker
|
||||
var versions = await _packVersionRepository.GetByPackIdAsync(pack.Id, publishedOnly: true);
|
||||
versions.Select(v => v.Version).Should().ContainInOrder(new[] { 2, 1 });
|
||||
var latest = await _packVersionRepository.GetLatestAsync(pack.Id);
|
||||
latest!.Version.Should().Be(2);
|
||||
|
||||
var finalPack = await _repository.GetByIdAsync(_tenantId, pack.Id);
|
||||
finalPack!.ActiveVersion.Should().Be(2);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
@@ -210,4 +247,27 @@ public sealed class PackRepositoryTests : IAsyncLifetime
|
||||
Name = name,
|
||||
IsBuiltin = false
|
||||
};
|
||||
|
||||
private async Task<PackVersionEntity> CreatePackVersionAsync(Guid packId, int version, bool publish = false)
|
||||
{
|
||||
var packVersion = new PackVersionEntity
|
||||
{
|
||||
Id = Guid.NewGuid(),
|
||||
PackId = packId,
|
||||
Version = version,
|
||||
Description = $"v{version}",
|
||||
RulesHash = $"rules-hash-{version}",
|
||||
IsPublished = false
|
||||
};
|
||||
|
||||
var created = await _packVersionRepository.CreateAsync(packVersion);
|
||||
|
||||
if (publish)
|
||||
{
|
||||
await _packVersionRepository.PublishAsync(created.Id, "tester");
|
||||
created = (await _packVersionRepository.GetByIdAsync(created.Id))!;
|
||||
}
|
||||
|
||||
return created;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -190,7 +190,7 @@ public sealed class RiskProfileRepositoryTests : IAsyncLifetime
|
||||
{
|
||||
// Arrange
|
||||
var original = CreateProfile("version-create");
|
||||
await _repository.CreateAsync(original);
|
||||
original = await _repository.CreateAsync(original);
|
||||
|
||||
// Act
|
||||
var newVersion = new RiskProfileEntity
|
||||
@@ -207,6 +207,8 @@ public sealed class RiskProfileRepositoryTests : IAsyncLifetime
|
||||
// Assert
|
||||
created.Should().NotBeNull();
|
||||
created.Version.Should().Be(2);
|
||||
var originalAfter = await _repository.GetByIdAsync(_tenantId, original.Id);
|
||||
originalAfter!.IsActive.Should().BeFalse();
|
||||
}
|
||||
|
||||
[Fact]
|
||||
@@ -263,12 +265,84 @@ public sealed class RiskProfileRepositoryTests : IAsyncLifetime
|
||||
fetched.Should().BeNull();
|
||||
}
|
||||
|
||||
private RiskProfileEntity CreateProfile(string name) => new()
|
||||
[Fact]
|
||||
public async Task CreateVersion_HistoryRemainsQueryableAndOrdered()
|
||||
{
|
||||
// Arrange
|
||||
var v1 = await _repository.CreateAsync(CreateProfile(
|
||||
name: "history-profile",
|
||||
thresholds: "{\"critical\":9.0}",
|
||||
scoringWeights: "{\"vulnerability\":1.0}"));
|
||||
|
||||
var v2 = new RiskProfileEntity
|
||||
{
|
||||
Id = Guid.NewGuid(),
|
||||
TenantId = _tenantId,
|
||||
Name = "history-profile",
|
||||
DisplayName = "History V2",
|
||||
Description = "Second revision with tuned thresholds",
|
||||
Thresholds = "{\"critical\":8.0,\"high\":6.5}",
|
||||
ScoringWeights = "{\"vulnerability\":0.9}",
|
||||
Exemptions = "[]",
|
||||
Metadata = "{\"source\":\"unit-test\"}"
|
||||
};
|
||||
|
||||
// Act
|
||||
var createdV2 = await _repository.CreateVersionAsync(_tenantId, "history-profile", v2);
|
||||
|
||||
// Assert
|
||||
createdV2.Version.Should().Be(2);
|
||||
createdV2.IsActive.Should().BeTrue();
|
||||
|
||||
var versions = await _repository.GetVersionsByNameAsync(_tenantId, "history-profile");
|
||||
versions.Select(x => x.Version).Should().ContainInOrder(new[] { 2, 1 });
|
||||
versions.Single(x => x.Version == 1).IsActive.Should().BeFalse();
|
||||
versions.Single(x => x.Version == 1).Thresholds.Should().Contain("9.0");
|
||||
|
||||
var active = await _repository.GetActiveByNameAsync(_tenantId, "history-profile");
|
||||
active!.Version.Should().Be(2);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task Activate_RevertsToPriorVersionAndDeactivatesCurrent()
|
||||
{
|
||||
// Arrange
|
||||
var v1 = await _repository.CreateAsync(CreateProfile("toggle-profile"));
|
||||
var v2 = await _repository.CreateVersionAsync(_tenantId, "toggle-profile", new RiskProfileEntity
|
||||
{
|
||||
Id = Guid.NewGuid(),
|
||||
TenantId = _tenantId,
|
||||
Name = "toggle-profile",
|
||||
DisplayName = "Toggle V2",
|
||||
Thresholds = "{\"critical\":8.5}"
|
||||
});
|
||||
|
||||
// Act
|
||||
var activated = await _repository.ActivateAsync(_tenantId, v1.Id);
|
||||
|
||||
// Assert
|
||||
activated.Should().BeTrue();
|
||||
var versions = await _repository.GetVersionsByNameAsync(_tenantId, "toggle-profile");
|
||||
versions.Single(x => x.Id == v1.Id).IsActive.Should().BeTrue();
|
||||
versions.Single(x => x.Id == v2.Id).IsActive.Should().BeFalse();
|
||||
|
||||
var active = await _repository.GetActiveByNameAsync(_tenantId, "toggle-profile");
|
||||
active!.Id.Should().Be(v1.Id);
|
||||
}
|
||||
|
||||
private RiskProfileEntity CreateProfile(
|
||||
string name,
|
||||
int version = 1,
|
||||
bool isActive = true,
|
||||
string? thresholds = null,
|
||||
string? scoringWeights = null) => new()
|
||||
{
|
||||
Id = Guid.NewGuid(),
|
||||
TenantId = _tenantId,
|
||||
Name = name,
|
||||
Version = 1,
|
||||
IsActive = true
|
||||
Version = version,
|
||||
IsActive = isActive,
|
||||
Thresholds = thresholds ?? "{}",
|
||||
ScoringWeights = scoringWeights ?? "{}"
|
||||
};
|
||||
}
|
||||
|
||||
@@ -0,0 +1,82 @@
|
||||
using System.Collections.Generic;
|
||||
using System.Text.Json.Serialization;
|
||||
|
||||
namespace StellaOps.Scanner.WebService.Contracts;
|
||||
|
||||
public sealed record LinksetSeverityDto
|
||||
{
|
||||
[JsonPropertyName("source")]
|
||||
[JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
|
||||
public string? Source { get; init; }
|
||||
|
||||
[JsonPropertyName("type")]
|
||||
[JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
|
||||
public string? Type { get; init; }
|
||||
|
||||
[JsonPropertyName("score")]
|
||||
[JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
|
||||
public double? Score { get; init; }
|
||||
|
||||
[JsonPropertyName("vector")]
|
||||
[JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
|
||||
public string? Vector { get; init; }
|
||||
|
||||
[JsonPropertyName("origin")]
|
||||
[JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
|
||||
public string? Origin { get; init; }
|
||||
|
||||
[JsonPropertyName("labels")]
|
||||
[JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
|
||||
public IReadOnlyDictionary<string, string>? Labels { get; init; }
|
||||
|
||||
[JsonPropertyName("raw")]
|
||||
[JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
|
||||
public IReadOnlyDictionary<string, object?>? Raw { get; init; }
|
||||
}
|
||||
|
||||
public sealed record LinksetConflictDto
|
||||
{
|
||||
[JsonPropertyName("field")]
|
||||
public string Field { get; init; } = string.Empty;
|
||||
|
||||
[JsonPropertyName("reason")]
|
||||
[JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
|
||||
public string? Reason { get; init; }
|
||||
|
||||
[JsonPropertyName("values")]
|
||||
[JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
|
||||
public IReadOnlyList<string>? Values { get; init; }
|
||||
|
||||
[JsonPropertyName("sourceIds")]
|
||||
[JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
|
||||
public IReadOnlyList<string>? SourceIds { get; init; }
|
||||
}
|
||||
|
||||
public sealed record LinksetSummaryDto
|
||||
{
|
||||
[JsonPropertyName("advisoryId")]
|
||||
public string AdvisoryId { get; init; } = string.Empty;
|
||||
|
||||
[JsonPropertyName("source")]
|
||||
public string Source { get; init; } = string.Empty;
|
||||
|
||||
[JsonPropertyName("confidence")]
|
||||
[JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
|
||||
public double? Confidence { get; init; }
|
||||
|
||||
[JsonPropertyName("observationIds")]
|
||||
[JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
|
||||
public IReadOnlyList<string>? ObservationIds { get; init; }
|
||||
|
||||
[JsonPropertyName("references")]
|
||||
[JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
|
||||
public IReadOnlyList<string>? References { get; init; }
|
||||
|
||||
[JsonPropertyName("severities")]
|
||||
[JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
|
||||
public IReadOnlyList<LinksetSeverityDto>? Severities { get; init; }
|
||||
|
||||
[JsonPropertyName("conflicts")]
|
||||
[JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
|
||||
public IReadOnlyList<LinksetConflictDto>? Conflicts { get; init; }
|
||||
}
|
||||
@@ -0,0 +1,27 @@
using System.Collections.Generic;
using System.Text.Json.Serialization;

namespace StellaOps.Scanner.WebService.Contracts;

public sealed record LinksetSummaryRequestDto
{
    [JsonPropertyName("advisoryIds")]
    public IReadOnlyList<string> AdvisoryIds { get; init; } = Array.Empty<string>();

    [JsonPropertyName("imageDigest")]
    [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
    public string? ImageDigest { get; init; }

    [JsonPropertyName("includePolicyOverlay")]
    public bool IncludePolicyOverlay { get; init; }
}

public sealed record LinksetSummaryResponseDto
{
    [JsonPropertyName("linksets")]
    public IReadOnlyList<LinksetSummaryDto> Linksets { get; init; } = Array.Empty<LinksetSummaryDto>();

    [JsonPropertyName("policy")]
    [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
    public RuntimePolicyImageResponseDto? Policy { get; init; }
}
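As a rough illustration of the wire shape these records define, the sketch below serializes a LinksetSummaryRequestDto using the camelCase names declared above; the advisory ID is a placeholder, and "sha256:abc" follows the digests used elsewhere in this commit's tests.

using System;
using System.Text.Json;
using StellaOps.Scanner.WebService.Contracts;

var request = new LinksetSummaryRequestDto
{
    AdvisoryIds = new[] { "CVE-2025-0001" },   // placeholder advisory ID
    ImageDigest = "sha256:abc",                // required when IncludePolicyOverlay is true
    IncludePolicyOverlay = true
};

// Property names come from the [JsonPropertyName] attributes above:
// {"advisoryIds":["CVE-2025-0001"],"imageDigest":"sha256:abc","includePolicyOverlay":true}
Console.WriteLine(JsonSerializer.Serialize(request));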
@@ -62,7 +62,12 @@ public sealed record ReportDocumentDto
|
||||
[JsonPropertyName("surface")]
|
||||
[JsonPropertyOrder(8)]
|
||||
public SurfacePointersDto? Surface { get; init; }
|
||||
}
|
||||
|
||||
[JsonPropertyName("linksets")]
|
||||
[JsonPropertyOrder(9)]
|
||||
[JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
|
||||
public IReadOnlyList<LinksetSummaryDto>? Linksets { get; init; }
|
||||
}
|
||||
|
||||
public sealed record ReportPolicyDto
|
||||
{
|
||||
|
||||
@@ -66,14 +66,18 @@ public sealed record RuntimePolicyImageResponseDto
|
||||
[JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
|
||||
public string? QuietedBy { get; init; }
|
||||
|
||||
[JsonPropertyName("metadata")]
|
||||
[JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
|
||||
public string? Metadata { get; init; }
|
||||
|
||||
[JsonPropertyName("buildIds")]
|
||||
[JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
|
||||
public IReadOnlyList<string>? BuildIds { get; init; }
|
||||
}
|
||||
[JsonPropertyName("metadata")]
|
||||
[JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
|
||||
public string? Metadata { get; init; }
|
||||
|
||||
[JsonPropertyName("buildIds")]
|
||||
[JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
|
||||
public IReadOnlyList<string>? BuildIds { get; init; }
|
||||
|
||||
[JsonPropertyName("linksets")]
|
||||
[JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
|
||||
public IReadOnlyList<LinksetSummaryDto>? Linksets { get; init; }
|
||||
}
|
||||
|
||||
public sealed record RuntimePolicyRekorDto
|
||||
{
|
||||
|
||||
@@ -83,18 +83,30 @@ internal static class PolicyEndpoints
|
||||
return operation;
|
||||
});
|
||||
|
||||
policyGroup.MapPost("/overlay", HandlePolicyOverlayAsync)
|
||||
.WithName("scanner.policy.overlay")
|
||||
.Produces<PolicyOverlayResponseDto>(StatusCodes.Status200OK)
|
||||
.Produces(StatusCodes.Status400BadRequest)
|
||||
.RequireAuthorization(ScannerPolicies.Reports)
|
||||
.WithOpenApi(operation =>
|
||||
{
|
||||
operation.Summary = "Request policy overlays for graph nodes.";
|
||||
operation.Description = "Returns deterministic policy overlays with runtime evidence for graph nodes (Cartographer integration). Overlay IDs are computed as sha256(tenant|nodeId|overlayKind).";
|
||||
return operation;
|
||||
});
|
||||
}
|
||||
policyGroup.MapPost("/overlay", HandlePolicyOverlayAsync)
|
||||
.WithName("scanner.policy.overlay")
|
||||
.Produces<PolicyOverlayResponseDto>(StatusCodes.Status200OK)
|
||||
.Produces(StatusCodes.Status400BadRequest)
|
||||
.RequireAuthorization(ScannerPolicies.Reports)
|
||||
.WithOpenApi(operation =>
|
||||
{
|
||||
operation.Summary = "Request policy overlays for graph nodes.";
|
||||
operation.Description = "Returns deterministic policy overlays with runtime evidence for graph nodes (Cartographer integration). Overlay IDs are computed as sha256(tenant|nodeId|overlayKind).";
|
||||
return operation;
|
||||
});
|
||||
|
||||
policyGroup.MapPost("/linksets", HandleLinksetSummaryAsync)
|
||||
.WithName("scanner.policy.linksets")
|
||||
.Produces<LinksetSummaryResponseDto>(StatusCodes.Status200OK)
|
||||
.Produces(StatusCodes.Status400BadRequest)
|
||||
.RequireAuthorization(ScannerPolicies.Reports)
|
||||
.WithOpenApi(operation =>
|
||||
{
|
||||
operation.Summary = "Fetch advisory linkset summaries with optional policy overlay.";
|
||||
operation.Description = "Returns linkset severities/conflicts for advisory IDs and, when requested, runtime policy overlay for the provided image digest.";
|
||||
return operation;
|
||||
});
|
||||
}
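The overlay endpoint's description above states that overlay IDs are computed as sha256(tenant|nodeId|overlayKind). A minimal sketch of that derivation, assuming a plain '|' join, UTF-8 encoding, and lowercase hex (the exact canonicalisation is not shown in this diff):

using System;
using System.Security.Cryptography;
using System.Text;

static string ComputeOverlayId(string tenant, string nodeId, string overlayKind)
{
    // sha256(tenant|nodeId|overlayKind) per the endpoint summary; the separator,
    // encoding, and hex casing here are illustrative assumptions.
    var payload = string.Join('|', tenant, nodeId, overlayKind);
    var digest = SHA256.HashData(Encoding.UTF8.GetBytes(payload));
    return Convert.ToHexString(digest).ToLowerInvariant();
}

Console.WriteLine(ComputeOverlayId("tenant-a", "node-123", "policy")); // hypothetical inputs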
|
||||
|
||||
private static IResult HandleSchemaAsync(HttpContext context)
|
||||
{
|
||||
@@ -188,11 +200,11 @@ internal static class PolicyEndpoints
|
||||
return Json(payload);
|
||||
}
|
||||
|
||||
private static async Task<IResult> HandleRuntimePolicyAsync(
|
||||
RuntimePolicyRequestDto request,
|
||||
IRuntimePolicyService runtimePolicyService,
|
||||
HttpContext context,
|
||||
CancellationToken cancellationToken)
|
||||
private static async Task<IResult> HandleRuntimePolicyAsync(
|
||||
RuntimePolicyRequestDto request,
|
||||
IRuntimePolicyService runtimePolicyService,
|
||||
HttpContext context,
|
||||
CancellationToken cancellationToken)
|
||||
{
|
||||
ArgumentNullException.ThrowIfNull(request);
|
||||
ArgumentNullException.ThrowIfNull(runtimePolicyService);
|
||||
@@ -273,8 +285,96 @@ internal static class PolicyEndpoints
|
||||
var evaluation = await runtimePolicyService.EvaluateAsync(evaluationRequest, cancellationToken).ConfigureAwait(false);
|
||||
|
||||
var resultPayload = MapRuntimePolicyResponse(evaluation);
|
||||
return Json(resultPayload);
|
||||
}
|
||||
return Json(resultPayload);
|
||||
}
|
||||
|
||||
private static async Task<IResult> HandleLinksetSummaryAsync(
|
||||
LinksetSummaryRequestDto request,
|
||||
ILinksetResolver linksetResolver,
|
||||
IRuntimePolicyService runtimePolicyService,
|
||||
HttpContext context,
|
||||
CancellationToken cancellationToken)
|
||||
{
|
||||
ArgumentNullException.ThrowIfNull(request);
|
||||
ArgumentNullException.ThrowIfNull(linksetResolver);
|
||||
ArgumentNullException.ThrowIfNull(runtimePolicyService);
|
||||
|
||||
if (request.AdvisoryIds is null || request.AdvisoryIds.Count == 0)
|
||||
{
|
||||
return ProblemResultFactory.Create(
|
||||
context,
|
||||
ProblemTypes.Validation,
|
||||
"Invalid linkset request",
|
||||
StatusCodes.Status400BadRequest,
|
||||
detail: "advisoryIds must include at least one value.");
|
||||
}
|
||||
|
||||
if (request.IncludePolicyOverlay && string.IsNullOrWhiteSpace(request.ImageDigest))
|
||||
{
|
||||
return ProblemResultFactory.Create(
|
||||
context,
|
||||
ProblemTypes.Validation,
|
||||
"Invalid linkset request",
|
||||
StatusCodes.Status400BadRequest,
|
||||
detail: "imageDigest is required when includePolicyOverlay is true.");
|
||||
}
|
||||
|
||||
var linksets = await linksetResolver.ResolveByAdvisoryIdsAsync(request.AdvisoryIds, cancellationToken).ConfigureAwait(false);
|
||||
|
||||
RuntimePolicyImageResponseDto? policy = null;
|
||||
if (request.IncludePolicyOverlay && !string.IsNullOrWhiteSpace(request.ImageDigest))
|
||||
{
|
||||
var runtimeRequest = new RuntimePolicyRequestDto
|
||||
{
|
||||
Images = new[] { request.ImageDigest!.Trim() }
|
||||
};
|
||||
|
||||
var evaluation = await runtimePolicyService.EvaluateAsync(
|
||||
new RuntimePolicyEvaluationRequest(
|
||||
runtimeRequest.Namespace,
|
||||
new ReadOnlyDictionary<string, string>(new Dictionary<string, string>(StringComparer.Ordinal)),
|
||||
runtimeRequest.Images),
|
||||
cancellationToken).ConfigureAwait(false);
|
||||
|
||||
if (evaluation.Results.TryGetValue(request.ImageDigest!.Trim(), out var decision))
|
||||
{
|
||||
RuntimePolicyRekorDto? rekor = null;
|
||||
if (decision.Rekor is not null)
|
||||
{
|
||||
rekor = new RuntimePolicyRekorDto
|
||||
{
|
||||
Uuid = decision.Rekor.Uuid,
|
||||
Url = decision.Rekor.Url,
|
||||
Verified = decision.Rekor.Verified
|
||||
};
|
||||
}
|
||||
|
||||
policy = new RuntimePolicyImageResponseDto
|
||||
{
|
||||
PolicyVerdict = decision.PolicyVerdict.ToString().ToLowerInvariant(),
|
||||
Signed = decision.Signed,
|
||||
HasSbomReferrers = decision.HasSbomReferrers,
|
||||
HasSbomLegacy = decision.HasSbomReferrers,
|
||||
Reasons = decision.Reasons,
|
||||
Rekor = rekor,
|
||||
Confidence = Math.Round(decision.Confidence, 6, MidpointRounding.AwayFromZero),
|
||||
Quieted = decision.Quieted,
|
||||
QuietedBy = decision.QuietedBy,
|
||||
Metadata = decision.Metadata is { Count: > 0 } ? JsonSerializer.Serialize(decision.Metadata) : null,
|
||||
BuildIds = decision.BuildIds,
|
||||
Linksets = decision.Linksets
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
var response = new LinksetSummaryResponseDto
|
||||
{
|
||||
Linksets = linksets,
|
||||
Policy = policy
|
||||
};
|
||||
|
||||
return Json(response);
|
||||
}
|
||||
|
||||
private static string NormalizeSegment(string segment)
|
||||
{
|
||||
@@ -322,15 +422,16 @@ internal static class PolicyEndpoints
|
||||
Signed = decision.Signed,
|
||||
HasSbomReferrers = decision.HasSbomReferrers,
|
||||
HasSbomLegacy = decision.HasSbomReferrers,
|
||||
Reasons = decision.Reasons.ToArray(),
|
||||
Rekor = rekor,
|
||||
Confidence = Math.Round(decision.Confidence, 6, MidpointRounding.AwayFromZero),
|
||||
Quieted = decision.Quieted,
|
||||
QuietedBy = decision.QuietedBy,
|
||||
Metadata = metadata,
|
||||
BuildIds = decision.BuildIds is { Count: > 0 } ? decision.BuildIds.ToArray() : null
|
||||
};
|
||||
}
|
||||
Reasons = decision.Reasons.ToArray(),
|
||||
Rekor = rekor,
|
||||
Confidence = Math.Round(decision.Confidence, 6, MidpointRounding.AwayFromZero),
|
||||
Quieted = decision.Quieted,
|
||||
QuietedBy = decision.QuietedBy,
|
||||
Metadata = metadata,
|
||||
BuildIds = decision.BuildIds is { Count: > 0 } ? decision.BuildIds.ToArray() : null,
|
||||
Linksets = decision.Linksets is { Count: > 0 } ? decision.Linksets.ToArray() : null
|
||||
};
|
||||
}
|
||||
|
||||
return new RuntimePolicyResponseDto
|
||||
{
|
||||
|
||||
@@ -57,6 +57,7 @@ internal static class ReportEndpoints
|
||||
TimeProvider timeProvider,
|
||||
IReportEventDispatcher eventDispatcher,
|
||||
ISurfacePointerService surfacePointerService,
|
||||
ILinksetResolver linksetResolver,
|
||||
ILoggerFactory loggerFactory,
|
||||
HttpContext context,
|
||||
CancellationToken cancellationToken)
|
||||
@@ -67,6 +68,7 @@ internal static class ReportEndpoints
|
||||
ArgumentNullException.ThrowIfNull(timeProvider);
|
||||
ArgumentNullException.ThrowIfNull(eventDispatcher);
|
||||
ArgumentNullException.ThrowIfNull(surfacePointerService);
|
||||
ArgumentNullException.ThrowIfNull(linksetResolver);
|
||||
ArgumentNullException.ThrowIfNull(loggerFactory);
|
||||
var logger = loggerFactory.CreateLogger("Scanner.WebService.Reports");
|
||||
|
||||
@@ -128,15 +130,16 @@ internal static class ReportEndpoints
|
||||
extensions: extensions);
|
||||
}
|
||||
|
||||
var projectedVerdicts = preview.Diffs
|
||||
.Select(diff => PolicyDtoMapper.ToVerdictDto(diff.Projected))
|
||||
.ToArray();
|
||||
|
||||
var issuesDto = preview.Issues.Select(PolicyDtoMapper.ToIssueDto).ToArray();
|
||||
var projectedVerdicts = preview.Diffs
|
||||
.Select(diff => PolicyDtoMapper.ToVerdictDto(diff.Projected))
|
||||
.ToArray();
|
||||
|
||||
var issuesDto = preview.Issues.Select(PolicyDtoMapper.ToIssueDto).ToArray();
|
||||
var summary = BuildSummary(projectedVerdicts);
|
||||
var verdict = ComputeVerdict(projectedVerdicts);
|
||||
var reportId = CreateReportId(request.ImageDigest!, preview.PolicyDigest);
|
||||
var generatedAt = timeProvider.GetUtcNow();
|
||||
var linksets = await linksetResolver.ResolveAsync(request.Findings, cancellationToken).ConfigureAwait(false);
|
||||
SurfacePointersDto? surfacePointers = null;
|
||||
|
||||
try
|
||||
@@ -171,7 +174,8 @@ internal static class ReportEndpoints
|
||||
Summary = summary,
|
||||
Verdicts = projectedVerdicts,
|
||||
Issues = issuesDto,
|
||||
Surface = surfacePointers
|
||||
Surface = surfacePointers,
|
||||
Linksets = linksets.Count == 0 ? null : linksets
|
||||
};
|
||||
|
||||
var payloadBytes = JsonSerializer.SerializeToUtf8Bytes(document, SerializerOptions);
|
||||
|
||||
@@ -0,0 +1,12 @@
namespace StellaOps.Scanner.WebService.Options;

public sealed class ConcelierLinksetOptions
{
    public const string SectionName = "scanner:concelier";

    public bool Enabled { get; set; }
    public string? BaseUrl { get; set; }
    public string? ApiKey { get; set; }
    public string ApiKeyHeader { get; set; } = "Authorization";
    public int TimeoutSeconds { get; set; } = 10;
}
@@ -8,6 +8,7 @@ using Microsoft.AspNetCore.Diagnostics;
|
||||
using Microsoft.AspNetCore.Http;
|
||||
using Microsoft.AspNetCore.Mvc;
|
||||
using Microsoft.AspNetCore.Authentication;
|
||||
using Microsoft.Extensions.DependencyInjection.Extensions;
|
||||
using Microsoft.Extensions.Options;
|
||||
using Serilog;
|
||||
using Serilog.Events;
|
||||
@@ -17,6 +18,7 @@ using StellaOps.Configuration;
|
||||
using StellaOps.Plugin.DependencyInjection;
|
||||
using StellaOps.Cryptography.DependencyInjection;
|
||||
using StellaOps.Cryptography.Plugin.BouncyCastle;
|
||||
using StellaOps.Concelier.Core.Linksets;
|
||||
using StellaOps.Policy;
|
||||
using StellaOps.Scanner.Cache;
|
||||
using StellaOps.Scanner.Core.Contracts;
|
||||
@@ -30,6 +32,7 @@ using StellaOps.Scanner.WebService.Endpoints;
|
||||
using StellaOps.Scanner.WebService.Extensions;
|
||||
using StellaOps.Scanner.WebService.Hosting;
|
||||
using StellaOps.Scanner.WebService.Options;
|
||||
using StellaOps.Scanner.WebService.Options;
|
||||
using StellaOps.Scanner.WebService.Services;
|
||||
using StellaOps.Scanner.WebService.Security;
|
||||
using StellaOps.Scanner.WebService.Replay;
|
||||
@@ -205,6 +208,7 @@ builder.Services.AddSingleton<IPostConfigureOptions<ScannerStorageOptions>, Scan
|
||||
builder.Services.AddSingleton<RuntimeEventRateLimiter>();
|
||||
builder.Services.AddSingleton<IRuntimeEventIngestionService, RuntimeEventIngestionService>();
|
||||
builder.Services.AddSingleton<IRuntimeAttestationVerifier, RuntimeAttestationVerifier>();
|
||||
builder.Services.AddSingleton<ILinksetResolver, LinksetResolver>();
|
||||
builder.Services.AddSingleton<IRuntimePolicyService, RuntimePolicyService>();
|
||||
|
||||
var pluginHostOptions = ScannerPluginHostFactory.Build(bootstrapOptions, contentRoot);
|
||||
@@ -429,3 +433,36 @@ internal sealed class SurfaceCacheOptionsConfigurator : IConfigureOptions<Surfac
        options.RootDirectory = settings.CacheRoot.FullName;
    }
}

builder.Services.Configure<ConcelierLinksetOptions>(builder.Configuration.GetSection(ConcelierLinksetOptions.SectionName));

builder.Services.AddHttpClient<ConcelierHttpLinksetQueryService>((sp, client) =>
{
    var options = sp.GetRequiredService<IOptions<ConcelierLinksetOptions>>().Value;
    if (!string.IsNullOrWhiteSpace(options.BaseUrl))
    {
        client.BaseAddress = new Uri(options.BaseUrl);
    }

    client.Timeout = TimeSpan.FromSeconds(Math.Max(1, options.TimeoutSeconds));

    if (!string.IsNullOrWhiteSpace(options.ApiKey))
    {
        var header = string.IsNullOrWhiteSpace(options.ApiKeyHeader) ? "Authorization" : options.ApiKeyHeader;
        client.DefaultRequestHeaders.TryAddWithoutValidation(header, options.ApiKey);
    }
})
.ConfigurePrimaryHttpMessageHandler(() => new HttpClientHandler
{
    AutomaticDecompression = System.Net.DecompressionMethods.All
});

builder.Services.AddSingleton<IAdvisoryLinksetQueryService>(sp =>
{
    var options = sp.GetRequiredService<IOptions<ConcelierLinksetOptions>>().Value;
    if (options.Enabled && !string.IsNullOrWhiteSpace(options.BaseUrl))
    {
        return sp.GetRequiredService<ConcelierHttpLinksetQueryService>();
    }

    return new NullAdvisoryLinksetQueryService();
});
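A minimal sketch of the configuration the registration above binds, using in-memory keys under the scanner:concelier section declared by ConcelierLinksetOptions; the URL and token values are placeholders, and the Microsoft.Extensions.Configuration binder is assumed. When Enabled is false or BaseUrl is blank, the singleton factory above falls back to NullAdvisoryLinksetQueryService.

using System;
using System.Collections.Generic;
using Microsoft.Extensions.Configuration;
using StellaOps.Scanner.WebService.Options;

// Keys mirror the ConcelierLinksetOptions properties; values are illustrative only.
var configuration = new ConfigurationBuilder()
    .AddInMemoryCollection(new Dictionary<string, string?>
    {
        ["scanner:concelier:Enabled"] = "true",
        ["scanner:concelier:BaseUrl"] = "https://concelier.example.internal",  // placeholder endpoint
        ["scanner:concelier:ApiKey"] = "example-token",                        // placeholder secret
        ["scanner:concelier:ApiKeyHeader"] = "Authorization",
        ["scanner:concelier:TimeoutSeconds"] = "10"
    })
    .Build();

var options = configuration.GetSection(ConcelierLinksetOptions.SectionName).Get<ConcelierLinksetOptions>();
Console.WriteLine($"enabled={options?.Enabled} baseUrl={options?.BaseUrl} timeout={options?.TimeoutSeconds}s");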
@@ -0,0 +1,171 @@
|
||||
using System.Net.Http.Json;
|
||||
using System.Text.Json;
|
||||
using System.Text.Json.Serialization;
|
||||
using StellaOps.Concelier.Core.Linksets;
|
||||
using StellaOps.Scanner.WebService.Options;
|
||||
using Microsoft.Extensions.Options;
|
||||
|
||||
namespace StellaOps.Scanner.WebService.Services;
|
||||
|
||||
internal sealed class ConcelierHttpLinksetQueryService : IAdvisoryLinksetQueryService
|
||||
{
|
||||
private readonly HttpClient _client;
|
||||
private readonly ConcelierLinksetOptions _options;
|
||||
|
||||
private static readonly JsonSerializerOptions SerializerOptions = new(JsonSerializerDefaults.Web)
|
||||
{
|
||||
DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingNull,
|
||||
PropertyNameCaseInsensitive = true
|
||||
};
|
||||
|
||||
public ConcelierHttpLinksetQueryService(HttpClient client, IOptions<ConcelierLinksetOptions> options)
|
||||
{
|
||||
_client = client ?? throw new ArgumentNullException(nameof(client));
|
||||
_options = options?.Value ?? throw new ArgumentNullException(nameof(options));
|
||||
}
|
||||
|
||||
public async Task<AdvisoryLinksetQueryResult> QueryAsync(AdvisoryLinksetQueryOptions options, CancellationToken cancellationToken)
|
||||
{
|
||||
ArgumentNullException.ThrowIfNull(options);
|
||||
cancellationToken.ThrowIfCancellationRequested();
|
||||
|
||||
if (options.AdvisoryIds is null)
|
||||
{
|
||||
return new AdvisoryLinksetQueryResult(ImmutableArray<AdvisoryLinkset>.Empty, null, false);
|
||||
}
|
||||
|
||||
var results = ImmutableArray.CreateBuilder<AdvisoryLinkset>();
|
||||
foreach (var advisoryId in options.AdvisoryIds)
|
||||
{
|
||||
if (string.IsNullOrWhiteSpace(advisoryId))
|
||||
{
|
||||
continue;
|
||||
}
|
||||
|
||||
var path = $"/v1/lnm/linksets/{Uri.EscapeDataString(advisoryId)}?tenant={Uri.EscapeDataString(options.Tenant)}&includeConflicts=true&includeObservations=false&includeTimeline=false";
|
||||
try
|
||||
{
|
||||
using var response = await _client.GetAsync(path, cancellationToken).ConfigureAwait(false);
|
||||
if (!response.IsSuccessStatusCode)
|
||||
{
|
||||
continue;
|
||||
}
|
||||
|
||||
var payload = await response.Content.ReadFromJsonAsync<LinksetDetailResponse>(SerializerOptions, cancellationToken).ConfigureAwait(false);
|
||||
if (payload?.Linksets is null || payload.Linksets.Length == 0)
|
||||
{
|
||||
continue;
|
||||
}
|
||||
|
||||
foreach (var linkset in payload.Linksets)
|
||||
{
|
||||
results.Add(Map(linkset));
|
||||
}
|
||||
}
|
||||
catch (OperationCanceledException) when (cancellationToken.IsCancellationRequested)
|
||||
{
|
||||
throw;
|
||||
}
|
||||
catch
|
||||
{
|
||||
// swallow and continue; caller will see partial results
|
||||
}
|
||||
}
|
||||
|
||||
var linksets = results.ToImmutable();
|
||||
return new AdvisoryLinksetQueryResult(linksets, null, false);
|
||||
}
|
||||
|
||||
private static AdvisoryLinkset Map(LinksetDto dto)
|
||||
{
|
||||
var normalized = dto.Normalized is null
|
||||
? null
|
||||
: new AdvisoryLinksetNormalized(
|
||||
dto.Normalized.Purls,
|
||||
dto.Normalized.Cpes,
|
||||
dto.Normalized.Versions,
|
||||
dto.Normalized.Ranges,
|
||||
dto.Normalized.Severities);
|
||||
|
||||
var conflicts = dto.Conflicts is null
|
||||
? null
|
||||
: dto.Conflicts.Select(c => new AdvisoryLinksetConflict(c.Field, c.Reason ?? string.Empty, c.Values, c.SourceIds)).ToList();
|
||||
|
||||
return new AdvisoryLinkset(
|
||||
TenantId: dto.Tenant ?? string.Empty,
|
||||
Source: dto.Source ?? string.Empty,
|
||||
AdvisoryId: dto.AdvisoryId ?? string.Empty,
|
||||
ObservationIds: dto.ObservationIds?.ToImmutableArray() ?? ImmutableArray<string>.Empty,
|
||||
Normalized: normalized,
|
||||
Provenance: null,
|
||||
Confidence: dto.Confidence,
|
||||
Conflicts: conflicts,
|
||||
CreatedAt: dto.CreatedAt ?? DateTimeOffset.MinValue,
|
||||
BuiltByJobId: dto.BuiltByJobId);
|
||||
}
|
||||
|
||||
private sealed record LinksetDetailResponse([property: JsonPropertyName("linksets")] LinksetDto[] Linksets);
|
||||
|
||||
private sealed record LinksetDto
|
||||
{
|
||||
[JsonPropertyName("advisoryId")]
|
||||
public string? AdvisoryId { get; init; }
|
||||
|
||||
[JsonPropertyName("source")]
|
||||
public string? Source { get; init; }
|
||||
|
||||
[JsonPropertyName("tenant")]
|
||||
public string? Tenant { get; init; }
|
||||
|
||||
[JsonPropertyName("confidence")]
|
||||
public double? Confidence { get; init; }
|
||||
|
||||
[JsonPropertyName("createdAt")]
|
||||
public DateTimeOffset? CreatedAt { get; init; }
|
||||
|
||||
[JsonPropertyName("builtByJobId")]
|
||||
public string? BuiltByJobId { get; init; }
|
||||
|
||||
[JsonPropertyName("observationIds")]
|
||||
public string[]? ObservationIds { get; init; }
|
||||
|
||||
[JsonPropertyName("normalized")]
|
||||
public LinksetNormalizedDto? Normalized { get; init; }
|
||||
|
||||
[JsonPropertyName("conflicts")]
|
||||
public LinksetConflictDto[]? Conflicts { get; init; }
|
||||
}
|
||||
|
||||
private sealed record LinksetNormalizedDto
|
||||
{
|
||||
[JsonPropertyName("purls")]
|
||||
public IReadOnlyList<string>? Purls { get; init; }
|
||||
|
||||
[JsonPropertyName("cpes")]
|
||||
public IReadOnlyList<string>? Cpes { get; init; }
|
||||
|
||||
[JsonPropertyName("versions")]
|
||||
public IReadOnlyList<string>? Versions { get; init; }
|
||||
|
||||
[JsonPropertyName("ranges")]
|
||||
public IReadOnlyList<Dictionary<string, object?>>? Ranges { get; init; }
|
||||
|
||||
[JsonPropertyName("severities")]
|
||||
public IReadOnlyList<Dictionary<string, object?>>? Severities { get; init; }
|
||||
}
|
||||
|
||||
private sealed record LinksetConflictDto
|
||||
{
|
||||
[JsonPropertyName("field")]
|
||||
public string Field { get; init; } = string.Empty;
|
||||
|
||||
[JsonPropertyName("reason")]
|
||||
public string? Reason { get; init; }
|
||||
|
||||
[JsonPropertyName("values")]
|
||||
public IReadOnlyList<string>? Values { get; init; }
|
||||
|
||||
[JsonPropertyName("sourceIds")]
|
||||
public IReadOnlyList<string>? SourceIds { get; init; }
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,181 @@
|
||||
using System.Collections.Immutable;
|
||||
using System.Globalization;
|
||||
using Microsoft.Extensions.Logging;
|
||||
using StellaOps.Concelier.Core.Linksets;
|
||||
using StellaOps.Scanner.Surface.Env;
|
||||
using StellaOps.Scanner.WebService.Contracts;
|
||||
|
||||
namespace StellaOps.Scanner.WebService.Services;
|
||||
|
||||
internal interface ILinksetResolver
|
||||
{
|
||||
Task<IReadOnlyList<LinksetSummaryDto>> ResolveAsync(IEnumerable<PolicyPreviewFindingDto>? findings, CancellationToken cancellationToken);
|
||||
Task<IReadOnlyList<LinksetSummaryDto>> ResolveAsync(IEnumerable<StellaOps.Policy.PolicyVerdict> verdicts, CancellationToken cancellationToken);
|
||||
Task<IReadOnlyList<LinksetSummaryDto>> ResolveByAdvisoryIdsAsync(IEnumerable<string> advisoryIds, CancellationToken cancellationToken);
|
||||
}
|
||||
|
||||
internal sealed class LinksetResolver : ILinksetResolver
|
||||
{
|
||||
private readonly IAdvisoryLinksetQueryService _queryService;
|
||||
private readonly ISurfaceEnvironment _surfaceEnvironment;
|
||||
private readonly ILogger<LinksetResolver> _logger;
|
||||
|
||||
public LinksetResolver(
|
||||
IAdvisoryLinksetQueryService queryService,
|
||||
ISurfaceEnvironment surfaceEnvironment,
|
||||
ILogger<LinksetResolver> logger)
|
||||
{
|
||||
_queryService = queryService ?? throw new ArgumentNullException(nameof(queryService));
|
||||
_surfaceEnvironment = surfaceEnvironment ?? throw new ArgumentNullException(nameof(surfaceEnvironment));
|
||||
_logger = logger ?? throw new ArgumentNullException(nameof(logger));
|
||||
}
|
||||
|
||||
public Task<IReadOnlyList<LinksetSummaryDto>> ResolveAsync(IEnumerable<PolicyPreviewFindingDto>? findings, CancellationToken cancellationToken)
|
||||
{
|
||||
var advisoryIds = findings?
|
||||
.SelectMany(f => new[] { f?.Id, f?.Cve })
|
||||
.Where(id => !string.IsNullOrWhiteSpace(id))
|
||||
.Select(id => id!.Trim())
|
||||
.Distinct(StringComparer.OrdinalIgnoreCase)
|
||||
.ToArray() ?? Array.Empty<string>();
|
||||
|
||||
return ResolveInternalAsync(advisoryIds, cancellationToken);
|
||||
}
|
||||
|
||||
public Task<IReadOnlyList<LinksetSummaryDto>> ResolveAsync(IEnumerable<StellaOps.Policy.PolicyVerdict> verdicts, CancellationToken cancellationToken)
|
||||
{
|
||||
var advisoryIds = verdicts?
|
||||
.Where(v => !string.IsNullOrWhiteSpace(v.FindingId))
|
||||
.Select(v => v.FindingId!.Trim())
|
||||
.Distinct(StringComparer.OrdinalIgnoreCase)
|
||||
.ToArray() ?? Array.Empty<string>();
|
||||
|
||||
return ResolveInternalAsync(advisoryIds, cancellationToken);
|
||||
}
|
||||
|
||||
public Task<IReadOnlyList<LinksetSummaryDto>> ResolveByAdvisoryIdsAsync(IEnumerable<string> advisoryIds, CancellationToken cancellationToken)
|
||||
{
|
||||
var normalized = advisoryIds?
|
||||
.Where(id => !string.IsNullOrWhiteSpace(id))
|
||||
.Select(id => id.Trim())
|
||||
.Distinct(StringComparer.OrdinalIgnoreCase)
|
||||
.ToArray() ?? Array.Empty<string>();
|
||||
|
||||
return ResolveInternalAsync(normalized, cancellationToken);
|
||||
}
|
||||
|
||||
private async Task<IReadOnlyList<LinksetSummaryDto>> ResolveInternalAsync(IReadOnlyList<string> advisoryIds, CancellationToken cancellationToken)
|
||||
{
|
||||
cancellationToken.ThrowIfCancellationRequested();
|
||||
|
||||
if (advisoryIds.Count == 0)
|
||||
{
|
||||
return Array.Empty<LinksetSummaryDto>();
|
||||
}
|
||||
|
||||
var tenant = string.IsNullOrWhiteSpace(_surfaceEnvironment.Settings.Tenant)
|
||||
? "default"
|
||||
: _surfaceEnvironment.Settings.Tenant.Trim();
|
||||
|
||||
try
|
||||
{
|
||||
var options = new AdvisoryLinksetQueryOptions(tenant, advisoryIds, Sources: null, Limit: advisoryIds.Count);
|
||||
var result = await _queryService.QueryAsync(options, cancellationToken).ConfigureAwait(false);
|
||||
|
||||
if (result.Linksets.IsDefaultOrEmpty)
|
||||
{
|
||||
return Array.Empty<LinksetSummaryDto>();
|
||||
}
|
||||
|
||||
return result.Linksets
|
||||
.Select(MapSummary)
|
||||
.OrderBy(ls => ls.AdvisoryId, StringComparer.Ordinal)
|
||||
.ToArray();
|
||||
}
|
||||
catch (Exception ex) when (!cancellationToken.IsCancellationRequested)
|
||||
{
|
||||
_logger.LogWarning(ex, "Failed to resolve linksets for {Count} advisories (tenant={Tenant}).", advisoryIds.Count, tenant);
|
||||
return Array.Empty<LinksetSummaryDto>();
|
||||
}
|
||||
}
|
||||
|
||||
private static LinksetSummaryDto MapSummary(AdvisoryLinkset linkset)
|
||||
{
|
||||
var severities = linkset.Normalized?.Severities?.Select(MapSeverity).ToArray();
|
||||
var conflicts = linkset.Conflicts?.Select(MapConflict).ToArray();
|
||||
|
||||
return new LinksetSummaryDto
|
||||
{
|
||||
AdvisoryId = linkset.AdvisoryId,
|
||||
Source = linkset.Source,
|
||||
Confidence = linkset.Confidence,
|
||||
ObservationIds = linkset.ObservationIds.Length > 0 ? linkset.ObservationIds : null,
|
||||
References = null,
|
||||
Severities = severities?.Length > 0 ? severities : null,
|
||||
Conflicts = conflicts?.Length > 0 ? conflicts : null
|
||||
};
|
||||
}
|
||||
|
||||
private static LinksetSeverityDto MapSeverity(Dictionary<string, object?> payload)
|
||||
{
|
||||
payload ??= new Dictionary<string, object?>(StringComparer.Ordinal);
|
||||
|
||||
string? GetString(string key)
|
||||
=> payload.TryGetValue(key, out var value) ? value?.ToString() : null;
|
||||
|
||||
double? GetDouble(string key)
|
||||
{
|
||||
if (!payload.TryGetValue(key, out var value) || value is null)
|
||||
{
|
||||
return null;
|
||||
}
|
||||
|
||||
if (value is double d)
|
||||
{
|
||||
return d;
|
||||
}
|
||||
|
||||
if (value is float f)
|
||||
{
|
||||
return Convert.ToDouble(f, CultureInfo.InvariantCulture);
|
||||
}
|
||||
|
||||
if (double.TryParse(value.ToString(), NumberStyles.Float, CultureInfo.InvariantCulture, out var parsed))
|
||||
{
|
||||
return parsed;
|
||||
}
|
||||
|
||||
return null;
|
||||
}
|
||||
|
||||
var labels = payload.TryGetValue("labels", out var labelsValue) && labelsValue is Dictionary<string, object?> labelsDict
|
||||
? labelsDict.ToDictionary(kv => kv.Key, kv => kv.Value?.ToString() ?? string.Empty, StringComparer.Ordinal)
|
||||
: null;
|
||||
|
||||
var raw = payload.Count == 0
|
||||
? null
|
||||
: payload.ToDictionary(kv => kv.Key, kv => kv.Value, StringComparer.Ordinal);
|
||||
|
||||
return new LinksetSeverityDto
|
||||
{
|
||||
Source = GetString("source"),
|
||||
Type = GetString("type"),
|
||||
Score = GetDouble("score"),
|
||||
Vector = GetString("vector"),
|
||||
Origin = GetString("origin"),
|
||||
Labels = labels,
|
||||
Raw = raw
|
||||
};
|
||||
}
|
||||
|
||||
private static LinksetConflictDto MapConflict(AdvisoryLinksetConflict conflict)
|
||||
{
|
||||
return new LinksetConflictDto
|
||||
{
|
||||
Field = conflict.Field,
|
||||
Reason = conflict.Reason,
|
||||
Values = conflict.Values,
|
||||
SourceIds = conflict.SourceIds
|
||||
};
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,14 @@
using System.Collections.Immutable;
using StellaOps.Concelier.Core.Linksets;

namespace StellaOps.Scanner.WebService.Services;

internal sealed class NullAdvisoryLinksetQueryService : IAdvisoryLinksetQueryService
{
    public Task<AdvisoryLinksetQueryResult> QueryAsync(AdvisoryLinksetQueryOptions options, CancellationToken cancellationToken)
    {
        ArgumentNullException.ThrowIfNull(options);
        cancellationToken.ThrowIfCancellationRequested();
        return Task.FromResult(new AdvisoryLinksetQueryResult(ImmutableArray<AdvisoryLinkset>.Empty, null, false));
    }
}
@@ -7,13 +7,14 @@ using System.Linq;
|
||||
using System.Globalization;
|
||||
using System.Text.Json;
|
||||
using System.Text.Json.Serialization;
|
||||
using Microsoft.Extensions.Logging;
|
||||
using Microsoft.Extensions.Options;
|
||||
using StellaOps.Policy;
|
||||
using StellaOps.Scanner.Storage.Catalog;
|
||||
using StellaOps.Scanner.Storage.Repositories;
|
||||
using StellaOps.Scanner.WebService.Options;
|
||||
using StellaOps.Zastava.Core.Contracts;
|
||||
using Microsoft.Extensions.Logging;
|
||||
using Microsoft.Extensions.Options;
|
||||
using StellaOps.Policy;
|
||||
using StellaOps.Scanner.Storage.Catalog;
|
||||
using StellaOps.Scanner.Storage.Repositories;
|
||||
using StellaOps.Scanner.WebService.Contracts;
|
||||
using StellaOps.Scanner.WebService.Options;
|
||||
using StellaOps.Zastava.Core.Contracts;
|
||||
using RuntimePolicyVerdict = StellaOps.Zastava.Core.Contracts.PolicyVerdict;
|
||||
using CanonicalPolicyVerdict = StellaOps.Policy.PolicyVerdict;
|
||||
using CanonicalPolicyVerdictStatus = StellaOps.Policy.PolicyVerdictStatus;
|
||||
@@ -35,35 +36,38 @@ internal sealed class RuntimePolicyService : IRuntimePolicyService
|
||||
|
||||
private readonly LinkRepository _linkRepository;
|
||||
private readonly ArtifactRepository _artifactRepository;
|
||||
private readonly RuntimeEventRepository _runtimeEventRepository;
|
||||
private readonly PolicySnapshotStore _policySnapshotStore;
|
||||
private readonly PolicyPreviewService _policyPreviewService;
|
||||
private readonly IOptionsMonitor<ScannerWebServiceOptions> _optionsMonitor;
|
||||
private readonly TimeProvider _timeProvider;
|
||||
private readonly IRuntimeAttestationVerifier _attestationVerifier;
|
||||
private readonly ILogger<RuntimePolicyService> _logger;
|
||||
private readonly RuntimeEventRepository _runtimeEventRepository;
|
||||
private readonly PolicySnapshotStore _policySnapshotStore;
|
||||
private readonly PolicyPreviewService _policyPreviewService;
|
||||
private readonly ILinksetResolver _linksetResolver;
|
||||
private readonly IOptionsMonitor<ScannerWebServiceOptions> _optionsMonitor;
|
||||
private readonly TimeProvider _timeProvider;
|
||||
private readonly IRuntimeAttestationVerifier _attestationVerifier;
|
||||
private readonly ILogger<RuntimePolicyService> _logger;
|
||||
|
||||
public RuntimePolicyService(
|
||||
LinkRepository linkRepository,
|
||||
ArtifactRepository artifactRepository,
|
||||
RuntimeEventRepository runtimeEventRepository,
|
||||
PolicySnapshotStore policySnapshotStore,
|
||||
PolicyPreviewService policyPreviewService,
|
||||
IOptionsMonitor<ScannerWebServiceOptions> optionsMonitor,
|
||||
TimeProvider timeProvider,
|
||||
IRuntimeAttestationVerifier attestationVerifier,
|
||||
ILogger<RuntimePolicyService> logger)
|
||||
{
|
||||
_linkRepository = linkRepository ?? throw new ArgumentNullException(nameof(linkRepository));
|
||||
_artifactRepository = artifactRepository ?? throw new ArgumentNullException(nameof(artifactRepository));
|
||||
_runtimeEventRepository = runtimeEventRepository ?? throw new ArgumentNullException(nameof(runtimeEventRepository));
|
||||
_policySnapshotStore = policySnapshotStore ?? throw new ArgumentNullException(nameof(policySnapshotStore));
|
||||
_policyPreviewService = policyPreviewService ?? throw new ArgumentNullException(nameof(policyPreviewService));
|
||||
_optionsMonitor = optionsMonitor ?? throw new ArgumentNullException(nameof(optionsMonitor));
|
||||
_timeProvider = timeProvider ?? throw new ArgumentNullException(nameof(timeProvider));
|
||||
_attestationVerifier = attestationVerifier ?? throw new ArgumentNullException(nameof(attestationVerifier));
|
||||
_logger = logger ?? throw new ArgumentNullException(nameof(logger));
|
||||
}
|
||||
LinkRepository linkRepository,
|
||||
ArtifactRepository artifactRepository,
|
||||
RuntimeEventRepository runtimeEventRepository,
|
||||
PolicySnapshotStore policySnapshotStore,
|
||||
PolicyPreviewService policyPreviewService,
|
||||
ILinksetResolver linksetResolver,
|
||||
IOptionsMonitor<ScannerWebServiceOptions> optionsMonitor,
|
||||
TimeProvider timeProvider,
|
||||
IRuntimeAttestationVerifier attestationVerifier,
|
||||
ILogger<RuntimePolicyService> logger)
|
||||
{
|
||||
_linkRepository = linkRepository ?? throw new ArgumentNullException(nameof(linkRepository));
|
||||
_artifactRepository = artifactRepository ?? throw new ArgumentNullException(nameof(artifactRepository));
|
||||
_runtimeEventRepository = runtimeEventRepository ?? throw new ArgumentNullException(nameof(runtimeEventRepository));
|
||||
_policySnapshotStore = policySnapshotStore ?? throw new ArgumentNullException(nameof(policySnapshotStore));
|
||||
_policyPreviewService = policyPreviewService ?? throw new ArgumentNullException(nameof(policyPreviewService));
|
||||
_linksetResolver = linksetResolver ?? throw new ArgumentNullException(nameof(linksetResolver));
|
||||
_optionsMonitor = optionsMonitor ?? throw new ArgumentNullException(nameof(optionsMonitor));
|
||||
_timeProvider = timeProvider ?? throw new ArgumentNullException(nameof(timeProvider));
|
||||
_attestationVerifier = attestationVerifier ?? throw new ArgumentNullException(nameof(attestationVerifier));
|
||||
_logger = logger ?? throw new ArgumentNullException(nameof(logger));
|
||||
}
|
||||
|
||||
public async Task<RuntimePolicyEvaluationResult> EvaluateAsync(RuntimePolicyEvaluationRequest request, CancellationToken cancellationToken)
|
||||
{
|
||||
@@ -118,13 +122,14 @@ internal sealed class RuntimePolicyService : IRuntimePolicyService
|
||||
heuristicReasons.Add("policy.snapshot.missing");
|
||||
}
|
||||
|
||||
ImmutableArray<CanonicalPolicyVerdict> projectedVerdicts = ImmutableArray<CanonicalPolicyVerdict>.Empty;
|
||||
ImmutableArray<PolicyIssue> issues = ImmutableArray<PolicyIssue>.Empty;
|
||||
|
||||
try
|
||||
{
|
||||
if (!findings.IsDefaultOrEmpty && findings.Length > 0)
|
||||
{
|
||||
ImmutableArray<CanonicalPolicyVerdict> projectedVerdicts = ImmutableArray<CanonicalPolicyVerdict>.Empty;
|
||||
ImmutableArray<PolicyIssue> issues = ImmutableArray<PolicyIssue>.Empty;
|
||||
IReadOnlyList<LinksetSummaryDto> linksets = Array.Empty<LinksetSummaryDto>();
|
||||
|
||||
try
|
||||
{
|
||||
if (!findings.IsDefaultOrEmpty && findings.Length > 0)
|
||||
{
|
||||
var previewRequest = new PolicyPreviewRequest(
|
||||
image,
|
||||
findings,
|
||||
@@ -133,14 +138,15 @@ internal sealed class RuntimePolicyService : IRuntimePolicyService
|
||||
ProposedPolicy: null);
|
||||
|
||||
var preview = await _policyPreviewService.PreviewAsync(previewRequest, cancellationToken).ConfigureAwait(false);
|
||||
issues = preview.Issues;
|
||||
if (!preview.Diffs.IsDefaultOrEmpty)
|
||||
{
|
||||
projectedVerdicts = preview.Diffs.Select(diff => diff.Projected).ToImmutableArray();
|
||||
}
|
||||
}
|
||||
}
|
||||
catch (Exception ex) when (!cancellationToken.IsCancellationRequested)
|
||||
issues = preview.Issues;
|
||||
if (!preview.Diffs.IsDefaultOrEmpty)
|
||||
{
|
||||
projectedVerdicts = preview.Diffs.Select(diff => diff.Projected).ToImmutableArray();
|
||||
linksets = await _linksetResolver.ResolveAsync(projectedVerdicts, cancellationToken).ConfigureAwait(false);
|
||||
}
|
||||
}
|
||||
}
|
||||
catch (Exception ex) when (!cancellationToken.IsCancellationRequested)
|
||||
{
|
||||
_logger.LogWarning(ex, "Runtime policy preview failed for image {ImageDigest}; falling back to heuristic evaluation.", image);
|
||||
}
|
||||
@@ -151,12 +157,13 @@ internal sealed class RuntimePolicyService : IRuntimePolicyService
|
||||
var decision = await BuildDecisionAsync(
|
||||
image,
|
||||
metadata,
|
||||
heuristicReasons,
|
||||
projectedVerdicts,
|
||||
issues,
|
||||
policyDigest,
|
||||
buildIdObservation?.BuildIds,
|
||||
cancellationToken).ConfigureAwait(false);
|
||||
heuristicReasons,
|
||||
projectedVerdicts,
|
||||
issues,
|
||||
policyDigest,
|
||||
linksets,
|
||||
buildIdObservation?.BuildIds,
|
||||
cancellationToken).ConfigureAwait(false);
|
||||
|
||||
results[image] = decision;
|
||||
|
||||
@@ -279,12 +286,13 @@ internal sealed class RuntimePolicyService : IRuntimePolicyService
|
||||
private async Task<RuntimePolicyImageDecision> BuildDecisionAsync(
|
||||
string imageDigest,
|
||||
RuntimeImageMetadata metadata,
|
||||
List<string> heuristicReasons,
|
||||
ImmutableArray<CanonicalPolicyVerdict> projectedVerdicts,
|
||||
ImmutableArray<PolicyIssue> issues,
|
||||
string? policyDigest,
|
||||
IReadOnlyList<string>? buildIds,
|
||||
CancellationToken cancellationToken)
|
||||
List<string> heuristicReasons,
|
||||
ImmutableArray<CanonicalPolicyVerdict> projectedVerdicts,
|
||||
ImmutableArray<PolicyIssue> issues,
|
||||
string? policyDigest,
|
||||
IReadOnlyList<LinksetSummaryDto> linksets,
|
||||
IReadOnlyList<string>? buildIds,
|
||||
CancellationToken cancellationToken)
|
||||
{
|
||||
var reasons = new List<string>(heuristicReasons);
|
||||
|
||||
@@ -330,18 +338,19 @@ internal sealed class RuntimePolicyService : IRuntimePolicyService
|
||||
.Distinct(StringComparer.Ordinal)
|
||||
.ToArray();
|
||||
|
||||
return new RuntimePolicyImageDecision(
|
||||
overallVerdict,
|
||||
metadata.Signed,
|
||||
metadata.HasSbomReferrers,
|
||||
normalizedReasons,
|
||||
rekor,
|
||||
metadataPayload,
|
||||
confidence,
|
||||
quieted,
|
||||
quietedBy,
|
||||
buildIds);
|
||||
}
|
||||
return new RuntimePolicyImageDecision(
|
||||
overallVerdict,
|
||||
metadata.Signed,
|
||||
metadata.HasSbomReferrers,
|
||||
normalizedReasons,
|
||||
rekor,
|
||||
metadataPayload,
|
||||
confidence,
|
||||
quieted,
|
||||
quietedBy,
|
||||
buildIds,
|
||||
linksets);
|
||||
}
|
||||
|
||||
private RuntimePolicyVerdict MapVerdict(ImmutableArray<CanonicalPolicyVerdict> projectedVerdicts, IReadOnlyList<string> heuristicReasons)
|
||||
{
|
||||
@@ -501,17 +510,18 @@ internal sealed record RuntimePolicyEvaluationResult(
|
||||
string? PolicyRevision,
|
||||
IReadOnlyDictionary<string, RuntimePolicyImageDecision> Results);
|
||||
|
||||
internal sealed record RuntimePolicyImageDecision(
|
||||
RuntimePolicyVerdict PolicyVerdict,
|
||||
bool Signed,
|
||||
bool HasSbomReferrers,
|
||||
IReadOnlyList<string> Reasons,
|
||||
RuntimePolicyRekorReference? Rekor,
|
||||
IDictionary<string, object?>? Metadata,
|
||||
double Confidence,
|
||||
bool Quieted,
|
||||
string? QuietedBy,
|
||||
IReadOnlyList<string>? BuildIds);
|
||||
internal sealed record RuntimePolicyImageDecision(
|
||||
RuntimePolicyVerdict PolicyVerdict,
|
||||
bool Signed,
|
||||
bool HasSbomReferrers,
|
||||
IReadOnlyList<string> Reasons,
|
||||
RuntimePolicyRekorReference? Rekor,
|
||||
IDictionary<string, object?>? Metadata,
|
||||
double Confidence,
|
||||
bool Quieted,
|
||||
string? QuietedBy,
|
||||
IReadOnlyList<string>? BuildIds,
|
||||
IReadOnlyList<LinksetSummaryDto> Linksets);
|
||||
|
||||
internal sealed record RuntimePolicyRekorReference(string? Uuid, string? Url, bool? Verified);
|
||||
|
||||
|
||||
@@ -36,5 +36,7 @@
|
||||
<ProjectReference Include="../__Libraries/StellaOps.Scanner.Core/StellaOps.Scanner.Core.csproj" />
|
||||
<ProjectReference Include="../../__Libraries/StellaOps.Replay.Core/StellaOps.Replay.Core.csproj" />
|
||||
<ProjectReference Include="../../Zastava/__Libraries/StellaOps.Zastava.Core/StellaOps.Zastava.Core.csproj" />
|
||||
<ProjectReference Include="../../Concelier/__Libraries/StellaOps.Concelier.Core/StellaOps.Concelier.Core.csproj" />
|
||||
<ProjectReference Include="../../Concelier/__Libraries/StellaOps.Concelier.Connector.Common/StellaOps.Concelier.Connector.Common.csproj" />
|
||||
</ItemGroup>
|
||||
</Project>
|
||||
|
||||
@@ -28,6 +28,8 @@ public sealed class ScannerWorkerOptions

public StellaOpsCryptoOptions Crypto { get; } = new();

public SigningOptions Signing { get; } = new();

public DeterminismOptions Determinism { get; } = new();

public sealed class QueueOptions
@@ -208,4 +210,35 @@ public sealed class ScannerWorkerOptions
/// </summary>
public int? ConcurrencyLimit { get; set; }
}

public sealed class SigningOptions
{
/// <summary>
/// Enable DSSE signing for surface artifacts (composition recipe, layer fragments).
/// When disabled, the worker will fall back to deterministic hash envelopes.
/// </summary>
public bool EnableDsseSigning { get; set; }

/// <summary>
/// Identifier recorded in DSSE signatures.
/// </summary>
public string KeyId { get; set; } = "scanner-hmac";

/// <summary>
/// Shared secret material for HMAC-based DSSE signatures (base64 or hex).
/// Prefer <see cref="SharedSecretFile"/> for file-based loading.
/// </summary>
public string? SharedSecret { get; set; }

/// <summary>
/// Optional path to a file containing the shared secret (base64 or hex).
/// </summary>
public string? SharedSecretFile { get; set; }

/// <summary>
/// Allow deterministic fallback when signing is enabled but no secret is provided.
/// Keeps offline determinism while avoiding hard failures in sealed-mode runs.
/// </summary>
public bool AllowDeterministicFallback { get; set; } = true;
}
}

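A minimal sketch (not part of this commit) of how the new signing options combine, based only on the properties above and the validator behaviour further down; the inline secret value and the secret-file path are illustrative placeholders:

// Sketch only: option combinations the worker's validator accepts.
var options = new ScannerWorkerOptions();
options.Signing.EnableDsseSigning = true;
options.Signing.KeyId = "scanner-hmac";
options.Signing.SharedSecret = "a2V5LXNlY3JldA==";          // base64 secret -> HMAC-SHA256 DSSE signatures
// options.Signing.SharedSecretFile = "/run/secrets/dsse";   // alternative: file-based secret (path is illustrative)
// With no secret and AllowDeterministicFallback = false the validator reports a failure;
// with AllowDeterministicFallback = true (the default) the worker emits deterministic hash envelopes instead.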
@@ -1,7 +1,8 @@
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.Linq;
|
||||
using Microsoft.Extensions.Options;
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.IO;
|
||||
using System.Linq;
|
||||
using Microsoft.Extensions.Options;
|
||||
|
||||
namespace StellaOps.Scanner.Worker.Options;
|
||||
|
||||
@@ -89,11 +90,21 @@ public sealed class ScannerWorkerOptionsValidator : IValidateOptions<ScannerWork
|
||||
}
|
||||
}
|
||||
|
||||
if (options.Shutdown.Timeout < TimeSpan.FromSeconds(5))
|
||||
{
|
||||
failures.Add("Scanner.Worker:Shutdown:Timeout must be at least 5 seconds to allow lease completion.");
|
||||
}
|
||||
|
||||
if (options.Shutdown.Timeout < TimeSpan.FromSeconds(5))
|
||||
{
|
||||
failures.Add("Scanner.Worker:Shutdown:Timeout must be at least 5 seconds to allow lease completion.");
|
||||
}
|
||||
|
||||
if (options.Signing.EnableDsseSigning)
|
||||
{
|
||||
var hasSecret = !string.IsNullOrWhiteSpace(options.Signing.SharedSecret)
|
||||
|| (!string.IsNullOrWhiteSpace(options.Signing.SharedSecretFile) && File.Exists(options.Signing.SharedSecretFile));
|
||||
if (!hasSecret && !options.Signing.AllowDeterministicFallback)
|
||||
{
|
||||
failures.Add("Scanner.Worker:Signing requires SharedSecret or SharedSecretFile when EnableDsseSigning is true and AllowDeterministicFallback is false.");
|
||||
}
|
||||
}
|
||||
|
||||
if (options.Telemetry.EnableTelemetry)
|
||||
{
|
||||
if (!options.Telemetry.EnableMetrics && !options.Telemetry.EnableTracing)
|
||||
|
||||
@@ -0,0 +1,220 @@
|
||||
using System;
|
||||
using System.Buffers.Text;
|
||||
using System.IO;
|
||||
using System.Security.Cryptography;
|
||||
using System.Text;
|
||||
using System.Text.Json;
|
||||
using System.Threading;
|
||||
using System.Threading.Tasks;
|
||||
using Microsoft.Extensions.Logging;
|
||||
using Microsoft.Extensions.Options;
|
||||
using StellaOps.Scanner.Worker.Options;
|
||||
|
||||
namespace StellaOps.Scanner.Worker.Processing.Surface;
|
||||
|
||||
/// <summary>
|
||||
/// DSSE envelope signer that prefers an HMAC key (deterministic) and falls back to
|
||||
/// the deterministic hash-only signer when no key is configured.
|
||||
/// </summary>
|
||||
internal sealed class HmacDsseEnvelopeSigner : IDsseEnvelopeSigner, IDisposable
|
||||
{
|
||||
private readonly ILogger<HmacDsseEnvelopeSigner> _logger;
|
||||
private readonly ScannerWorkerOptions _options;
|
||||
private readonly DeterministicDsseEnvelopeSigner _deterministic = new();
|
||||
private readonly HMACSHA256? _hmac;
|
||||
private readonly string _keyId;
|
||||
|
||||
public HmacDsseEnvelopeSigner(
|
||||
IOptions<ScannerWorkerOptions> options,
|
||||
ILogger<HmacDsseEnvelopeSigner> logger)
|
||||
{
|
||||
ArgumentNullException.ThrowIfNull(options);
|
||||
_logger = logger ?? throw new ArgumentNullException(nameof(logger));
|
||||
_options = options.Value ?? throw new ArgumentNullException(nameof(options));
|
||||
|
||||
var signing = _options.Signing;
|
||||
_keyId = string.IsNullOrWhiteSpace(signing.KeyId) ? "scanner-hmac" : signing.KeyId.Trim();
|
||||
|
||||
if (!signing.EnableDsseSigning)
|
||||
{
|
||||
return;
|
||||
}
|
||||
|
||||
var secretBytes = LoadSecret(signing);
|
||||
if (secretBytes is not null && secretBytes.Length > 0)
|
||||
{
|
||||
_hmac = new HMACSHA256(secretBytes);
|
||||
_logger.LogInformation("DSSE signing enabled using HMAC-SHA256 with key id {KeyId}", _keyId);
|
||||
}
|
||||
else if (!signing.AllowDeterministicFallback)
|
||||
{
|
||||
throw new InvalidOperationException("DSSE signing enabled but no shared secret provided and deterministic fallback is disabled.");
|
||||
}
|
||||
}
|
||||
|
||||
public Task<DsseEnvelope> SignAsync(string payloadType, ReadOnlyMemory<byte> content, string suggestedKind, string merkleRoot, string? view, CancellationToken cancellationToken)
|
||||
{
|
||||
if (_hmac is null)
|
||||
{
|
||||
return _deterministic.SignAsync(payloadType, content, suggestedKind, merkleRoot, view, cancellationToken);
|
||||
}
|
||||
|
||||
var pae = BuildPae(payloadType, content.Span);
|
||||
var signatureBytes = _hmac.ComputeHash(pae);
|
||||
var envelope = new
|
||||
{
|
||||
payloadType,
|
||||
payload = Base64UrlEncode(content.Span),
|
||||
signatures = new[]
|
||||
{
|
||||
new { keyid = _keyId, sig = Base64UrlEncode(signatureBytes) }
|
||||
}
|
||||
};
|
||||
|
||||
var json = JsonSerializer.Serialize(envelope, new JsonSerializerOptions(JsonSerializerDefaults.Web)
|
||||
{
|
||||
WriteIndented = false
|
||||
});
|
||||
|
||||
var bytes = Encoding.UTF8.GetBytes(json);
|
||||
var digest = $"sha256:{ComputeSha256Hex(content.Span)}";
|
||||
var uri = $"cas://attestations/{suggestedKind}/{digest}.json";
|
||||
|
||||
return Task.FromResult(new DsseEnvelope("application/vnd.dsse+json", uri, digest, bytes));
|
||||
}
|
||||
|
||||
public void Dispose()
|
||||
{
|
||||
_hmac?.Dispose();
|
||||
}
|
||||
|
||||
private static byte[]? LoadSecret(ScannerWorkerOptions.SigningOptions signing)
|
||||
{
|
||||
if (!string.IsNullOrWhiteSpace(signing.SharedSecretFile) && File.Exists(signing.SharedSecretFile))
|
||||
{
|
||||
var fileContent = File.ReadAllText(signing.SharedSecretFile).Trim();
|
||||
var fromFile = DecodeFlexible(fileContent);
|
||||
if (fromFile is not null)
|
||||
{
|
||||
return fromFile;
|
||||
}
|
||||
}
|
||||
|
||||
if (!string.IsNullOrWhiteSpace(signing.SharedSecret))
|
||||
{
|
||||
var inline = DecodeFlexible(signing.SharedSecret);
|
||||
if (inline is not null)
|
||||
{
|
||||
return inline;
|
||||
}
|
||||
}
|
||||
|
||||
return null;
|
||||
}
|
||||
|
||||
private static byte[]? DecodeFlexible(string value)
{
// Try base64 (std); a buffer of value.Length bytes always fits the decoded form.
var buffer = new byte[value.Length];
if (Convert.TryFromBase64String(value, buffer, out var written))
{
return buffer[..written];
}

// Try base64url
if (Base64UrlDecode(value) is { } base64Url)
{
return base64Url;
}

// Try hex
if (value.Length % 2 == 0)
{
try
{
var bytes = Convert.FromHexString(value);
return bytes;
}
catch (FormatException)
{
// ignore
}
}

// Fallback to UTF-8 bytes as last resort (deterministic but not recommended)
if (!string.IsNullOrWhiteSpace(value))
{
return Encoding.UTF8.GetBytes(value);
}

return null;
}
|
||||
private static byte[] BuildPae(string payloadType, ReadOnlySpan<byte> payload)
|
||||
{
|
||||
const string prefix = "DSSEv1";
|
||||
var typeBytes = Encoding.UTF8.GetBytes(payloadType);
|
||||
var typeLen = Encoding.UTF8.GetBytes(typeBytes.Length.ToString());
|
||||
var payloadLen = Encoding.UTF8.GetBytes(payload.Length.ToString());
|
||||
|
||||
var total = prefix.Length + 1 + typeLen.Length + 1 + typeBytes.Length + 1 + payloadLen.Length + 1 + payload.Length;
|
||||
var buffer = new byte[total];
|
||||
var offset = 0;
|
||||
|
||||
Encoding.UTF8.GetBytes(prefix, buffer.AsSpan(offset));
|
||||
offset += prefix.Length;
|
||||
buffer[offset++] = 0x20;
|
||||
|
||||
typeLen.CopyTo(buffer.AsSpan(offset));
|
||||
offset += typeLen.Length;
|
||||
buffer[offset++] = 0x20;
|
||||
|
||||
typeBytes.CopyTo(buffer.AsSpan(offset));
|
||||
offset += typeBytes.Length;
|
||||
buffer[offset++] = 0x20;
|
||||
|
||||
payloadLen.CopyTo(buffer.AsSpan(offset));
|
||||
offset += payloadLen.Length;
|
||||
buffer[offset++] = 0x20;
|
||||
|
||||
payload.CopyTo(buffer.AsSpan(offset));
|
||||
return buffer;
|
||||
}
|
||||
|
||||
private static string ComputeSha256Hex(ReadOnlySpan<byte> data)
|
||||
{
|
||||
Span<byte> hash = stackalloc byte[32];
|
||||
SHA256.HashData(data, hash);
|
||||
return Convert.ToHexString(hash).ToLowerInvariant();
|
||||
}
|
||||
|
||||
private static string Base64UrlEncode(ReadOnlySpan<byte> data)
|
||||
{
|
||||
var len = Base64.GetMaxEncodedToUtf8Length(data.Length);
|
||||
Span<byte> buffer = stackalloc byte[len];
|
||||
Base64.EncodeToUtf8(data, buffer, out _, out var written);
|
||||
var encoded = Encoding.UTF8.GetString(buffer[..written]);
|
||||
return encoded.TrimEnd('=').Replace('+', '-').Replace('/', '_');
|
||||
}
|
||||
|
||||
private static byte[]? Base64UrlDecode(string value)
|
||||
{
|
||||
var normalized = value.Replace('-', '+').Replace('_', '/');
|
||||
while (normalized.Length % 4 != 0)
|
||||
{
|
||||
normalized += "=";
|
||||
}
|
||||
|
||||
try
|
||||
{
|
||||
return Convert.FromBase64String(normalized);
|
||||
}
|
||||
catch (FormatException)
|
||||
{
|
||||
return null;
|
||||
}
|
||||
}
|
||||
}
|
||||
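For consumers holding the same shared secret, verifying an envelope emitted by this signer reduces to rebuilding the DSSEv1 PAE and comparing HMAC tags. The following is a minimal sketch that mirrors the envelope shape (payloadType, base64url payload, signatures[].keyid/sig) and the PAE layout used above; it is not part of this commit and the type name is invented for illustration:

using System;
using System.Linq;
using System.Security.Cryptography;
using System.Text;
using System.Text.Json;

internal static class DsseHmacVerification
{
    // Sketch: recompute the DSSEv1 PAE over the decoded payload and compare HMAC-SHA256 tags in constant time.
    public static bool Verify(JsonElement envelope, byte[] sharedSecret)
    {
        var payloadType = envelope.GetProperty("payloadType").GetString()!;
        var payload = DecodeBase64Url(envelope.GetProperty("payload").GetString()!);
        var sig = DecodeBase64Url(envelope.GetProperty("signatures")[0].GetProperty("sig").GetString()!);

        // PAE = "DSSEv1" SP len(payloadType) SP payloadType SP len(payload) SP payload, lengths in decimal.
        var header = Encoding.UTF8.GetBytes(
            $"DSSEv1 {Encoding.UTF8.GetByteCount(payloadType)} {payloadType} {payload.Length} ");
        var message = header.Concat(payload).ToArray();

        using var hmac = new HMACSHA256(sharedSecret);
        return CryptographicOperations.FixedTimeEquals(hmac.ComputeHash(message), sig);
    }

    private static byte[] DecodeBase64Url(string value)
    {
        var normalized = value.Replace('-', '+').Replace('_', '/');
        var padded = normalized.PadRight(((normalized.Length + 3) / 4) * 4, '=');
        return Convert.FromBase64String(padded);
    }
}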
@@ -101,7 +101,7 @@ if (!string.IsNullOrWhiteSpace(connectionString))
|
||||
builder.Services.AddSingleton<IConfigureOptions<ScannerStorageOptions>, ScannerStorageSurfaceSecretConfigurator>();
|
||||
builder.Services.AddSingleton<ISurfaceManifestPublisher, SurfaceManifestPublisher>();
|
||||
builder.Services.AddSingleton<IScanStageExecutor, SurfaceManifestStageExecutor>();
|
||||
builder.Services.AddSingleton<IDsseEnvelopeSigner, DeterministicDsseEnvelopeSigner>();
|
||||
builder.Services.AddSingleton<IDsseEnvelopeSigner, HmacDsseEnvelopeSigner>();
|
||||
}
|
||||
else
|
||||
{
|
||||
|
||||
@@ -0,0 +1,112 @@
|
||||
using System.Collections.Immutable;
|
||||
using System.Linq;
|
||||
using Microsoft.Extensions.Logging.Abstractions;
|
||||
using StellaOps.Concelier.Core.Linksets;
|
||||
using StellaOps.Scanner.Surface.Env;
|
||||
using StellaOps.Scanner.WebService.Contracts;
|
||||
using StellaOps.Scanner.WebService.Services;
|
||||
|
||||
namespace StellaOps.Scanner.WebService.Tests;
|
||||
|
||||
public sealed class LinksetResolverTests
|
||||
{
|
||||
[Fact]
|
||||
public async Task ResolveAsync_MapsSeveritiesAndConflicts()
|
||||
{
|
||||
var linkset = new AdvisoryLinkset(
|
||||
TenantId: "tenant-a",
|
||||
Source: "osv",
|
||||
AdvisoryId: "CVE-2025-0001",
|
||||
ObservationIds: ImmutableArray<string>.Empty,
|
||||
Normalized: new AdvisoryLinksetNormalized(
|
||||
Purls: new[] { "pkg:npm/demo@1.0.0" },
|
||||
Cpes: Array.Empty<string>(),
|
||||
Versions: Array.Empty<string>(),
|
||||
Ranges: Array.Empty<Dictionary<string, object?>>(),
|
||||
Severities: new[]
|
||||
{
|
||||
new Dictionary<string, object?>(StringComparer.Ordinal)
|
||||
{
|
||||
["source"] = "nvd",
|
||||
["type"] = "cvssv3",
|
||||
["score"] = 9.8,
|
||||
["vector"] = "AV:N/AC:L/PR:N/UI:N/S:U/C:H/I:H/A:H",
|
||||
["labels"] = new Dictionary<string, object?> { ["preferred"] = "true" }
|
||||
}
|
||||
}),
|
||||
Provenance: null,
|
||||
Confidence: 0.91,
|
||||
Conflicts: new[] { new AdvisoryLinksetConflict("severity", "disagree", new[] { "cvssv2", "cvssv3" }) },
|
||||
CreatedAt: DateTimeOffset.UtcNow,
|
||||
BuiltByJobId: "job-1");
|
||||
|
||||
var resolver = new LinksetResolver(
|
||||
new FakeLinksetQueryService(linkset),
|
||||
new FakeSurfaceEnvironment(),
|
||||
NullLogger<LinksetResolver>.Instance);
|
||||
|
||||
var result = await resolver.ResolveAsync(new[]
|
||||
{
|
||||
new PolicyPreviewFindingDto { Id = "CVE-2025-0001" }
|
||||
}, CancellationToken.None);
|
||||
|
||||
var summary = Assert.Single(result);
|
||||
Assert.Equal("CVE-2025-0001", summary.AdvisoryId);
|
||||
Assert.Equal("osv", summary.Source);
|
||||
Assert.Equal(0.91, summary.Confidence);
|
||||
|
||||
var severity = Assert.Single(summary.Severities!);
|
||||
Assert.Equal("nvd", severity.Source);
|
||||
Assert.Equal("cvssv3", severity.Type);
|
||||
Assert.Equal(9.8, severity.Score);
|
||||
Assert.Equal("AV:N/AC:L/PR:N/UI:N/S:U/C:H/I:H/A:H", severity.Vector);
|
||||
Assert.NotNull(severity.Labels);
|
||||
|
||||
var conflict = Assert.Single(summary.Conflicts!);
|
||||
Assert.Equal("severity", conflict.Field);
|
||||
Assert.Equal("disagree", conflict.Reason);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task ResolveAsync_ReturnsEmptyWhenNoIds()
|
||||
{
|
||||
var resolver = new LinksetResolver(
|
||||
new FakeLinksetQueryService(),
|
||||
new FakeSurfaceEnvironment(),
|
||||
NullLogger<LinksetResolver>.Instance);
|
||||
|
||||
var result = await resolver.ResolveAsync(Array.Empty<PolicyPreviewFindingDto>(), CancellationToken.None);
|
||||
Assert.Empty(result);
|
||||
}
|
||||
|
||||
private sealed class FakeLinksetQueryService : IAdvisoryLinksetQueryService
|
||||
{
|
||||
private readonly AdvisoryLinkset[] _linksets;
|
||||
|
||||
public FakeLinksetQueryService(params AdvisoryLinkset[] linksets)
|
||||
{
|
||||
_linksets = linksets;
|
||||
}
|
||||
|
||||
public Task<AdvisoryLinksetQueryResult> QueryAsync(AdvisoryLinksetQueryOptions options, CancellationToken cancellationToken)
|
||||
{
|
||||
var matched = _linksets
|
||||
.Where(ls => options.AdvisoryIds?.Contains(ls.AdvisoryId, StringComparer.OrdinalIgnoreCase) == true)
|
||||
.ToImmutableArray();
|
||||
return Task.FromResult(new AdvisoryLinksetQueryResult(matched, null, false));
|
||||
}
|
||||
}
|
||||
|
||||
private sealed class FakeSurfaceEnvironment : ISurfaceEnvironment
|
||||
{
|
||||
public SurfaceEnvironmentSettings Settings { get; } = new()
|
||||
{
|
||||
Tenant = "tenant-a"
|
||||
};
|
||||
|
||||
public IReadOnlyDictionary<string, string> RawVariables { get; } = new Dictionary<string, string>(StringComparer.Ordinal)
|
||||
{
|
||||
["SCANNER__TENANT"] = "tenant-a"
|
||||
};
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,116 @@
|
||||
using System;
|
||||
using System.Text;
|
||||
using System.Text.Json;
|
||||
using System.Threading;
|
||||
using System.Threading.Tasks;
|
||||
using Microsoft.Extensions.Logging.Abstractions;
|
||||
using Microsoft.Extensions.Options;
|
||||
using StellaOps.Scanner.Worker.Options;
|
||||
using StellaOps.Scanner.Worker.Processing.Surface;
|
||||
using Xunit;
|
||||
|
||||
namespace StellaOps.Scanner.Worker.Tests;
|
||||
|
||||
public sealed class HmacDsseEnvelopeSignerTests
|
||||
{
|
||||
[Fact]
|
||||
public async Task SignAsync_UsesHmac_WhenSecretProvided()
|
||||
{
|
||||
var options = BuildOptions(signing =>
|
||||
{
|
||||
signing.EnableDsseSigning = true;
|
||||
signing.SharedSecret = "a2V5LXNlY3JldA=="; // base64("key-secret")
|
||||
signing.KeyId = "scanner-hmac";
|
||||
});
|
||||
|
||||
var signer = new HmacDsseEnvelopeSigner(options, NullLogger<HmacDsseEnvelopeSigner>.Instance);
|
||||
var payload = Encoding.UTF8.GetBytes("{\"hello\":\"world\"}");
|
||||
|
||||
var envelope = await signer.SignAsync("application/json", payload, "test.kind", "root", view: null, CancellationToken.None);
|
||||
|
||||
var json = JsonDocument.Parse(envelope.Content.Span);
|
||||
var sig = json.RootElement.GetProperty("signatures")[0].GetProperty("sig").GetString();
|
||||
|
||||
var expectedSig = ComputeExpectedSignature("application/json", payload, "a2V5LXNlY3JldA==");
|
||||
Assert.Equal(expectedSig, sig);
|
||||
Assert.Equal("application/vnd.dsse+json", envelope.MediaType);
|
||||
Assert.Equal("scanner-hmac", json.RootElement.GetProperty("signatures")[0].GetProperty("keyid").GetString());
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task SignAsync_FallsBackToDeterministic_WhenSecretMissing()
|
||||
{
|
||||
var options = BuildOptions(signing =>
|
||||
{
|
||||
signing.EnableDsseSigning = true;
|
||||
signing.SharedSecret = null;
|
||||
signing.SharedSecretFile = null;
|
||||
signing.AllowDeterministicFallback = true;
|
||||
});
|
||||
|
||||
var signer = new HmacDsseEnvelopeSigner(options, NullLogger<HmacDsseEnvelopeSigner>.Instance);
|
||||
var payload = Encoding.UTF8.GetBytes("abc");
|
||||
|
||||
var envelope = await signer.SignAsync("text/plain", payload, "kind", "root", view: null, CancellationToken.None);
|
||||
var json = JsonDocument.Parse(envelope.Content.Span);
|
||||
var sig = json.RootElement.GetProperty("signatures")[0].GetProperty("sig").GetString();
|
||||
|
||||
// Deterministic signer encodes sha256 hex of payload as signature.
|
||||
var expected = Convert.ToHexString(System.Security.Cryptography.SHA256.HashData(payload)).ToLowerInvariant();
|
||||
var expectedBase64Url = Base64UrlEncode(Encoding.UTF8.GetBytes(expected));
|
||||
Assert.Equal(expectedBase64Url, sig);
|
||||
}
|
||||
|
||||
private static IOptions<ScannerWorkerOptions> BuildOptions(Action<ScannerWorkerOptions.SigningOptions> configure)
|
||||
{
|
||||
var options = new ScannerWorkerOptions();
|
||||
configure(options.Signing);
|
||||
return Microsoft.Extensions.Options.Options.Create(options);
|
||||
}
|
||||
|
||||
private static string ComputeExpectedSignature(string payloadType, byte[] payload, string base64Secret)
|
||||
{
|
||||
var secret = Convert.FromBase64String(base64Secret);
|
||||
using var hmac = new System.Security.Cryptography.HMACSHA256(secret);
|
||||
var pae = BuildPae(payloadType, payload);
|
||||
var signature = hmac.ComputeHash(pae);
|
||||
return Base64UrlEncode(signature);
|
||||
}
|
||||
|
||||
private static byte[] BuildPae(string payloadType, byte[] payload)
|
||||
{
|
||||
const string prefix = "DSSEv1";
|
||||
var typeBytes = Encoding.UTF8.GetBytes(payloadType);
|
||||
var typeLen = Encoding.UTF8.GetBytes(typeBytes.Length.ToString());
|
||||
var payloadLen = Encoding.UTF8.GetBytes(payload.Length.ToString());
|
||||
|
||||
var total = prefix.Length + 1 + typeLen.Length + 1 + typeBytes.Length + 1 + payloadLen.Length + 1 + payload.Length;
|
||||
var buffer = new byte[total];
|
||||
var offset = 0;
|
||||
|
||||
Encoding.UTF8.GetBytes(prefix, buffer.AsSpan(offset));
|
||||
offset += prefix.Length;
|
||||
buffer[offset++] = 0x20;
|
||||
|
||||
typeLen.CopyTo(buffer.AsSpan(offset));
|
||||
offset += typeLen.Length;
|
||||
buffer[offset++] = 0x20;
|
||||
|
||||
typeBytes.CopyTo(buffer.AsSpan(offset));
|
||||
offset += typeBytes.Length;
|
||||
buffer[offset++] = 0x20;
|
||||
|
||||
payloadLen.CopyTo(buffer.AsSpan(offset));
|
||||
offset += payloadLen.Length;
|
||||
buffer[offset++] = 0x20;
|
||||
|
||||
payload.CopyTo(buffer.AsSpan(offset));
|
||||
return buffer;
|
||||
}
|
||||
|
||||
private static string Base64UrlEncode(ReadOnlySpan<byte> data)
|
||||
{
|
||||
var base64 = Convert.ToBase64String(data);
|
||||
return base64.TrimEnd('=').Replace('+', '-').Replace('/', '_');
|
||||
}
|
||||
}
|
||||
@@ -9,10 +9,12 @@ Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "__Libraries", "__Libraries"
|
||||
EndProject
|
||||
Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Scheduler.Models", "__Libraries\StellaOps.Scheduler.Models\StellaOps.Scheduler.Models.csproj", "{382FA1C0-5F5F-424A-8485-7FED0ADE9F6B}"
|
||||
EndProject
|
||||
Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Scheduler.Storage.Mongo", "__Libraries\StellaOps.Scheduler.Storage.Mongo\StellaOps.Scheduler.Storage.Mongo.csproj", "{33770BC5-6802-45AD-A866-10027DD360E2}"
|
||||
EndProject
|
||||
Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Scheduler.ImpactIndex", "__Libraries\StellaOps.Scheduler.ImpactIndex\StellaOps.Scheduler.ImpactIndex.csproj", "{56209C24-3CE7-4F8E-8B8C-F052CB919DE2}"
|
||||
EndProject
|
||||
Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Scheduler.Storage.Mongo", "__Libraries\StellaOps.Scheduler.Storage.Mongo\StellaOps.Scheduler.Storage.Mongo.csproj", "{33770BC5-6802-45AD-A866-10027DD360E2}"
|
||||
EndProject
|
||||
Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Scheduler.Storage.Postgres", "__Libraries\StellaOps.Scheduler.Storage.Postgres\StellaOps.Scheduler.Storage.Postgres.csproj", "{167198F1-43CF-42F4-BEF2-5ABC87116A37}"
|
||||
EndProject
|
||||
Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Scheduler.ImpactIndex", "__Libraries\StellaOps.Scheduler.ImpactIndex\StellaOps.Scheduler.ImpactIndex.csproj", "{56209C24-3CE7-4F8E-8B8C-F052CB919DE2}"
|
||||
EndProject
|
||||
Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Plugin", "..\__Libraries\StellaOps.Plugin\StellaOps.Plugin.csproj", "{2F9CDB3D-7BB5-46B6-A51B-49AB498CC959}"
|
||||
EndProject
|
||||
Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.DependencyInjection", "..\__Libraries\StellaOps.DependencyInjection\StellaOps.DependencyInjection.csproj", "{214ED54A-FA25-4189-9F58-50D11F079ACF}"
|
||||
@@ -36,9 +38,13 @@ EndProject
|
||||
Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Notify.Queue", "..\Notify\__Libraries\StellaOps.Notify.Queue\StellaOps.Notify.Queue.csproj", "{827D179C-A229-439E-A878-4028F30CA670}"
|
||||
EndProject
|
||||
Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Scheduler.Worker.Host", "StellaOps.Scheduler.Worker.Host\StellaOps.Scheduler.Worker.Host.csproj", "{37FA8A12-E96E-4F23-AB72-8FA9DD9DA082}"
|
||||
EndProject
|
||||
Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "__Tests", "__Tests", "{56BCE1BF-7CBA-7CE8-203D-A88051F1D642}"
|
||||
EndProject
|
||||
EndProject
|
||||
Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "Tools", "Tools", "{694D5197-0F28-46B9-BAA2-EFC9825C23D4}"
|
||||
EndProject
|
||||
Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "Scheduler.Backfill", "Tools\Scheduler.Backfill\Scheduler.Backfill.csproj", "{9C1AC284-0561-4E78-9EA8-9B55C3180512}"
|
||||
EndProject
|
||||
Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "__Tests", "__Tests", "{56BCE1BF-7CBA-7CE8-203D-A88051F1D642}"
|
||||
EndProject
|
||||
Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Scheduler.ImpactIndex.Tests", "__Tests\StellaOps.Scheduler.ImpactIndex.Tests\StellaOps.Scheduler.ImpactIndex.Tests.csproj", "{5ED2BF16-72CE-4DF1-917C-6D832427AE6F}"
|
||||
EndProject
|
||||
Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Scanner.Emit", "..\Scanner\__Libraries\StellaOps.Scanner.Emit\StellaOps.Scanner.Emit.csproj", "{11D72DD3-3752-4A6A-AA4A-5298D4FD6FA0}"
|
||||
@@ -57,10 +63,12 @@ Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Scheduler.Queue.T
|
||||
EndProject
|
||||
Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Scheduler.Storage.Mongo.Tests", "__Tests\StellaOps.Scheduler.Storage.Mongo.Tests\StellaOps.Scheduler.Storage.Mongo.Tests.csproj", "{972CEB4D-510B-4701-B4A2-F14A85F11CC7}"
|
||||
EndProject
|
||||
Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Scheduler.WebService.Tests", "__Tests\StellaOps.Scheduler.WebService.Tests\StellaOps.Scheduler.WebService.Tests.csproj", "{7B4C9EAC-316E-4890-A715-7BB9C1577F96}"
|
||||
EndProject
|
||||
Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Scheduler.Worker.Tests", "__Tests\StellaOps.Scheduler.Worker.Tests\StellaOps.Scheduler.Worker.Tests.csproj", "{D640DBB2-4251-44B3-B949-75FC6BF02B71}"
|
||||
EndProject
|
||||
Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Scheduler.WebService.Tests", "__Tests\StellaOps.Scheduler.WebService.Tests\StellaOps.Scheduler.WebService.Tests.csproj", "{7B4C9EAC-316E-4890-A715-7BB9C1577F96}"
|
||||
EndProject
|
||||
Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Scheduler.Backfill.Tests", "__Tests\StellaOps.Scheduler.Backfill.Tests\StellaOps.Scheduler.Backfill.Tests.csproj", "{B13D1DF0-1B9E-4557-919C-0A4E0FC9A8C7}"
|
||||
EndProject
|
||||
Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Scheduler.Worker.Tests", "__Tests\StellaOps.Scheduler.Worker.Tests\StellaOps.Scheduler.Worker.Tests.csproj", "{D640DBB2-4251-44B3-B949-75FC6BF02B71}"
|
||||
EndProject
|
||||
Global
|
||||
GlobalSection(SolutionConfigurationPlatforms) = preSolution
|
||||
Debug|Any CPU = Debug|Any CPU
|
||||
@@ -389,28 +397,67 @@ Global
|
||||
{D640DBB2-4251-44B3-B949-75FC6BF02B71}.Debug|x64.Build.0 = Debug|Any CPU
|
||||
{D640DBB2-4251-44B3-B949-75FC6BF02B71}.Debug|x86.ActiveCfg = Debug|Any CPU
|
||||
{D640DBB2-4251-44B3-B949-75FC6BF02B71}.Debug|x86.Build.0 = Debug|Any CPU
|
||||
{D640DBB2-4251-44B3-B949-75FC6BF02B71}.Release|Any CPU.ActiveCfg = Release|Any CPU
|
||||
{D640DBB2-4251-44B3-B949-75FC6BF02B71}.Release|Any CPU.Build.0 = Release|Any CPU
|
||||
{D640DBB2-4251-44B3-B949-75FC6BF02B71}.Release|x64.ActiveCfg = Release|Any CPU
|
||||
{D640DBB2-4251-44B3-B949-75FC6BF02B71}.Release|x64.Build.0 = Release|Any CPU
|
||||
{D640DBB2-4251-44B3-B949-75FC6BF02B71}.Release|x86.ActiveCfg = Release|Any CPU
|
||||
{D640DBB2-4251-44B3-B949-75FC6BF02B71}.Release|x86.Build.0 = Release|Any CPU
|
||||
EndGlobalSection
|
||||
{D640DBB2-4251-44B3-B949-75FC6BF02B71}.Release|Any CPU.ActiveCfg = Release|Any CPU
|
||||
{D640DBB2-4251-44B3-B949-75FC6BF02B71}.Release|Any CPU.Build.0 = Release|Any CPU
|
||||
{D640DBB2-4251-44B3-B949-75FC6BF02B71}.Release|x64.ActiveCfg = Release|Any CPU
|
||||
{D640DBB2-4251-44B3-B949-75FC6BF02B71}.Release|x64.Build.0 = Release|Any CPU
|
||||
{D640DBB2-4251-44B3-B949-75FC6BF02B71}.Release|x86.ActiveCfg = Release|Any CPU
|
||||
{D640DBB2-4251-44B3-B949-75FC6BF02B71}.Release|x86.Build.0 = Release|Any CPU
|
||||
{167198F1-43CF-42F4-BEF2-5ABC87116A37}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
|
||||
{167198F1-43CF-42F4-BEF2-5ABC87116A37}.Debug|Any CPU.Build.0 = Debug|Any CPU
|
||||
{167198F1-43CF-42F4-BEF2-5ABC87116A37}.Debug|x64.ActiveCfg = Debug|Any CPU
|
||||
{167198F1-43CF-42F4-BEF2-5ABC87116A37}.Debug|x64.Build.0 = Debug|Any CPU
|
||||
{167198F1-43CF-42F4-BEF2-5ABC87116A37}.Debug|x86.ActiveCfg = Debug|Any CPU
|
||||
{167198F1-43CF-42F4-BEF2-5ABC87116A37}.Debug|x86.Build.0 = Debug|Any CPU
|
||||
{167198F1-43CF-42F4-BEF2-5ABC87116A37}.Release|Any CPU.ActiveCfg = Release|Any CPU
|
||||
{167198F1-43CF-42F4-BEF2-5ABC87116A37}.Release|Any CPU.Build.0 = Release|Any CPU
|
||||
{167198F1-43CF-42F4-BEF2-5ABC87116A37}.Release|x64.ActiveCfg = Release|Any CPU
|
||||
{167198F1-43CF-42F4-BEF2-5ABC87116A37}.Release|x64.Build.0 = Release|Any CPU
|
||||
{167198F1-43CF-42F4-BEF2-5ABC87116A37}.Release|x86.ActiveCfg = Release|Any CPU
|
||||
{167198F1-43CF-42F4-BEF2-5ABC87116A37}.Release|x86.Build.0 = Release|Any CPU
|
||||
{9C1AC284-0561-4E78-9EA8-9B55C3180512}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
|
||||
{9C1AC284-0561-4E78-9EA8-9B55C3180512}.Debug|Any CPU.Build.0 = Debug|Any CPU
|
||||
{9C1AC284-0561-4E78-9EA8-9B55C3180512}.Debug|x64.ActiveCfg = Debug|Any CPU
|
||||
{9C1AC284-0561-4E78-9EA8-9B55C3180512}.Debug|x64.Build.0 = Debug|Any CPU
|
||||
{9C1AC284-0561-4E78-9EA8-9B55C3180512}.Debug|x86.ActiveCfg = Debug|Any CPU
|
||||
{9C1AC284-0561-4E78-9EA8-9B55C3180512}.Debug|x86.Build.0 = Debug|Any CPU
|
||||
{9C1AC284-0561-4E78-9EA8-9B55C3180512}.Release|Any CPU.ActiveCfg = Release|Any CPU
|
||||
{9C1AC284-0561-4E78-9EA8-9B55C3180512}.Release|Any CPU.Build.0 = Release|Any CPU
|
||||
{9C1AC284-0561-4E78-9EA8-9B55C3180512}.Release|x64.ActiveCfg = Release|Any CPU
|
||||
{9C1AC284-0561-4E78-9EA8-9B55C3180512}.Release|x64.Build.0 = Release|Any CPU
|
||||
{9C1AC284-0561-4E78-9EA8-9B55C3180512}.Release|x86.ActiveCfg = Release|Any CPU
|
||||
{9C1AC284-0561-4E78-9EA8-9B55C3180512}.Release|x86.Build.0 = Release|Any CPU
|
||||
{B13D1DF0-1B9E-4557-919C-0A4E0FC9A8C7}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
|
||||
{B13D1DF0-1B9E-4557-919C-0A4E0FC9A8C7}.Debug|Any CPU.Build.0 = Debug|Any CPU
|
||||
{B13D1DF0-1B9E-4557-919C-0A4E0FC9A8C7}.Debug|x64.ActiveCfg = Debug|Any CPU
|
||||
{B13D1DF0-1B9E-4557-919C-0A4E0FC9A8C7}.Debug|x64.Build.0 = Debug|Any CPU
|
||||
{B13D1DF0-1B9E-4557-919C-0A4E0FC9A8C7}.Debug|x86.ActiveCfg = Debug|Any CPU
|
||||
{B13D1DF0-1B9E-4557-919C-0A4E0FC9A8C7}.Debug|x86.Build.0 = Debug|Any CPU
|
||||
{B13D1DF0-1B9E-4557-919C-0A4E0FC9A8C7}.Release|Any CPU.ActiveCfg = Release|Any CPU
|
||||
{B13D1DF0-1B9E-4557-919C-0A4E0FC9A8C7}.Release|Any CPU.Build.0 = Release|Any CPU
|
||||
{B13D1DF0-1B9E-4557-919C-0A4E0FC9A8C7}.Release|x64.ActiveCfg = Release|Any CPU
|
||||
{B13D1DF0-1B9E-4557-919C-0A4E0FC9A8C7}.Release|x64.Build.0 = Release|Any CPU
|
||||
{B13D1DF0-1B9E-4557-919C-0A4E0FC9A8C7}.Release|x86.ActiveCfg = Release|Any CPU
|
||||
{B13D1DF0-1B9E-4557-919C-0A4E0FC9A8C7}.Release|x86.Build.0 = Release|Any CPU
|
||||
EndGlobalSection
|
||||
GlobalSection(SolutionProperties) = preSolution
|
||||
HideSolutionNode = FALSE
|
||||
EndGlobalSection
|
||||
GlobalSection(NestedProjects) = preSolution
|
||||
{382FA1C0-5F5F-424A-8485-7FED0ADE9F6B} = {41F15E67-7190-CF23-3BC4-77E87134CADD}
|
||||
{33770BC5-6802-45AD-A866-10027DD360E2} = {41F15E67-7190-CF23-3BC4-77E87134CADD}
|
||||
{56209C24-3CE7-4F8E-8B8C-F052CB919DE2} = {41F15E67-7190-CF23-3BC4-77E87134CADD}
|
||||
{6A62C12A-8742-4D1E-AEA7-8DDC3C722AC4} = {41F15E67-7190-CF23-3BC4-77E87134CADD}
|
||||
{C48F2207-8974-43A4-B3D6-6A1761C37605} = {41F15E67-7190-CF23-3BC4-77E87134CADD}
|
||||
{37FA8A12-E96E-4F23-AB72-8FA9DD9DA082} = {41F15E67-7190-CF23-3BC4-77E87134CADD}
|
||||
{5ED2BF16-72CE-4DF1-917C-6D832427AE6F} = {56BCE1BF-7CBA-7CE8-203D-A88051F1D642}
|
||||
{2F097B4B-8F38-45C3-8A42-90250E912F0C} = {56BCE1BF-7CBA-7CE8-203D-A88051F1D642}
|
||||
{7C22F6B7-095E-459B-BCCF-87098EA9F192} = {56BCE1BF-7CBA-7CE8-203D-A88051F1D642}
|
||||
{972CEB4D-510B-4701-B4A2-F14A85F11CC7} = {56BCE1BF-7CBA-7CE8-203D-A88051F1D642}
|
||||
{7B4C9EAC-316E-4890-A715-7BB9C1577F96} = {56BCE1BF-7CBA-7CE8-203D-A88051F1D642}
|
||||
{D640DBB2-4251-44B3-B949-75FC6BF02B71} = {56BCE1BF-7CBA-7CE8-203D-A88051F1D642}
|
||||
{56209C24-3CE7-4F8E-8B8C-F052CB919DE2} = {41F15E67-7190-CF23-3BC4-77E87134CADD}
|
||||
{167198F1-43CF-42F4-BEF2-5ABC87116A37} = {41F15E67-7190-CF23-3BC4-77E87134CADD}
|
||||
{6A62C12A-8742-4D1E-AEA7-8DDC3C722AC4} = {41F15E67-7190-CF23-3BC4-77E87134CADD}
|
||||
{C48F2207-8974-43A4-B3D6-6A1761C37605} = {41F15E67-7190-CF23-3BC4-77E87134CADD}
|
||||
{37FA8A12-E96E-4F23-AB72-8FA9DD9DA082} = {41F15E67-7190-CF23-3BC4-77E87134CADD}
|
||||
{9C1AC284-0561-4E78-9EA8-9B55C3180512} = {694D5197-0F28-46B9-BAA2-EFC9825C23D4}
|
||||
{5ED2BF16-72CE-4DF1-917C-6D832427AE6F} = {56BCE1BF-7CBA-7CE8-203D-A88051F1D642}
|
||||
{2F097B4B-8F38-45C3-8A42-90250E912F0C} = {56BCE1BF-7CBA-7CE8-203D-A88051F1D642}
|
||||
{7C22F6B7-095E-459B-BCCF-87098EA9F192} = {56BCE1BF-7CBA-7CE8-203D-A88051F1D642}
|
||||
{972CEB4D-510B-4701-B4A2-F14A85F11CC7} = {56BCE1BF-7CBA-7CE8-203D-A88051F1D642}
|
||||
{7B4C9EAC-316E-4890-A715-7BB9C1577F96} = {56BCE1BF-7CBA-7CE8-203D-A88051F1D642}
|
||||
{B13D1DF0-1B9E-4557-919C-0A4E0FC9A8C7} = {56BCE1BF-7CBA-7CE8-203D-A88051F1D642}
|
||||
{D640DBB2-4251-44B3-B949-75FC6BF02B71} = {56BCE1BF-7CBA-7CE8-203D-A88051F1D642}
|
||||
EndGlobalSection
|
||||
EndGlobal
|
||||
|
||||
20
src/Scheduler/Tools/Scheduler.Backfill/BackfillMappings.cs
Normal file
@@ -0,0 +1,20 @@
|
||||
using StellaOps.Scheduler.Models;
|
||||
|
||||
namespace Scheduler.Backfill;
|
||||
|
||||
internal static class BackfillMappings
|
||||
{
|
||||
public static string ToScheduleMode(ScheduleMode mode)
|
||||
=> mode switch
|
||||
{
|
||||
ScheduleMode.AnalysisOnly => "analysisonly",
|
||||
ScheduleMode.ContentRefresh => "contentrefresh",
|
||||
_ => mode.ToString().ToLowerInvariant()
|
||||
};
|
||||
|
||||
public static string ToRunState(RunState state)
|
||||
=> state.ToString().ToLowerInvariant();
|
||||
|
||||
public static string ToRunTrigger(RunTrigger trigger)
|
||||
=> trigger.ToString().ToLowerInvariant();
|
||||
}
|
||||
315
src/Scheduler/Tools/Scheduler.Backfill/Program.cs
Normal file
@@ -0,0 +1,315 @@
|
||||
using System.Text.Json;
|
||||
using MongoDB.Bson;
|
||||
using MongoDB.Bson.Serialization;
|
||||
using MongoDB.Driver;
|
||||
using Npgsql;
|
||||
using Scheduler.Backfill;
|
||||
using StellaOps.Scheduler.Models;
|
||||
using StellaOps.Scheduler.Storage.Mongo.Options;
|
||||
|
||||
var parsed = ParseArgs(args);
|
||||
var options = BackfillOptions.From(parsed.MongoConnection, parsed.MongoDatabase, parsed.PostgresConnection, parsed.BatchSize, parsed.DryRun);
|
||||
|
||||
var runner = new BackfillRunner(options);
|
||||
await runner.RunAsync();
|
||||
return 0;
|
||||
|
||||
static BackfillCliOptions ParseArgs(string[] args)
|
||||
{
|
||||
string? mongo = null;
|
||||
string? mongoDb = null;
|
||||
string? pg = null;
|
||||
int batch = 500;
|
||||
bool dryRun = false;
|
||||
|
||||
for (var i = 0; i < args.Length; i++)
|
||||
{
|
||||
switch (args[i])
|
||||
{
|
||||
case "--mongo" or "-m":
|
||||
mongo = NextValue(args, ref i);
|
||||
break;
|
||||
case "--mongo-db":
|
||||
mongoDb = NextValue(args, ref i);
|
||||
break;
|
||||
case "--pg" or "-p":
|
||||
pg = NextValue(args, ref i);
|
||||
break;
|
||||
case "--batch":
|
||||
batch = int.TryParse(NextValue(args, ref i), out var b) ? b : 500;
|
||||
break;
|
||||
case "--dry-run":
|
||||
dryRun = true;
|
||||
break;
|
||||
default:
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
return new BackfillCliOptions(mongo, mongoDb, pg, batch, dryRun);
|
||||
}
|
||||
|
||||
static string NextValue(string[] args, ref int index)
|
||||
{
|
||||
if (index + 1 >= args.Length)
|
||||
{
|
||||
return string.Empty;
|
||||
}
|
||||
index++;
|
||||
return args[index];
|
||||
}
|
||||
|
||||
internal sealed record BackfillCliOptions(
|
||||
string? MongoConnection,
|
||||
string? MongoDatabase,
|
||||
string? PostgresConnection,
|
||||
int BatchSize,
|
||||
bool DryRun);
|
||||
|
||||
internal sealed record BackfillOptions(
string MongoConnectionString,
string MongoDatabase,
string PostgresConnectionString,
int BatchSize,
bool DryRun)
{
public static BackfillOptions From(string? mongoConn, string? mongoDb, string? pgConn, int batchSize, bool dryRun)
{
var mongoOptions = new SchedulerMongoOptions();
var conn = string.IsNullOrWhiteSpace(mongoConn)
? Environment.GetEnvironmentVariable("MONGO_CONNECTION_STRING") ?? mongoOptions.ConnectionString
: mongoConn;

var database = string.IsNullOrWhiteSpace(mongoDb)
? Environment.GetEnvironmentVariable("MONGO_DATABASE") ?? mongoOptions.Database
: mongoDb!;

// Resolve PostgreSQL from the CLI argument first, then the environment, before giving up.
var pg = string.IsNullOrWhiteSpace(pgConn)
? Environment.GetEnvironmentVariable("POSTGRES_CONNECTION_STRING")
: pgConn;

if (string.IsNullOrWhiteSpace(pg))
{
throw new ArgumentException("PostgreSQL connection string is required (--pg or POSTGRES_CONNECTION_STRING).");
}

return new BackfillOptions(conn, database, pg, Math.Max(50, batchSize), dryRun);
}
}
|
||||
|
||||
internal sealed class BackfillRunner
|
||||
{
|
||||
private readonly BackfillOptions _options;
|
||||
private readonly IMongoDatabase _mongo;
|
||||
private readonly NpgsqlDataSource _pg;
|
||||
|
||||
public BackfillRunner(BackfillOptions options)
|
||||
{
|
||||
_options = options;
|
||||
_mongo = new MongoClient(options.MongoConnectionString).GetDatabase(options.MongoDatabase);
|
||||
_pg = NpgsqlDataSource.Create(options.PostgresConnectionString);
|
||||
}
|
||||
|
||||
public async Task RunAsync()
|
||||
{
|
||||
Console.WriteLine($"Mongo -> Postgres backfill starting (dry-run={_options.DryRun})");
|
||||
await BackfillSchedulesAsync();
|
||||
await BackfillRunsAsync();
|
||||
Console.WriteLine("Backfill complete.");
|
||||
}
|
||||
|
||||
private async Task BackfillSchedulesAsync()
|
||||
{
|
||||
var collection = _mongo.GetCollection<BsonDocument>(new SchedulerMongoOptions().SchedulesCollection);
|
||||
using var cursor = await collection.Find(FilterDefinition<BsonDocument>.Empty).ToCursorAsync();
|
||||
|
||||
var batch = new List<Schedule>(_options.BatchSize);
|
||||
long total = 0;
|
||||
|
||||
while (await cursor.MoveNextAsync())
|
||||
{
|
||||
foreach (var doc in cursor.Current)
|
||||
{
|
||||
var schedule = BsonSerializer.Deserialize<Schedule>(doc);
|
||||
batch.Add(schedule);
|
||||
if (batch.Count >= _options.BatchSize)
|
||||
{
|
||||
total += await PersistSchedulesAsync(batch);
|
||||
batch.Clear();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (batch.Count > 0)
|
||||
{
|
||||
total += await PersistSchedulesAsync(batch);
|
||||
}
|
||||
|
||||
Console.WriteLine($"Schedules backfilled: {total}");
|
||||
}
|
||||
|
||||
private async Task<long> PersistSchedulesAsync(IEnumerable<Schedule> schedules)
|
||||
{
|
||||
if (_options.DryRun)
|
||||
{
|
||||
return schedules.LongCount();
|
||||
}
|
||||
|
||||
await using var conn = await _pg.OpenConnectionAsync();
|
||||
await using var tx = await conn.BeginTransactionAsync();
|
||||
|
||||
const string sql = @"
|
||||
INSERT INTO scheduler.schedules (
|
||||
id, tenant_id, name, description, enabled, cron_expression, timezone, mode,
|
||||
selection, only_if, notify, limits, subscribers, created_at, created_by, updated_at, updated_by, deleted_at, deleted_by)
|
||||
VALUES (
|
||||
@id, @tenant_id, @name, @description, @enabled, @cron_expression, @timezone, @mode,
|
||||
@selection, @only_if, @notify, @limits, @subscribers, @created_at, @created_by, @updated_at, @updated_by, @deleted_at, @deleted_by)
|
||||
ON CONFLICT (id) DO UPDATE SET
|
||||
tenant_id = EXCLUDED.tenant_id,
|
||||
name = EXCLUDED.name,
|
||||
description = EXCLUDED.description,
|
||||
enabled = EXCLUDED.enabled,
|
||||
cron_expression = EXCLUDED.cron_expression,
|
||||
timezone = EXCLUDED.timezone,
|
||||
mode = EXCLUDED.mode,
|
||||
selection = EXCLUDED.selection,
|
||||
only_if = EXCLUDED.only_if,
|
||||
notify = EXCLUDED.notify,
|
||||
limits = EXCLUDED.limits,
|
||||
subscribers = EXCLUDED.subscribers,
|
||||
created_at = LEAST(scheduler.schedules.created_at, EXCLUDED.created_at),
|
||||
created_by = EXCLUDED.created_by,
|
||||
updated_at = EXCLUDED.updated_at,
|
||||
updated_by = EXCLUDED.updated_by,
|
||||
deleted_at = EXCLUDED.deleted_at,
|
||||
deleted_by = EXCLUDED.deleted_by;";
|
||||
|
||||
var affected = 0;
|
||||
foreach (var schedule in schedules)
|
||||
{
|
||||
await using var cmd = new NpgsqlCommand(sql, conn, tx);
|
||||
cmd.Parameters.AddWithValue("id", schedule.Id);
|
||||
cmd.Parameters.AddWithValue("tenant_id", schedule.TenantId);
|
||||
cmd.Parameters.AddWithValue("name", schedule.Name);
|
||||
cmd.Parameters.AddWithValue("description", DBNull.Value);
|
||||
cmd.Parameters.AddWithValue("enabled", schedule.Enabled);
|
||||
cmd.Parameters.AddWithValue("cron_expression", schedule.CronExpression);
|
||||
cmd.Parameters.AddWithValue("timezone", schedule.Timezone);
|
||||
cmd.Parameters.AddWithValue("mode", BackfillMappings.ToScheduleMode(schedule.Mode));
|
||||
cmd.Parameters.AddWithValue("selection", CanonicalJsonSerializer.Serialize(schedule.Selection));
|
||||
cmd.Parameters.AddWithValue("only_if", CanonicalJsonSerializer.Serialize(schedule.OnlyIf));
|
||||
cmd.Parameters.AddWithValue("notify", CanonicalJsonSerializer.Serialize(schedule.Notify));
|
||||
cmd.Parameters.AddWithValue("limits", CanonicalJsonSerializer.Serialize(schedule.Limits));
|
||||
cmd.Parameters.AddWithValue("subscribers", schedule.Subscribers.ToArray());
|
||||
cmd.Parameters.AddWithValue("created_at", schedule.CreatedAt.UtcDateTime);
|
||||
cmd.Parameters.AddWithValue("created_by", schedule.CreatedBy);
|
||||
cmd.Parameters.AddWithValue("updated_at", schedule.UpdatedAt.UtcDateTime);
|
||||
cmd.Parameters.AddWithValue("updated_by", schedule.UpdatedBy);
|
||||
cmd.Parameters.AddWithValue("deleted_at", DBNull.Value);
|
||||
cmd.Parameters.AddWithValue("deleted_by", DBNull.Value);
|
||||
|
||||
affected += await cmd.ExecuteNonQueryAsync();
|
||||
}
|
||||
|
||||
await tx.CommitAsync();
|
||||
return affected;
|
||||
}
|
||||
|
||||
private async Task BackfillRunsAsync()
|
||||
{
|
||||
var collection = _mongo.GetCollection<BsonDocument>(new SchedulerMongoOptions().RunsCollection);
|
||||
using var cursor = await collection.Find(FilterDefinition<BsonDocument>.Empty).ToCursorAsync();
|
||||
|
||||
var batch = new List<Run>(_options.BatchSize);
|
||||
long total = 0;
|
||||
|
||||
while (await cursor.MoveNextAsync())
|
||||
{
|
||||
foreach (var doc in cursor.Current)
|
||||
{
|
||||
var run = BsonSerializer.Deserialize<Run>(doc);
|
||||
batch.Add(run);
|
||||
if (batch.Count >= _options.BatchSize)
|
||||
{
|
||||
total += await PersistRunsAsync(batch);
|
||||
batch.Clear();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (batch.Count > 0)
|
||||
{
|
||||
total += await PersistRunsAsync(batch);
|
||||
}
|
||||
|
||||
Console.WriteLine($"Runs backfilled: {total}");
|
||||
}
|
||||
|
||||
private async Task<long> PersistRunsAsync(IEnumerable<Run> runs)
|
||||
{
|
||||
if (_options.DryRun)
|
||||
{
|
||||
return runs.LongCount();
|
||||
}
|
||||
|
||||
await using var conn = await _pg.OpenConnectionAsync();
|
||||
await using var tx = await conn.BeginTransactionAsync();
|
||||
|
||||
const string sql = @"
|
||||
INSERT INTO scheduler.runs (
|
||||
id, tenant_id, schedule_id, state, trigger, stats, deltas, reason, retry_of,
|
||||
created_at, started_at, finished_at, error, created_by, updated_at, metadata)
|
||||
VALUES (
|
||||
@id, @tenant_id, @schedule_id, @state, @trigger, @stats, @deltas, @reason, @retry_of,
|
||||
@created_at, @started_at, @finished_at, @error, @created_by, @updated_at, @metadata)
|
||||
ON CONFLICT (id) DO UPDATE SET
|
||||
tenant_id = EXCLUDED.tenant_id,
|
||||
schedule_id = EXCLUDED.schedule_id,
|
||||
state = EXCLUDED.state,
|
||||
trigger = EXCLUDED.trigger,
|
||||
stats = EXCLUDED.stats,
|
||||
deltas = EXCLUDED.deltas,
|
||||
reason = EXCLUDED.reason,
|
||||
retry_of = EXCLUDED.retry_of,
|
||||
created_at = LEAST(scheduler.runs.created_at, EXCLUDED.created_at),
|
||||
started_at = EXCLUDED.started_at,
|
||||
finished_at = EXCLUDED.finished_at,
|
||||
error = EXCLUDED.error,
|
||||
created_by = COALESCE(EXCLUDED.created_by, scheduler.runs.created_by),
|
||||
updated_at = EXCLUDED.updated_at,
|
||||
metadata = EXCLUDED.metadata;";
|
||||
|
||||
var affected = 0;
|
||||
foreach (var run in runs)
|
||||
{
|
||||
await using var cmd = new NpgsqlCommand(sql, conn, tx);
|
||||
cmd.Parameters.AddWithValue("id", run.Id);
|
||||
cmd.Parameters.AddWithValue("tenant_id", run.TenantId);
|
||||
cmd.Parameters.AddWithValue("schedule_id", (object?)run.ScheduleId ?? DBNull.Value);
|
||||
cmd.Parameters.AddWithValue("state", BackfillMappings.ToRunState(run.State));
|
||||
cmd.Parameters.AddWithValue("trigger", BackfillMappings.ToRunTrigger(run.Trigger));
|
||||
cmd.Parameters.AddWithValue("stats", CanonicalJsonSerializer.Serialize(run.Stats));
|
||||
cmd.Parameters.AddWithValue("deltas", CanonicalJsonSerializer.Serialize(run.Deltas));
|
||||
cmd.Parameters.AddWithValue("reason", CanonicalJsonSerializer.Serialize(run.Reason));
|
||||
cmd.Parameters.AddWithValue("retry_of", (object?)run.RetryOf ?? DBNull.Value);
|
||||
cmd.Parameters.AddWithValue("created_at", run.CreatedAt.UtcDateTime);
|
||||
cmd.Parameters.AddWithValue("started_at", (object?)run.StartedAt?.UtcDateTime ?? DBNull.Value);
|
||||
cmd.Parameters.AddWithValue("finished_at", (object?)run.FinishedAt?.UtcDateTime ?? DBNull.Value);
|
||||
cmd.Parameters.AddWithValue("error", (object?)run.Error ?? DBNull.Value);
|
||||
cmd.Parameters.AddWithValue("created_by", (object?)run.Reason?.ManualReason ?? "system");
|
||||
cmd.Parameters.AddWithValue("updated_at", DateTime.UtcNow);
|
||||
cmd.Parameters.AddWithValue("metadata", JsonSerializer.Serialize(new { schema = run.SchemaVersion }));
|
||||
|
||||
affected += await cmd.ExecuteNonQueryAsync();
|
||||
}
|
||||
|
||||
await tx.CommitAsync();
|
||||
return affected;
|
||||
}
|
||||
}
|
||||
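A dry-run invocation of the backfill tool, assembled from the flags ParseArgs recognises above, might look like this; the connection strings are placeholders, not values from this commit:

dotnet run --project src/Scheduler/Tools/Scheduler.Backfill -- --mongo "mongodb://localhost:27017" --mongo-db scheduler --pg "Host=localhost;Database=stellaops;Username=scheduler;Password=***" --batch 500 --dry-run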
@@ -0,0 +1,3 @@
|
||||
using System.Runtime.CompilerServices;
|
||||
|
||||
[assembly: InternalsVisibleTo("StellaOps.Scheduler.Backfill.Tests")]
|
||||
@@ -0,0 +1,22 @@
|
||||
<Project Sdk="Microsoft.NET.Sdk">
|
||||
|
||||
<PropertyGroup>
|
||||
<OutputType>Exe</OutputType>
|
||||
<TargetFramework>net10.0</TargetFramework>
|
||||
<LangVersion>preview</LangVersion>
|
||||
<Nullable>enable</Nullable>
|
||||
<ImplicitUsings>enable</ImplicitUsings>
|
||||
</PropertyGroup>
|
||||
|
||||
<ItemGroup>
|
||||
<ProjectReference Include="../../__Libraries/StellaOps.Scheduler.Storage.Mongo/StellaOps.Scheduler.Storage.Mongo.csproj" />
|
||||
<ProjectReference Include="../../__Libraries/StellaOps.Scheduler.Storage.Postgres/StellaOps.Scheduler.Storage.Postgres.csproj" />
|
||||
<ProjectReference Include="../../__Libraries/StellaOps.Scheduler.Models/StellaOps.Scheduler.Models.csproj" />
|
||||
</ItemGroup>
|
||||
|
||||
<ItemGroup>
|
||||
<PackageReference Include="MongoDB.Driver" Version="3.5.0" />
|
||||
<PackageReference Include="Npgsql" Version="9.0.2" />
|
||||
</ItemGroup>
|
||||
|
||||
</Project>
|
||||
@@ -170,3 +170,192 @@ $$ LANGUAGE plpgsql;
|
||||
CREATE TRIGGER trg_triggers_updated_at
|
||||
BEFORE UPDATE ON scheduler.triggers
|
||||
FOR EACH ROW EXECUTE FUNCTION scheduler.update_updated_at();
|
||||
|
||||
-- Schedules table (control-plane schedules)
|
||||
CREATE TABLE IF NOT EXISTS scheduler.schedules (
|
||||
id TEXT PRIMARY KEY,
|
||||
tenant_id TEXT NOT NULL,
|
||||
name TEXT NOT NULL,
|
||||
description TEXT,
|
||||
enabled BOOLEAN NOT NULL DEFAULT TRUE,
|
||||
cron_expression TEXT,
|
||||
timezone TEXT NOT NULL DEFAULT 'UTC',
|
||||
mode TEXT NOT NULL CHECK (mode IN ('analysisonly', 'contentrefresh')),
|
||||
selection JSONB NOT NULL DEFAULT '{}',
|
||||
only_if JSONB NOT NULL DEFAULT '{}',
|
||||
notify JSONB NOT NULL DEFAULT '{}',
|
||||
limits JSONB NOT NULL DEFAULT '{}',
|
||||
subscribers TEXT[] NOT NULL DEFAULT '{}',
|
||||
created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
|
||||
created_by TEXT NOT NULL,
|
||||
updated_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
|
||||
updated_by TEXT NOT NULL,
|
||||
deleted_at TIMESTAMPTZ,
|
||||
deleted_by TEXT
|
||||
);
|
||||
|
||||
CREATE INDEX IF NOT EXISTS idx_schedules_tenant ON scheduler.schedules(tenant_id) WHERE deleted_at IS NULL;
|
||||
CREATE INDEX IF NOT EXISTS idx_schedules_enabled ON scheduler.schedules(tenant_id, enabled) WHERE deleted_at IS NULL;
|
||||
CREATE UNIQUE INDEX IF NOT EXISTS uq_schedules_tenant_name_active ON scheduler.schedules(tenant_id, name) WHERE deleted_at IS NULL;
|
||||
|
||||
-- Runs table (execution records)
|
||||
CREATE TABLE IF NOT EXISTS scheduler.runs (
|
||||
id TEXT PRIMARY KEY,
|
||||
tenant_id TEXT NOT NULL,
|
||||
schedule_id TEXT REFERENCES scheduler.schedules(id),
|
||||
state TEXT NOT NULL CHECK (state IN ('planning','queued','running','completed','error','cancelled')),
|
||||
trigger TEXT NOT NULL,
|
||||
stats JSONB NOT NULL DEFAULT '{}',
|
||||
deltas JSONB NOT NULL DEFAULT '[]',
|
||||
reason JSONB NOT NULL DEFAULT '{}',
|
||||
retry_of TEXT REFERENCES scheduler.runs(id),
|
||||
created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
|
||||
started_at TIMESTAMPTZ,
|
||||
finished_at TIMESTAMPTZ,
|
||||
error TEXT,
|
||||
created_by TEXT,
|
||||
updated_at TIMESTAMPTZ,
|
||||
metadata JSONB NOT NULL DEFAULT '{}'
|
||||
);
|
||||
|
||||
CREATE INDEX IF NOT EXISTS idx_runs_tenant_state ON scheduler.runs(tenant_id, state);
|
||||
CREATE INDEX IF NOT EXISTS idx_runs_schedule ON scheduler.runs(schedule_id);
|
||||
CREATE INDEX IF NOT EXISTS idx_runs_created ON scheduler.runs(created_at DESC);
|
||||
|
||||
-- Graph jobs table
|
||||
CREATE TABLE IF NOT EXISTS scheduler.graph_jobs (
|
||||
id TEXT PRIMARY KEY,
|
||||
tenant_id TEXT NOT NULL,
|
||||
sbom_id TEXT NOT NULL,
|
||||
sbom_version_id TEXT,
|
||||
sbom_digest TEXT NOT NULL,
|
||||
graph_snapshot_id TEXT,
|
||||
status TEXT NOT NULL CHECK (status IN ('pending','queued','running','completed','failed','cancelled')),
|
||||
trigger TEXT NOT NULL CHECK (trigger IN ('sbom-version','backfill','manual')),
|
||||
priority INT NOT NULL DEFAULT 100,
|
||||
attempts INT NOT NULL DEFAULT 0,
|
||||
max_attempts INT NOT NULL DEFAULT 3,
|
||||
cartographer_job_id TEXT,
|
||||
correlation_id TEXT,
|
||||
metadata JSONB NOT NULL DEFAULT '{}',
|
||||
created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
|
||||
started_at TIMESTAMPTZ,
|
||||
completed_at TIMESTAMPTZ,
|
||||
error TEXT,
|
||||
error_details JSONB
|
||||
);
|
||||
|
||||
CREATE INDEX IF NOT EXISTS idx_graph_jobs_tenant_status ON scheduler.graph_jobs(tenant_id, status);
|
||||
CREATE INDEX IF NOT EXISTS idx_graph_jobs_sbom ON scheduler.graph_jobs(sbom_digest);
|
||||
|
||||
-- Policy run jobs table
|
||||
CREATE TABLE IF NOT EXISTS scheduler.policy_jobs (
|
||||
id TEXT PRIMARY KEY,
|
||||
tenant_id TEXT NOT NULL,
|
||||
policy_pack_id TEXT NOT NULL,
|
||||
policy_version INT,
|
||||
target_type TEXT NOT NULL,
target_id TEXT NOT NULL,
status TEXT NOT NULL CHECK (status IN ('pending','queued','running','completed','failed','cancelled')),
priority INT NOT NULL DEFAULT 100,
run_id TEXT,
requested_by TEXT,
mode TEXT,
metadata JSONB NOT NULL DEFAULT '{}',
inputs JSONB NOT NULL DEFAULT '{}',
attempt_count INT NOT NULL DEFAULT 0,
max_attempts INT NOT NULL DEFAULT 3,
queued_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
available_at TIMESTAMPTZ,
submitted_at TIMESTAMPTZ,
started_at TIMESTAMPTZ,
completed_at TIMESTAMPTZ,
cancellation_requested BOOLEAN NOT NULL DEFAULT FALSE,
cancellation_reason TEXT,
cancelled_at TIMESTAMPTZ,
last_attempt_at TIMESTAMPTZ,
last_error TEXT,
lease_owner TEXT,
lease_expires_at TIMESTAMPTZ,
correlation_id TEXT
);

CREATE INDEX IF NOT EXISTS idx_policy_jobs_tenant_status ON scheduler.policy_jobs(tenant_id, status);
CREATE INDEX IF NOT EXISTS idx_policy_jobs_run ON scheduler.policy_jobs(run_id);

-- Impact snapshots table
CREATE TABLE IF NOT EXISTS scheduler.impact_snapshots (
id TEXT PRIMARY KEY,
tenant_id TEXT NOT NULL,
run_id TEXT NOT NULL REFERENCES scheduler.runs(id) ON DELETE CASCADE,
image_digest TEXT NOT NULL,
image_reference TEXT,
new_findings INT NOT NULL DEFAULT 0,
new_criticals INT NOT NULL DEFAULT 0,
new_high INT NOT NULL DEFAULT 0,
new_medium INT NOT NULL DEFAULT 0,
new_low INT NOT NULL DEFAULT 0,
total_findings INT NOT NULL DEFAULT 0,
kev_hits TEXT[] NOT NULL DEFAULT '{}',
top_findings JSONB NOT NULL DEFAULT '[]',
report_url TEXT,
attestation JSONB NOT NULL DEFAULT '{}',
detected_at TIMESTAMPTZ NOT NULL DEFAULT NOW()
);

CREATE INDEX IF NOT EXISTS idx_impact_snapshots_run ON scheduler.impact_snapshots(run_id);
CREATE INDEX IF NOT EXISTS idx_impact_snapshots_tenant ON scheduler.impact_snapshots(tenant_id, detected_at DESC);

-- Execution logs table
CREATE TABLE IF NOT EXISTS scheduler.execution_logs (
id BIGSERIAL PRIMARY KEY,
run_id TEXT NOT NULL REFERENCES scheduler.runs(id) ON DELETE CASCADE,
logged_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
level TEXT NOT NULL,
message TEXT NOT NULL,
logger TEXT,
data JSONB NOT NULL DEFAULT '{}'
);

CREATE INDEX IF NOT EXISTS idx_execution_logs_run ON scheduler.execution_logs(run_id);

-- Run summaries table
CREATE TABLE IF NOT EXISTS scheduler.run_summaries (
id TEXT PRIMARY KEY,
tenant_id TEXT NOT NULL,
schedule_id TEXT REFERENCES scheduler.schedules(id),
period_start TIMESTAMPTZ NOT NULL,
period_end TIMESTAMPTZ NOT NULL,
total_runs INT NOT NULL DEFAULT 0,
successful_runs INT NOT NULL DEFAULT 0,
failed_runs INT NOT NULL DEFAULT 0,
cancelled_runs INT NOT NULL DEFAULT 0,
avg_duration_seconds NUMERIC(10,2),
max_duration_seconds INT,
min_duration_seconds INT,
total_findings_detected INT NOT NULL DEFAULT 0,
new_criticals INT NOT NULL DEFAULT 0,
computed_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
UNIQUE (tenant_id, schedule_id, period_start)
);

CREATE INDEX IF NOT EXISTS idx_run_summaries_tenant ON scheduler.run_summaries(tenant_id, period_start DESC);

-- Audit table
CREATE TABLE IF NOT EXISTS scheduler.audit (
id TEXT PRIMARY KEY,
tenant_id TEXT NOT NULL,
action TEXT NOT NULL,
entity_type TEXT NOT NULL,
entity_id TEXT NOT NULL,
actor TEXT,
actor_type TEXT,
old_value JSONB,
new_value JSONB,
details JSONB NOT NULL DEFAULT '{}',
ip_address INET,
occurred_at TIMESTAMPTZ NOT NULL DEFAULT NOW()
);

CREATE INDEX IF NOT EXISTS idx_audit_tenant_time ON scheduler.audit(tenant_id, occurred_at DESC);
CREATE INDEX IF NOT EXISTS idx_audit_entity ON scheduler.audit(entity_type, entity_id);
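The lease and scheduling columns above (status, priority, available_at, attempt_count, lease_owner, lease_expires_at) imply a claim-then-work loop, but the consuming repository is not part of this diff. The sketch below only illustrates how a worker might claim one job against this schema; the class name, method name, five-minute lease window, and the FOR UPDATE SKIP LOCKED strategy are assumptions, not code from this commit.

using System.Threading;
using System.Threading.Tasks;
using Npgsql;

// Hypothetical worker-side helper; not part of this commit.
public static class PolicyJobLeaseSketch
{
    // Claims the highest-priority claimable job for a tenant and records the lease.
    private const string ClaimSql = """
        UPDATE scheduler.policy_jobs
        SET status = 'running',
            lease_owner = @owner,
            lease_expires_at = NOW() + INTERVAL '5 minutes',
            attempt_count = attempt_count + 1,
            last_attempt_at = NOW(),
            started_at = COALESCE(started_at, NOW())
        WHERE id = (
            SELECT id
            FROM scheduler.policy_jobs
            WHERE tenant_id = @tenant
              AND status IN ('pending', 'queued')
              AND (available_at IS NULL OR available_at <= NOW())
              AND attempt_count < max_attempts
            ORDER BY priority, queued_at
            LIMIT 1
            FOR UPDATE SKIP LOCKED)
        RETURNING id;
        """;

    public static async Task<string?> TryClaimAsync(NpgsqlDataSource dataSource, string tenant, string owner, CancellationToken ct)
    {
        await using var command = dataSource.CreateCommand(ClaimSql);
        command.Parameters.AddWithValue("tenant", tenant);
        command.Parameters.AddWithValue("owner", owner);

        // Returns the claimed job id, or null when nothing is currently claimable.
        return await command.ExecuteScalarAsync(ct) as string;
    }
}

SKIP LOCKED lets several workers poll the same table without blocking each other, which is why the lease columns pair naturally with this pattern.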
@@ -0,0 +1,34 @@
using FluentAssertions;
using Scheduler.Backfill;
using StellaOps.Scheduler.Models;
using Xunit;

namespace StellaOps.Scheduler.Backfill.Tests;

public class BackfillMappingsTests
{
[Theory]
[InlineData(ScheduleMode.AnalysisOnly, "analysisonly")]
[InlineData(ScheduleMode.ContentRefresh, "contentrefresh")]
public void ScheduleMode_is_lower_snake(ScheduleMode mode, string expected)
{
BackfillMappings.ToScheduleMode(mode).Should().Be(expected);
}

[Theory]
[InlineData(RunState.Planning, "planning")]
[InlineData(RunState.Completed, "completed")]
[InlineData(RunState.Cancelled, "cancelled")]
public void RunState_is_lower(RunState state, string expected)
{
BackfillMappings.ToRunState(state).Should().Be(expected);
}

[Theory]
[InlineData(RunTrigger.Cron, "cron")]
[InlineData(RunTrigger.Manual, "manual")]
public void RunTrigger_is_lower(RunTrigger trigger, string expected)
{
BackfillMappings.ToRunTrigger(trigger).Should().Be(expected);
}
}
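The tests above pin the expected lowercase string forms, but the BackfillMappings implementation itself is not included in this excerpt. A minimal sketch that would satisfy those expectations follows, assuming only the enum members seen in the InlineData attributes; treat it as illustrative, not the committed implementation.

using StellaOps.Scheduler.Models;

namespace Scheduler.Backfill;

// Illustrative sketch only; the committed BackfillMappings is not shown in this diff.
// It simply needs to produce the lowercase names the tests above expect.
public static class BackfillMappings
{
    public static string ToScheduleMode(ScheduleMode mode) => mode.ToString().ToLowerInvariant();

    public static string ToRunState(RunState state) => state.ToString().ToLowerInvariant();

    public static string ToRunTrigger(RunTrigger trigger) => trigger.ToString().ToLowerInvariant();
}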
@@ -0,0 +1,20 @@
<Project Sdk="Microsoft.NET.Sdk">

<PropertyGroup>
<TargetFramework>net10.0</TargetFramework>
<Nullable>enable</Nullable>
<LangVersion>preview</LangVersion>
</PropertyGroup>

<ItemGroup>
<PackageReference Include="FluentAssertions" Version="6.12.0" />
<PackageReference Include="xunit" Version="2.6.3" />
<PackageReference Include="xunit.runner.visualstudio" Version="2.6.3" />
</ItemGroup>

<ItemGroup>
<ProjectReference Include="../../Tools/Scheduler.Backfill/Scheduler.Backfill.csproj" />
<ProjectReference Include="../../__Libraries/StellaOps.Scheduler.Models/StellaOps.Scheduler.Models.csproj" />
</ItemGroup>

</Project>
@@ -43,22 +43,39 @@ public sealed class BundleIngestionStepExecutor : IPackRunStepExecutor
}

var checksum = GetString(parameters, "checksum") ?? GetString(parameters, "checksumSha256");
-if (!string.IsNullOrWhiteSpace(checksum))
if (string.IsNullOrWhiteSpace(checksum))
{
-var actual = ComputeSha256(sourcePath);
-if (!checksum.Equals(actual, StringComparison.OrdinalIgnoreCase))
-{
-return Task.FromResult(PackRunStepExecutionResult.Failure($"Checksum mismatch: expected {checksum}, actual {actual}."));
-}
return Task.FromResult(PackRunStepExecutionResult.Failure("Checksum is required for bundle ingestion."));
}

var actual = ComputeSha256(sourcePath);
if (!checksum.Equals(actual, StringComparison.OrdinalIgnoreCase))
{
return Task.FromResult(PackRunStepExecutionResult.Failure($"Checksum mismatch: expected {checksum}, actual {actual}."));
}

var deterministicDir = Path.Combine(stagingRoot, checksum.ToLowerInvariant());
var destination = GetString(parameters, "destinationPath")
-?? Path.Combine(stagingRoot, Path.GetFileName(sourcePath));
?? Path.Combine(deterministicDir, Path.GetFileName(sourcePath));

try
{
Directory.CreateDirectory(Path.GetDirectoryName(destination)!);
File.Copy(sourcePath, destination, overwrite: true);

// Persist deterministic metadata for downstream evidence
var metadataPath = Path.Combine(deterministicDir, "metadata.json");
var metadata = new
{
source = Path.GetFullPath(sourcePath),
checksumSha256 = checksum.ToLowerInvariant(),
stagedPath = Path.GetFullPath(destination)
};
var json = JsonSerializer.Serialize(metadata, new JsonSerializerOptions(JsonSerializerDefaults.Web)
{
WriteIndented = false
});
File.WriteAllText(metadataPath, json);
}
catch (Exception ex)
{
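The executor hunk above now rejects missing checksums and compares the supplied value case-insensitively against ComputeSha256(sourcePath), whose body is not shown in this hunk. For callers that need to pre-compute the checksum parameter, a minimal sketch follows; the helper name is hypothetical and merely mirrors what the executor is assumed to compute (lowercase hex SHA-256 of the bundle file).

using System;
using System.IO;
using System.Security.Cryptography;

// Hypothetical caller-side helper; mirrors what the executor's ComputeSha256 is assumed to produce.
public static class BundleChecksum
{
    public static string ComputeSha256Hex(string path)
    {
        using var stream = File.OpenRead(path);
        var hash = SHA256.HashData(stream);                   // stream-based overload available since .NET 7
        return Convert.ToHexString(hash).ToLowerInvariant();  // lowercase hex, matching the value stored in metadata.json
    }
}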
@@ -15,6 +15,7 @@ public sealed class BundleIngestionStepExecutorTests
using var temp = new TempDirectory();
var source = Path.Combine(temp.Path, "bundle.tgz");
await File.WriteAllTextAsync(source, "bundle-data");
var checksum = "3e25960a79dbc69b674cd4ec67a72c62b3aa32b1d4d216177a5ffcc6f46673b5"; // sha256 of "bundle-data"

var options = Options.Create(new PackRunWorkerOptions { ArtifactsPath = temp.Path });
var executor = new BundleIngestionStepExecutor(options, NullLogger<BundleIngestionStepExecutor>.Instance);
@@ -22,15 +23,20 @@ public sealed class BundleIngestionStepExecutorTests
var step = CreateStep("builtin:bundle.ingest", new Dictionary<string, TaskPackPlanParameterValue>
{
["path"] = Value(source),
-["checksum"] = Value("3e25960a79dbc69b674cd4ec67a72c62b3aa32b1d4d216177a5ffcc6f46673b5") // sha256 of "bundle-data"
["checksum"] = Value(checksum)
});

var result = await executor.ExecuteAsync(step, step.Parameters, CancellationToken.None);

Assert.True(result.Succeeded);
-var staged = Path.Combine(temp.Path, "bundles", "bundle.tgz");
var staged = Path.Combine(temp.Path, "bundles", checksum, "bundle.tgz");
Assert.True(File.Exists(staged));
Assert.Equal(await File.ReadAllBytesAsync(source), await File.ReadAllBytesAsync(staged));

var metadataPath = Path.Combine(temp.Path, "bundles", checksum, "metadata.json");
Assert.True(File.Exists(metadataPath));
var metadata = await File.ReadAllTextAsync(metadataPath);
Assert.Contains(checksum, metadata, StringComparison.OrdinalIgnoreCase);
}

[Fact]
@@ -55,6 +61,27 @@ public sealed class BundleIngestionStepExecutorTests
Assert.Contains("Checksum mismatch", result.Error, StringComparison.OrdinalIgnoreCase);
}

[Fact]
public async Task ExecuteAsync_MissingChecksum_Fails()
{
using var temp = new TempDirectory();
var source = Path.Combine(temp.Path, "bundle.tgz");
await File.WriteAllTextAsync(source, "bundle-data");

var options = Options.Create(new PackRunWorkerOptions { ArtifactsPath = temp.Path });
var executor = new BundleIngestionStepExecutor(options, NullLogger<BundleIngestionStepExecutor>.Instance);

var step = CreateStep("builtin:bundle.ingest", new Dictionary<string, TaskPackPlanParameterValue>
{
["path"] = Value(source)
});

var result = await executor.ExecuteAsync(step, step.Parameters, CancellationToken.None);

Assert.False(result.Succeeded);
Assert.Contains("Checksum is required", result.Error, StringComparison.OrdinalIgnoreCase);
}

[Fact]
public async Task ExecuteAsync_UnknownUses_NoOpSuccess()
{
@@ -6,6 +6,7 @@ using StellaOps.Zastava.Observer.Configuration;
using StellaOps.Zastava.Observer.ContainerRuntime;
using StellaOps.Zastava.Observer.ContainerRuntime.Cri;
using StellaOps.Zastava.Observer.Runtime;
using StellaOps.Zastava.Core.Security;

namespace StellaOps.Zastava.Observer.Worker;
@@ -65,7 +66,9 @@ internal static class RuntimeEventFactory
Annotations = annotations.Count == 0 ? null : new SortedDictionary<string, string>(annotations, StringComparer.Ordinal)
};

-return RuntimeEventEnvelope.Create(runtimeEvent, ZastavaContractVersions.RuntimeEvent);
var envelope = RuntimeEventEnvelope.Create(runtimeEvent, ZastavaContractVersions.RuntimeEvent);
ZastavaContractValidator.ValidateRuntimeEvent(envelope);
return envelope;
}

private static string ResolvePlatform(IReadOnlyDictionary<string, string> labels, ContainerRuntimeEndpointOptions endpoint)
@@ -5,6 +5,7 @@ using System.Text.Json;
using StellaOps.Zastava.Core.Contracts;
using StellaOps.Zastava.Core.Hashing;
using StellaOps.Zastava.Core.Serialization;
using StellaOps.Zastava.Core.Security;

namespace StellaOps.Zastava.Webhook.Admission;
@@ -16,6 +17,7 @@ internal sealed class AdmissionResponseBuilder
{
var decision = BuildDecision(context, evaluation);
var envelope = AdmissionDecisionEnvelope.Create(decision, ZastavaContractVersions.AdmissionDecision);
ZastavaContractValidator.ValidateAdmissionDecision(envelope);
var auditAnnotations = CreateAuditAnnotations(envelope, evaluation);

var warnings = BuildWarnings(evaluation);
@@ -0,0 +1,51 @@
using StellaOps.Zastava.Core.Contracts;

namespace StellaOps.Zastava.Core.Security;

/// <summary>
/// Lightweight, deterministic guards for Zastava runtime and admission contracts.
/// </summary>
public static class ZastavaContractValidator
{
public static void ValidateRuntimeEvent(RuntimeEventEnvelope envelope)
{
ArgumentNullException.ThrowIfNull(envelope);
ArgumentNullException.ThrowIfNull(envelope.Event);

if (string.IsNullOrWhiteSpace(envelope.Event.Tenant))
{
throw new ArgumentException("tenant must be set on runtime events", nameof(envelope));
}

if (envelope.Event.When.Offset != TimeSpan.Zero)
{
throw new ArgumentException("runtime event timestamps must be UTC", nameof(envelope));
}

if (string.IsNullOrWhiteSpace(envelope.Event.EventId))
{
throw new ArgumentException("runtime event ID must be populated", nameof(envelope));
}
}

public static void ValidateAdmissionDecision(AdmissionDecisionEnvelope envelope)
{
ArgumentNullException.ThrowIfNull(envelope);
ArgumentNullException.ThrowIfNull(envelope.Decision);

if (string.IsNullOrWhiteSpace(envelope.Decision.Namespace))
{
throw new ArgumentException("admission namespace is required", nameof(envelope));
}

if (string.IsNullOrWhiteSpace(envelope.Decision.PodSpecDigest))
{
throw new ArgumentException("podSpecDigest must be set on admission decisions", nameof(envelope));
}

if (envelope.Decision.Images is null || envelope.Decision.Images.Count == 0)
{
throw new ArgumentException("at least one image verdict is required", nameof(envelope));
}
}
}
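The guards above throw ArgumentException, and the updated factory and response builder let that propagate, as the new tests assert. A host that prefers to log and drop a malformed envelope instead of faulting could wrap the guard; the wrapper below is a hypothetical convenience and is not part of this commit.

using System;
using StellaOps.Zastava.Core.Contracts;
using StellaOps.Zastava.Core.Security;

// Hypothetical wrapper; not part of this commit. Converts the guard's exception into a
// loggable signal for hosts that prefer to drop malformed events instead of faulting.
public static class ZastavaContractValidation
{
    public static bool TryValidateRuntimeEvent(RuntimeEventEnvelope envelope, out string? error)
    {
        try
        {
            ZastavaContractValidator.ValidateRuntimeEvent(envelope);
            error = null;
            return true;
        }
        catch (ArgumentException ex)
        {
            error = ex.Message;
            return false;
        }
    }
}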
@@ -13,8 +13,8 @@ namespace StellaOps.Zastava.Observer.Tests.Worker;
public sealed class RuntimeEventFactoryTests
{
[Fact]
-public void Create_AttachesBuildIdFromProcessCapture()
-{
public void Create_AttachesBuildIdFromProcessCapture()
{
var timestamp = DateTimeOffset.UtcNow;
var snapshot = new CriContainerInfo(
Id: "container-a",
@@ -68,7 +68,45 @@ public sealed class RuntimeEventFactoryTests
posture: null,
additionalEvidence: null);

-Assert.NotNull(envelope.Event.Process);
-Assert.Equal("5f0c7c3cb4d9f8a4", envelope.Event.Process!.BuildId);
-}
-}
Assert.NotNull(envelope.Event.Process);
Assert.Equal("5f0c7c3cb4d9f8a4", envelope.Event.Process!.BuildId);
}

[Fact]
public void Create_ThrowsWhenTenantMissing()
{
var timestamp = DateTimeOffset.UtcNow;
var snapshot = new CriContainerInfo(
Id: "container-b",
PodSandboxId: "sandbox-b",
Name: "api",
Attempt: 1,
Image: "ghcr.io/example/api:1.0",
ImageRef: "ghcr.io/example/api@sha256:deadbeef",
Labels: new Dictionary<string, string>(),
Annotations: new Dictionary<string, string>(),
CreatedAt: timestamp,
StartedAt: timestamp,
FinishedAt: null,
ExitCode: null,
Reason: null,
Message: null,
Pid: 1111);

var lifecycleEvent = new ContainerLifecycleEvent(ContainerLifecycleEventKind.Start, timestamp, snapshot);
var endpoint = new ContainerRuntimeEndpointOptions
{
Engine = ContainerRuntimeEngine.Containerd,
Endpoint = "unix:///run/containerd/containerd.sock",
Name = "containerd"
};
var identity = new CriRuntimeIdentity("containerd", "1.7.19", "v1");

Assert.Throws<ArgumentException>(() => RuntimeEventFactory.Create(
lifecycleEvent,
endpoint,
identity,
tenant: " ",
nodeName: "node-1"));
}
}
@@ -132,4 +132,60 @@ public sealed class AdmissionResponseBuilderTests
Assert.NotNull(response.Response.AuditAnnotations);
Assert.Contains("zastava.stellaops/admission", response.Response.AuditAnnotations!.Keys);
}

[Fact]
public void Build_ThrowsWhenNamespaceMissing()
{
using var document = JsonDocument.Parse("""
{
"metadata": { "namespace": "" },
"spec": {
"containers": [ { "name": "api", "image": "ghcr.io/example/api:1.0" } ]
}
}
""");

var pod = document.RootElement;
var spec = pod.GetProperty("spec");

var context = new AdmissionRequestContext(
ApiVersion: "admission.k8s.io/v1",
Kind: "AdmissionReview",
Uid: "abc",
Namespace: string.Empty,
Labels: new Dictionary<string, string>(),
Containers: new[] { new AdmissionContainerReference("api", "ghcr.io/example/api:1.0") },
PodObject: pod,
PodSpec: spec);

var evaluation = new RuntimeAdmissionEvaluation
{
Decisions = new[]
{
new RuntimeAdmissionDecision
{
OriginalImage = "ghcr.io/example/api:1.0",
ResolvedDigest = "ghcr.io/example/api@sha256:deadbeef",
Verdict = PolicyVerdict.Pass,
Allowed = true,
Policy = new RuntimePolicyImageResult
{
PolicyVerdict = PolicyVerdict.Pass,
HasSbom = true,
Signed = true
},
Reasons = Array.Empty<string>(),
FromCache = false,
ResolutionFailed = false
}
},
BackendFailed = false,
FailOpenApplied = false,
FailureReason = null,
TtlSeconds = 300
};

var builder = new AdmissionResponseBuilder();
Assert.Throws<ArgumentException>(() => builder.Build(context, evaluation));
}
}
32
src/__Libraries/StellaOps.Infrastructure.Postgres/AGENTS.md
Normal file
@@ -0,0 +1,32 @@
# StellaOps.Infrastructure.Postgres — AGENTS

## Roles
- Backend engineer: maintain the shared PostgreSQL infrastructure primitives (DataSourceBase, RepositoryBase, MigrationRunner, options/DI helpers).
- QA automation: own Postgres Testcontainers coverage, tenant-context/RLS checks, and migration idempotency tests.
- DevOps liaison: keep provisioning values in `ops/devops/postgres` aligned with library defaults (timeouts, schema names, TLS, pooling).

## Required Reading
- docs/db/README.md, SPECIFICATION.md, RULES.md, VERIFICATION.md, CONVERSION_PLAN.md
- docs/modules/platform/architecture-overview.md
- docs/airgap/airgap-mode.md
- ops/devops/AGENTS.md (DevOps working agreement)

## Working Directory & Scope
- Primary: `src/__Libraries/StellaOps.Infrastructure.Postgres`
- Allowed shared libs/tests: `src/__Libraries/StellaOps.Infrastructure.Postgres.Testing`, `src/__Libraries/__Tests/StellaOps.Infrastructure.Postgres.Tests`
- Cross-module storage projects may reference this library, but edits to them must be recorded in the owning sprint before touching.

## Determinism & Guardrails
- Target runtime: .NET 10, Npgsql 9.x; keep options defaults deterministic (UTC timezone, statement timeout, stable pagination ordering).
- Tenant context must be set via `set_config('app.current_tenant', ...)` on every connection before use; never bypass DataSourceBase (see the sketch after this list).
- Migrations ship as embedded resources; MigrationRunner uses SHA256 checksums and `RunFromAssemblyAsync`—do not execute ad-hoc SQL outside tracked migrations.
- Respect air-gap posture: no external downloads at runtime; pin Postgres/Testcontainers images (`postgres:16-alpine` or later) in tests.
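A minimal sketch of the tenant-context call referenced in the Determinism & Guardrails list, shown against raw Npgsql purely for illustration; in this library the call is expected to flow through DataSourceBase, and the class and method names below are hypothetical.

using System.Threading;
using System.Threading.Tasks;
using Npgsql;

// Illustrative only; in this repo the session call is expected to be issued by DataSourceBase.
public static class TenantSessionSketch
{
    public static async Task<NpgsqlConnection> OpenForTenantAsync(NpgsqlDataSource dataSource, string tenantId, CancellationToken ct)
    {
        var connection = await dataSource.OpenConnectionAsync(ct);

        // Scope the session so row-level security policies can read app.current_tenant.
        await using var command = connection.CreateCommand();
        command.CommandText = "SELECT set_config('app.current_tenant', @tenant, false)";
        command.Parameters.AddWithValue("tenant", tenantId);
        await command.ExecuteNonQueryAsync(ct);

        return connection;
    }
}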
## Testing Expectations
- Integration tests run with Testcontainers; Docker is required locally. Command: `dotnet test src/__Libraries/__Tests/StellaOps.Infrastructure.Postgres.Tests -c Release`. (A fixture sketch follows this list.)
- Tests must isolate state per class (unique schema names) and clean up via transactions or schema drops.
- Treat analyzer warnings as errors; ensure nullable enabled and `LangVersion` follows repo default.
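A minimal fixture sketch for the Testcontainers expectation above, assuming the Testcontainers.PostgreSql package and xunit's IAsyncLifetime; the repo's real fixture lives in StellaOps.Infrastructure.Postgres.Testing and is not shown here.

using System.Threading.Tasks;
using Testcontainers.PostgreSql;
using Xunit;

// Illustrative fixture; the shared fixture is expected to live in StellaOps.Infrastructure.Postgres.Testing.
public sealed class PostgresContainerFixture : IAsyncLifetime
{
    private readonly PostgreSqlContainer _container = new PostgreSqlBuilder()
        .WithImage("postgres:16-alpine")   // pinned image, per the guardrails above
        .Build();

    public string ConnectionString => _container.GetConnectionString();

    public Task InitializeAsync() => _container.StartAsync();

    public Task DisposeAsync() => _container.DisposeAsync().AsTask();
}

Test classes can consume ConnectionString and create a uniquely named schema per class to satisfy the isolation rule above.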
## Handoff Notes
- Align configuration defaults with the provisioning values under `ops/devops/postgres` (ports, pool sizes, SSL/TLS).
- Update this AGENTS file whenever connection/session rules or provisioning defaults change; record updates in the sprint Execution Log.