notify doctors work, audit work, new product advisory sprints
@@ -0,0 +1,262 @@
// -----------------------------------------------------------------------------
// RekorReceiptTests.cs
// Description: Unit tests for standardized Rekor receipt schema.
// -----------------------------------------------------------------------------

using System.Text.Json;
using FluentAssertions;
using StellaOps.Attestor.Core.Rekor;
using Xunit;

namespace StellaOps.Attestor.Core.Tests.Rekor;

[Trait("Category", "Unit")]
[Trait("Category", "Rekor")]
public sealed class RekorReceiptTests
{
    [Fact]
    public void RekorReceipt_SerializesToValidJson()
    {
        // Arrange
        var receipt = CreateValidReceipt();

        // Act
        var json = JsonSerializer.Serialize(receipt, new JsonSerializerOptions { WriteIndented = true });

        // Assert
        json.Should().NotBeNullOrEmpty();
        json.Should().Contain("\"schemaVersion\":");
        json.Should().Contain("\"uuid\":");
        json.Should().Contain("\"logIndex\":");
        json.Should().Contain("\"checkpoint\":");
        json.Should().Contain("\"inclusionProof\":");
    }

    [Fact]
    public void RekorReceipt_RoundtripsCorrectly()
    {
        // Arrange
        var original = CreateValidReceipt();

        // Act
        var json = JsonSerializer.Serialize(original);
        var deserialized = JsonSerializer.Deserialize<RekorReceipt>(json);

        // Assert
        deserialized.Should().NotBeNull();
        deserialized!.Uuid.Should().Be(original.Uuid);
        deserialized.LogIndex.Should().Be(original.LogIndex);
        deserialized.LogId.Should().Be(original.LogId);
        deserialized.IntegratedTime.Should().Be(original.IntegratedTime);
        deserialized.EntryKind.Should().Be(original.EntryKind);
        deserialized.EntryBodyHash.Should().Be(original.EntryBodyHash);
        deserialized.Checkpoint.Origin.Should().Be(original.Checkpoint.Origin);
        deserialized.InclusionProof.LeafHash.Should().Be(original.InclusionProof.LeafHash);
    }

    [Fact]
    public void RekorReceipt_IntegratedTimeUtc_ConvertsCorrectly()
    {
        // Arrange
        var unixTime = 1704067200L; // 2024-01-01 00:00:00 UTC
        var receipt = CreateValidReceipt() with { IntegratedTime = unixTime };

        // Act
        var utc = receipt.IntegratedTimeUtc;

        // Assert
        utc.Year.Should().Be(2024);
        utc.Month.Should().Be(1);
        utc.Day.Should().Be(1);
        utc.Hour.Should().Be(0);
        utc.Minute.Should().Be(0);
        utc.Second.Should().Be(0);
    }

    [Fact]
    public void RekorReceipt_EntryUrl_FormsCorrectly()
    {
        // Arrange
        var receipt = CreateValidReceipt() with
        {
            LogUrl = "https://rekor.sigstore.dev",
            Uuid = "abc123def456"
        };

        // Act
        var entryUrl = receipt.EntryUrl;

        // Assert
        entryUrl.Should().Be("https://rekor.sigstore.dev/api/v1/log/entries/abc123def456");
    }

    [Fact]
    public void RekorReceipt_EntryUrl_HandlesTrailingSlash()
    {
        // Arrange
        var receipt = CreateValidReceipt() with
        {
            LogUrl = "https://rekor.sigstore.dev/",
            Uuid = "abc123"
        };

        // Act
        var entryUrl = receipt.EntryUrl;

        // Assert
        entryUrl.Should().Be("https://rekor.sigstore.dev/api/v1/log/entries/abc123");
    }

    [Fact]
    public void RekorCheckpointV2_TimestampUtc_ConvertsCorrectly()
    {
        // Arrange
        var checkpoint = new RekorCheckpointV2
        {
            Origin = "test-origin",
            Size = 1000,
            RootHash = "abc123",
            Timestamp = 1704067200L,
            Signature = "sig123"
        };

        // Act
        var utc = checkpoint.TimestampUtc;

        // Assert
        utc.Year.Should().Be(2024);
    }

    [Fact]
    public void RekorInclusionProofV2_SerializesHashesCorrectly()
    {
        // Arrange
        var proof = new RekorInclusionProofV2
        {
            LogIndex = 1000,
            TreeSize = 2000,
            RootHash = "root123",
            LeafHash = "leaf456",
            Hashes = ["hash1", "hash2", "hash3"]
        };

        // Act
        var json = JsonSerializer.Serialize(proof);
        var deserialized = JsonSerializer.Deserialize<RekorInclusionProofV2>(json);

        // Assert
        deserialized.Should().NotBeNull();
        deserialized!.Hashes.Should().HaveCount(3);
        deserialized.Hashes.Should().ContainInOrder("hash1", "hash2", "hash3");
    }

    [Fact]
    public void RekorReceiptVerificationResult_WhenValid_IsHealthy()
    {
        // Arrange
        var result = new RekorReceiptVerificationResult
        {
            IsValid = true,
            CheckpointSignatureValid = true,
            InclusionProofValid = true,
            EntryHashValid = true,
            TimeSkewAcceptable = true,
            VerifiedAt = DateTimeOffset.UtcNow
        };

        // Assert
        result.IsValid.Should().BeTrue();
        result.Errors.Should().BeEmpty();
    }

    [Fact]
    public void RekorReceiptVerificationResult_WhenInvalid_ContainsErrors()
    {
        // Arrange
        var result = new RekorReceiptVerificationResult
        {
            IsValid = false,
            CheckpointSignatureValid = false,
            InclusionProofValid = true,
            EntryHashValid = true,
            TimeSkewAcceptable = true,
            Errors = ["Checkpoint signature verification failed"],
            VerifiedAt = DateTimeOffset.UtcNow
        };

        // Assert
        result.IsValid.Should().BeFalse();
        result.Errors.Should().Contain("Checkpoint signature verification failed");
    }

    [Fact]
    public void RekorReceiptVerificationOptions_HasSensibleDefaults()
    {
        // Arrange & Act
        var options = new RekorReceiptVerificationOptions();

        // Assert
        options.MaxClockSkewSeconds.Should().Be(300); // 5 minutes
        options.AllowOfflineVerification.Should().BeTrue();
        options.MaxOfflineCheckpointAgeHours.Should().Be(24);
        options.RequireCheckpointSignature.Should().BeTrue();
    }

    [Fact]
    public void RekorReceipt_IncludesOptionalPolicyFields()
    {
        // Arrange
        var receipt = CreateValidReceipt() with
        {
            PolicyHash = "sha256:policy123",
            GraphRevision = "rev-456",
            IdempotencyKey = "idem-789"
        };

        // Act
        var json = JsonSerializer.Serialize(receipt);
        var deserialized = JsonSerializer.Deserialize<RekorReceipt>(json);

        // Assert
        deserialized!.PolicyHash.Should().Be("sha256:policy123");
        deserialized.GraphRevision.Should().Be("rev-456");
        deserialized.IdempotencyKey.Should().Be("idem-789");
    }

    [Fact]
    public void RekorReceipt_SchemaVersion_DefaultsTo1_0_0()
    {
        // Arrange
        var receipt = CreateValidReceipt();

        // Assert
        receipt.SchemaVersion.Should().Be("1.0.0");
    }

    private static RekorReceipt CreateValidReceipt() => new()
    {
        Uuid = "0123456789abcdef0123456789abcdef0123456789abcdef0123456789abcdef",
        LogIndex = 12345,
        LogId = "rekor.sigstore.dev - 2605736670972794746",
        LogUrl = "https://rekor.sigstore.dev",
        IntegratedTime = DateTimeOffset.UtcNow.ToUnixTimeSeconds(),
        EntryKind = "dsse",
        EntryBodyHash = "sha256:abcdef123456",
        Checkpoint = new RekorCheckpointV2
        {
            Origin = "rekor.sigstore.dev - 2605736670972794746",
            Size = 50000,
            RootHash = "abc123def456",
            Timestamp = DateTimeOffset.UtcNow.ToUnixTimeSeconds(),
            Signature = "MEUCIQDtest..."
        },
        InclusionProof = new RekorInclusionProofV2
        {
            LogIndex = 12345,
            TreeSize = 50000,
            RootHash = "abc123def456",
            LeafHash = "leaf789xyz",
            Hashes = ["hash1", "hash2", "hash3"]
        }
    };
}
@@ -0,0 +1,249 @@
// -----------------------------------------------------------------------------
// TransparencyStatusProviderTests.cs
// Description: Unit tests for transparency status provider.
// -----------------------------------------------------------------------------

using FluentAssertions;
using Microsoft.Extensions.Logging.Abstractions;
using Microsoft.Extensions.Options;
using StellaOps.Attestor.Core.Transparency;
using Xunit;
using OptionsFactory = Microsoft.Extensions.Options.Options;

namespace StellaOps.Attestor.Core.Tests.Transparency;

[Trait("Category", "Unit")]
[Trait("Category", "Transparency")]
public sealed class TransparencyStatusProviderTests : IDisposable
{
    private readonly FakeTimeProvider _timeProvider;
    private readonly TransparencyStatusProvider _provider;

    public TransparencyStatusProviderTests()
    {
        _timeProvider = new FakeTimeProvider(DateTimeOffset.UtcNow);
        var options = OptionsFactory.Create(new TransparencyStatusOptions
        {
            MaxCheckpointAgeHours = 24,
            CriticalCheckpointAgeHours = 72,
            RekorBackendUrl = "https://rekor.sigstore.dev"
        });

        _provider = new TransparencyStatusProvider(
            NullLogger<TransparencyStatusProvider>.Instance,
            options,
            _timeProvider);
    }

    [Fact]
    public async Task GetStatusAsync_WhenNeverSynced_ReturnsUnknown()
    {
        // Act
        var status = await _provider.GetStatusAsync();

        // Assert
        status.Status.Should().Be(TransparencyStatusLevel.Unknown);
        status.LastSyncAt.Should().BeNull();
        status.Message.Should().Contain("never synced");
    }

    [Fact]
    public async Task GetStatusAsync_WhenRecentlySync_ReturnsHealthy()
    {
        // Arrange
        var syncTime = _timeProvider.GetUtcNow().AddHours(-1);
        _provider.RecordSync(syncTime, 12345);

        // Act
        var status = await _provider.GetStatusAsync();

        // Assert
        status.Status.Should().Be(TransparencyStatusLevel.Healthy);
        status.LastSyncAt.Should().Be(syncTime);
        status.LastSyncAgeHours.Should().BeApproximately(1, 0.1);
        status.LastCheckpointTreeSize.Should().Be(12345);
    }

    [Fact]
    public async Task GetStatusAsync_WhenSyncStale_ReturnsDegraded()
    {
        // Arrange - sync 30 hours ago (exceeds 24h threshold)
        var syncTime = _timeProvider.GetUtcNow().AddHours(-30);
        _provider.RecordSync(syncTime, 12345);

        // Act
        var status = await _provider.GetStatusAsync();

        // Assert
        status.Status.Should().Be(TransparencyStatusLevel.Degraded);
        status.LastSyncAgeHours.Should().BeApproximately(30, 0.1);
        status.Message.Should().Contain("stale");
    }

    [Fact]
    public async Task GetStatusAsync_WhenSyncCriticallyStale_ReturnsUnhealthy()
    {
        // Arrange - sync 80 hours ago (exceeds 72h critical threshold)
        var syncTime = _timeProvider.GetUtcNow().AddHours(-80);
        _provider.RecordSync(syncTime, 12345);

        // Act
        var status = await _provider.GetStatusAsync();

        // Assert
        status.Status.Should().Be(TransparencyStatusLevel.Unhealthy);
        status.Message.Should().Contain("critically stale");
    }

    [Fact]
    public async Task GetStatusAsync_WhenOfflineModeWithFreshCheckpoint_ReturnsOffline()
    {
        // Arrange - create provider without backend URL (offline mode)
        var offlineOptions = OptionsFactory.Create(new TransparencyStatusOptions
        {
            MaxCheckpointAgeHours = 24,
            RekorBackendUrl = null // Offline mode
        });

        using var offlineProvider = new TransparencyStatusProvider(
            NullLogger<TransparencyStatusProvider>.Instance,
            offlineOptions,
            _timeProvider);

        var syncTime = _timeProvider.GetUtcNow().AddHours(-1);
        offlineProvider.RecordSync(syncTime, 12345);

        // Act
        var status = await offlineProvider.GetStatusAsync();

        // Assert
        status.Status.Should().Be(TransparencyStatusLevel.Offline);
        status.OfflineMode.Should().BeTrue();
        status.Message.Should().Contain("offline mode");
    }

    [Fact]
    public void RecordSubmission_TracksMetrics()
    {
        // Arrange
        var latency1 = TimeSpan.FromMilliseconds(100);
        var latency2 = TimeSpan.FromMilliseconds(200);
        var latency3 = TimeSpan.FromMilliseconds(150);

        // Act
        _provider.RecordSubmission(true, latency1);
        _provider.RecordSubmission(true, latency2);
        _provider.RecordSubmission(false, latency3);

        // Assert
        var status = _provider.GetStatusAsync().Result;
        status.Metrics.Should().NotBeNull();
        status.Metrics!.SubmissionsLastHour.Should().Be(3);
        status.Metrics.SuccessfulSubmissionsLastHour.Should().Be(2);
        status.Metrics.FailedSubmissionsLastHour.Should().Be(1);
        status.Metrics.AvgSubmissionLatencyMs.Should().Be(150); // (100+200)/2 = 150 (only successful)
    }

    [Fact]
    public void RecordVerification_TracksMetrics()
    {
        // Act
        _provider.RecordVerification(true, false);
        _provider.RecordVerification(true, true);
        _provider.RecordVerification(false, false);

        // Assert
        var status = _provider.GetStatusAsync().Result;
        status.Metrics.Should().NotBeNull();
        status.Metrics!.VerificationsLastHour.Should().Be(3);
        status.Metrics.SuccessfulVerificationsLastHour.Should().Be(2);
        status.Metrics.OfflineVerificationsLastHour.Should().Be(1);
    }

    [Fact]
    public async Task GetStatusAsync_ReportsQueueDepths()
    {
        // Arrange
        _provider.UpdateQueueDepths(submissionQueue: 5, deadLetterQueue: 2);

        // Act
        var status = await _provider.GetStatusAsync();

        // Assert
        status.SubmissionQueueDepth.Should().Be(5);
        status.DeadLetterQueueDepth.Should().Be(2);
    }

    [Fact]
    public async Task GetStatusAsync_ReportsConfiguration()
    {
        // Act
        var status = await _provider.GetStatusAsync();

        // Assert
        status.MaxCheckpointAgeHours.Should().Be(24);
        status.RekorBackend.Should().Be("https://rekor.sigstore.dev");
        status.EnforcementEnabled.Should().BeFalse(); // default
    }

    [Fact]
    public async Task IsCheckpointFresh_WhenWithinThreshold_ReturnsTrue()
    {
        // Arrange
        var syncTime = _timeProvider.GetUtcNow().AddHours(-12);
        _provider.RecordSync(syncTime, 12345);

        // Act
        var status = await _provider.GetStatusAsync();

        // Assert
        status.IsCheckpointFresh.Should().BeTrue();
    }

    [Fact]
    public async Task IsCheckpointFresh_WhenExceedsThreshold_ReturnsFalse()
    {
        // Arrange
        var syncTime = _timeProvider.GetUtcNow().AddHours(-30);
        _provider.RecordSync(syncTime, 12345);

        // Act
        var status = await _provider.GetStatusAsync();

        // Assert
        status.IsCheckpointFresh.Should().BeFalse();
    }

    [Fact]
    public async Task IsHealthy_WhenHealthyOrDegraded_ReturnsTrue()
    {
        // Arrange - fresh sync (healthy)
        var syncTime = _timeProvider.GetUtcNow().AddHours(-1);
        _provider.RecordSync(syncTime, 12345);

        // Act
        var status = await _provider.GetStatusAsync();

        // Assert
        status.IsHealthy.Should().BeTrue();
    }

    public void Dispose()
    {
        _provider.Dispose();
    }

    private sealed class FakeTimeProvider : TimeProvider
    {
        private DateTimeOffset _utcNow;

        public FakeTimeProvider(DateTimeOffset utcNow)
        {
            _utcNow = utcNow;
        }

        public override DateTimeOffset GetUtcNow() => _utcNow;

        public void Advance(TimeSpan duration) => _utcNow = _utcNow.Add(duration);
    }
}
@@ -121,6 +121,30 @@ public sealed class AttestorOptions
    public int PollIntervalMs { get; set; } = 250;

    public int MaxAttempts { get; set; } = 60;

    /// <summary>
    /// Log version to use: Auto, V1, or V2.
    /// V2 uses tile-based (Sunlight) log structure.
    /// Default: Auto (backward compatible).
    /// </summary>
    public string Version { get; set; } = "Auto";

    /// <summary>
    /// Base URL for tile fetching in Rekor v2.
    /// If not specified, defaults to {Url}/tile/.
    /// </summary>
    public string? TileBaseUrl { get; set; }

    /// <summary>
    /// Log ID (SHA-256 of log's public key) for multi-log environments.
    /// Production Rekor: c0d23d6ad406973f9559f3ba2d1ca01f84147d8ffc5b8445c224f98b9591801d
    /// </summary>
    public string? LogId { get; set; }

    /// <summary>
    /// When true and Version is Auto, prefer tile-based proofs over v1 proofs.
    /// </summary>
    public bool PreferTileProofs { get; set; } = false;
}

public sealed class RekorMirrorOptions : RekorBackendOptions
@@ -0,0 +1,208 @@
using System;
using System.Collections.Generic;
using System.Threading;
using System.Threading.Tasks;

namespace StellaOps.Attestor.Core.Rekor;

/// <summary>
/// Client for fetching proofs from Rekor v2 tile-based logs.
/// Tile-based logs store the Merkle tree in fixed-size chunks (tiles)
/// that can be fetched directly for offline-capable verification.
/// </summary>
public interface IRekorTileClient
{
    /// <summary>
    /// Fetches the latest signed checkpoint from the tile log.
    /// The checkpoint contains the current tree size and root hash.
    /// </summary>
    /// <param name="backend">Rekor backend configuration</param>
    /// <param name="cancellationToken">Cancellation token</param>
    /// <returns>The checkpoint response, or null if not available</returns>
    Task<RekorTileCheckpoint?> GetCheckpointAsync(
        RekorBackend backend,
        CancellationToken cancellationToken = default);

    /// <summary>
    /// Fetches a tile from the log.
    /// Tiles are fixed-size chunks of the Merkle tree.
    /// </summary>
    /// <param name="backend">Rekor backend configuration</param>
    /// <param name="level">The tree level (0 = leaves)</param>
    /// <param name="index">The tile index at this level</param>
    /// <param name="cancellationToken">Cancellation token</param>
    /// <returns>The tile data, or null if not found</returns>
    Task<RekorTileData?> GetTileAsync(
        RekorBackend backend,
        int level,
        long index,
        CancellationToken cancellationToken = default);

    /// <summary>
    /// Fetches an entry from the log by its index.
    /// </summary>
    /// <param name="backend">Rekor backend configuration</param>
    /// <param name="logIndex">The log index of the entry</param>
    /// <param name="cancellationToken">Cancellation token</param>
    /// <returns>The entry data, or null if not found</returns>
    Task<RekorTileEntry?> GetEntryAsync(
        RekorBackend backend,
        long logIndex,
        CancellationToken cancellationToken = default);

    /// <summary>
    /// Computes an inclusion proof for an entry using tile data.
    /// This fetches the necessary tiles and constructs the proof path.
    /// </summary>
    /// <param name="backend">Rekor backend configuration</param>
    /// <param name="logIndex">The log index of the entry</param>
    /// <param name="treeSize">The tree size for the proof (from checkpoint)</param>
    /// <param name="cancellationToken">Cancellation token</param>
    /// <returns>The computed proof, or null if tiles are unavailable</returns>
    Task<RekorTileInclusionProof?> ComputeInclusionProofAsync(
        RekorBackend backend,
        long logIndex,
        long treeSize,
        CancellationToken cancellationToken = default);
}
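
// Illustrative usage sketch (not part of this change): one way a verifier might chain
// the calls above. The method name and locals are hypothetical; error handling and
// retries are omitted.
internal static class RekorTileClientUsageExample
{
    public static async Task<RekorTileInclusionProof?> FetchProofAsync(
        IRekorTileClient client,
        RekorBackend backend,
        long logIndex,
        CancellationToken cancellationToken = default)
    {
        // The latest signed checkpoint pins the tree size the proof is computed against.
        var checkpoint = await client.GetCheckpointAsync(backend, cancellationToken);
        if (checkpoint is null)
        {
            return null;
        }

        // Tiles are fetched internally; the proof carries leaf hash, sibling path and root.
        return await client.ComputeInclusionProofAsync(backend, logIndex, checkpoint.TreeSize, cancellationToken);
    }
}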

/// <summary>
/// Checkpoint from a Rekor v2 tile-based log.
/// </summary>
public sealed class RekorTileCheckpoint
{
    /// <summary>
    /// The log origin identifier.
    /// </summary>
    public required string Origin { get; init; }

    /// <summary>
    /// Current tree size (number of entries).
    /// </summary>
    public required long TreeSize { get; init; }

    /// <summary>
    /// Root hash of the Merkle tree at this size.
    /// </summary>
    public required byte[] RootHash { get; init; }

    /// <summary>
    /// Raw checkpoint note text for signature verification.
    /// </summary>
    public required string RawCheckpoint { get; init; }

    /// <summary>
    /// Signatures on the checkpoint.
    /// </summary>
    public required IReadOnlyList<RekorCheckpointSignature> Signatures { get; init; }
}

/// <summary>
/// A signature on a Rekor checkpoint.
/// </summary>
public sealed class RekorCheckpointSignature
{
    /// <summary>
    /// Key ID or hint for the signing key.
    /// </summary>
    public required string KeyHint { get; init; }

    /// <summary>
    /// The signature bytes.
    /// </summary>
    public required byte[] Signature { get; init; }
}

/// <summary>
/// Data from a Merkle tree tile.
/// </summary>
public sealed class RekorTileData
{
    /// <summary>
    /// The level in the tree (0 = leaf level).
    /// </summary>
    public required int Level { get; init; }

    /// <summary>
    /// The tile index at this level.
    /// </summary>
    public required long Index { get; init; }

    /// <summary>
    /// The tile width (number of entries in this tile, may be partial).
    /// </summary>
    public required int Width { get; init; }

    /// <summary>
    /// The hash data in this tile.
    /// Each hash is 32 bytes (SHA-256).
    /// </summary>
    public required byte[] Hashes { get; init; }

    /// <summary>
    /// Gets the hash at the given position within the tile.
    /// </summary>
    public byte[] GetHash(int position)
    {
        if (position < 0 || position >= Width)
        {
            throw new ArgumentOutOfRangeException(nameof(position));
        }

        var result = new byte[32];
        Array.Copy(Hashes, position * 32, result, 0, 32);
        return result;
    }
}

/// <summary>
/// An entry from a Rekor tile-based log.
/// </summary>
public sealed class RekorTileEntry
{
    /// <summary>
    /// The log index of this entry.
    /// </summary>
    public required long LogIndex { get; init; }

    /// <summary>
    /// The entry data (typically the leaf hash input).
    /// </summary>
    public required byte[] Data { get; init; }

    /// <summary>
    /// The integrated time when this entry was added.
    /// </summary>
    public DateTimeOffset? IntegratedTime { get; init; }
}

/// <summary>
/// An inclusion proof computed from tile data.
/// </summary>
public sealed class RekorTileInclusionProof
{
    /// <summary>
    /// The log index of the entry.
    /// </summary>
    public required long LogIndex { get; init; }

    /// <summary>
    /// The tree size for this proof.
    /// </summary>
    public required long TreeSize { get; init; }

    /// <summary>
    /// The leaf hash of the entry.
    /// </summary>
    public required byte[] LeafHash { get; init; }

    /// <summary>
    /// The proof path (sibling hashes from leaf to root).
    /// </summary>
    public required IReadOnlyList<byte[]> Path { get; init; }

    /// <summary>
    /// The expected root hash for verification.
    /// </summary>
    public required byte[] RootHash { get; init; }
}
@@ -2,15 +2,82 @@ using System;

namespace StellaOps.Attestor.Core.Rekor;

/// <summary>
/// Specifies the Rekor log version/format to use.
/// </summary>
public enum RekorLogVersion
{
    /// <summary>
    /// Automatically detect log version from server capabilities.
    /// </summary>
    Auto = 0,

    /// <summary>
    /// Rekor v1 with Trillian-backed Merkle tree.
    /// </summary>
    V1 = 1,

    /// <summary>
    /// Rekor v2 with tile-based (Sunlight) log structure.
    /// Provides cheaper operation and simpler verification.
    /// </summary>
    V2 = 2
}

public sealed class RekorBackend
{
    public required string Name { get; init; }

    public required Uri Url { get; init; }

    /// <summary>
    /// Log version to use. Default is Auto for backward compatibility.
    /// Set to V2 to explicitly opt into tile-based verification.
    /// </summary>
    public RekorLogVersion Version { get; init; } = RekorLogVersion.Auto;

    /// <summary>
    /// Base URL for tile fetching in Rekor v2.
    /// If not specified, tiles are fetched from {Url}/tile/.
    /// Only used when Version is V2 or Auto detects v2 capabilities.
    /// </summary>
    public Uri? TileBaseUrl { get; init; }

    /// <summary>
    /// Log ID (SHA-256 of the log's public key) for multi-log environments.
    /// Used to match entries to the correct log when verifying bundles.
    /// Production Rekor: c0d23d6ad406973f9559f3ba2d1ca01f84147d8ffc5b8445c224f98b9591801d
    /// </summary>
    public string? LogId { get; init; }

    /// <summary>
    /// Whether to prefer tile-based proofs when available.
    /// When true and Version is Auto, will attempt tile fetching first.
    /// </summary>
    public bool PreferTileProofs { get; init; } = false;

    public TimeSpan ProofTimeout { get; init; } = TimeSpan.FromSeconds(15);

    public TimeSpan PollInterval { get; init; } = TimeSpan.FromMilliseconds(250);

    public int MaxAttempts { get; init; } = 60;

    /// <summary>
    /// Returns the effective tile base URL, defaulting to {Url}/tile/ if not specified.
    /// </summary>
    public Uri GetEffectiveTileBaseUrl()
    {
        if (TileBaseUrl is not null)
        {
            return TileBaseUrl;
        }

        var baseUri = Url.ToString().TrimEnd('/');
        return new Uri($"{baseUri}/tile/", UriKind.Absolute);
    }

    /// <summary>
    /// Known log ID for the public Sigstore Rekor production instance.
    /// </summary>
    public const string SigstoreProductionLogId = "c0d23d6ad406973f9559f3ba2d1ca01f84147d8ffc5b8445c224f98b9591801d";
}
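
// Illustrative sketch (not part of this change): opting a backend into tile-based
// proofs. The name and URL are example values; GetEffectiveTileBaseUrl() falls back
// to {Url}/tile/ because TileBaseUrl is left unset here.
internal static class RekorBackendConfigurationExample
{
    public static RekorBackend CreateTileBackend() => new()
    {
        Name = "primary",
        Url = new Uri("https://rekor.sigstore.dev"),
        Version = RekorLogVersion.V2,
        LogId = RekorBackend.SigstoreProductionLogId,
        PreferTileProofs = true
        // GetEffectiveTileBaseUrl() resolves to https://rekor.sigstore.dev/tile/ here.
    };
}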
@@ -0,0 +1,429 @@
// -----------------------------------------------------------------------------
// RekorReceipt.cs
// Description: Standardized Rekor transparency log receipt per Sigstore conventions.
// Implements receipt schema standardization from SBOM-VEX-policy advisory.
// References: https://docs.sigstore.dev/logging/overview/, Rekor v2 GA
// -----------------------------------------------------------------------------

using System.Text.Json.Serialization;

namespace StellaOps.Attestor.Core.Rekor;

/// <summary>
/// Standardized Rekor transparency log receipt following Sigstore conventions.
/// </summary>
/// <remarks>
/// This receipt format aligns with:
/// - Rekor v2 GA specification (https://blog.sigstore.dev/rekor-v2-ga/)
/// - Sigstore bundle format (https://docs.sigstore.dev/bundle/)
/// - RFC 6962 certificate transparency log semantics
///
/// Design principles:
/// - All fields use deterministic JSON property names
/// - Timestamps use Unix seconds for interoperability
/// - Hashes use lowercase hex encoding
/// - Inclusion proofs follow RFC 6962 structure
/// </remarks>
public sealed record RekorReceipt
{
    /// <summary>
    /// Schema version for this receipt format.
    /// </summary>
    [JsonPropertyName("schemaVersion")]
    public string SchemaVersion { get; init; } = "1.0.0";

    /// <summary>
    /// Unique entry identifier (64-character hex string derived from entry hash).
    /// </summary>
    [JsonPropertyName("uuid")]
    public required string Uuid { get; init; }

    /// <summary>
    /// Log index (position in the log, monotonically increasing).
    /// </summary>
    [JsonPropertyName("logIndex")]
    public required long LogIndex { get; init; }

    /// <summary>
    /// Log ID identifying the specific Rekor instance/shard.
    /// </summary>
    [JsonPropertyName("logId")]
    public required string LogId { get; init; }

    /// <summary>
    /// Base URL of the Rekor log instance.
    /// </summary>
    [JsonPropertyName("logUrl")]
    public required string LogUrl { get; init; }

    /// <summary>
    /// Unix timestamp (seconds) when the entry was integrated into the log.
    /// </summary>
    [JsonPropertyName("integratedTime")]
    public required long IntegratedTime { get; init; }

    /// <summary>
    /// Entry kind (e.g., "intoto", "hashedrekord", "dsse").
    /// </summary>
    [JsonPropertyName("entryKind")]
    public required string EntryKind { get; init; }

    /// <summary>
    /// Entry API version within the kind.
    /// </summary>
    [JsonPropertyName("entryVersion")]
    public string EntryVersion { get; init; } = "0.0.2";

    /// <summary>
    /// SHA-256 hash of the canonicalized entry body (lowercase hex).
    /// </summary>
    [JsonPropertyName("entryBodyHash")]
    public required string EntryBodyHash { get; init; }

    /// <summary>
    /// Signed checkpoint (signed tree head) in note format.
    /// </summary>
    [JsonPropertyName("checkpoint")]
    public required RekorCheckpointV2 Checkpoint { get; init; }

    /// <summary>
    /// Inclusion proof demonstrating entry is in the log.
    /// </summary>
    [JsonPropertyName("inclusionProof")]
    public required RekorInclusionProofV2 InclusionProof { get; init; }

    /// <summary>
    /// Optional SET (Signed Entry Timestamp) for backward compatibility.
    /// </summary>
    [JsonPropertyName("signedEntryTimestamp")]
    public string? SignedEntryTimestamp { get; init; }

    /// <summary>
    /// Policy hash linking this receipt to a specific policy evaluation.
    /// </summary>
    [JsonPropertyName("policyHash")]
    public string? PolicyHash { get; init; }

    /// <summary>
    /// Graph revision ID for reachability context.
    /// </summary>
    [JsonPropertyName("graphRevision")]
    public string? GraphRevision { get; init; }

    /// <summary>
    /// Idempotency key used for submission (for deduplication tracking).
    /// </summary>
    [JsonPropertyName("idempotencyKey")]
    public string? IdempotencyKey { get; init; }

    // Computed properties

    /// <summary>
    /// Gets the integrated time as a DateTimeOffset (UTC).
    /// </summary>
    [JsonIgnore]
    public DateTimeOffset IntegratedTimeUtc =>
        DateTimeOffset.FromUnixTimeSeconds(IntegratedTime);

    /// <summary>
    /// Gets the full entry URL for direct access.
    /// </summary>
    [JsonIgnore]
    public string EntryUrl => $"{LogUrl.TrimEnd('/')}/api/v1/log/entries/{Uuid}";
}

/// <summary>
/// Rekor v2 checkpoint (signed tree head) following note format.
/// </summary>
/// <remarks>
/// Checkpoint format per Rekor v2 specification:
/// - Origin identifies the log
/// - Size is the tree size at checkpoint
/// - RootHash is the Merkle root
/// - Signature is over the checkpoint note body
/// </remarks>
public sealed record RekorCheckpointV2
{
    /// <summary>
    /// Origin line identifying the log (e.g., "rekor.sigstore.dev - 2605736670972794746").
    /// </summary>
    [JsonPropertyName("origin")]
    public required string Origin { get; init; }

    /// <summary>
    /// Tree size at time of checkpoint.
    /// </summary>
    [JsonPropertyName("size")]
    public required long Size { get; init; }

    /// <summary>
    /// Merkle tree root hash (lowercase hex).
    /// </summary>
    [JsonPropertyName("rootHash")]
    public required string RootHash { get; init; }

    /// <summary>
    /// Unix timestamp (seconds) of the checkpoint.
    /// </summary>
    [JsonPropertyName("timestamp")]
    public required long Timestamp { get; init; }

    /// <summary>
    /// Base64-encoded signature over the checkpoint note.
    /// </summary>
    [JsonPropertyName("signature")]
    public required string Signature { get; init; }

    /// <summary>
    /// Key ID or hint for signature verification.
    /// </summary>
    [JsonPropertyName("keyHint")]
    public string? KeyHint { get; init; }

    /// <summary>
    /// Raw note body for signature verification (base64-encoded).
    /// </summary>
    [JsonPropertyName("noteBody")]
    public string? NoteBody { get; init; }

    /// <summary>
    /// Gets the timestamp as a DateTimeOffset (UTC).
    /// </summary>
    [JsonIgnore]
    public DateTimeOffset TimestampUtc =>
        DateTimeOffset.FromUnixTimeSeconds(Timestamp);
}
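
// Illustrative sketch (not part of this change): the shape of the signed checkpoint
// note that Origin, Size and RootHash are parsed from. Values below are examples only.
//
//   rekor.sigstore.dev - 2605736670972794746
//   50000
//   q0I9atQGlz+VWfO6LRygH4QUfY/8W4RFwiT5i5WRgB0=
//
//   (signature lines follow the blank line: an em dash, the signer name, then the
//    base64-encoded key hint and signature)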

/// <summary>
/// Rekor v2 inclusion proof following RFC 6962.
/// </summary>
/// <remarks>
/// Inclusion proof structure:
/// - LeafHash is H(0x00 || entry)
/// - Hashes are the sibling nodes from leaf to root
/// - TreeSize and LogIndex define the proof context
/// </remarks>
public sealed record RekorInclusionProofV2
{
    /// <summary>
    /// Log index of the entry being proven.
    /// </summary>
    [JsonPropertyName("logIndex")]
    public required long LogIndex { get; init; }

    /// <summary>
    /// Tree size at time of proof generation.
    /// </summary>
    [JsonPropertyName("treeSize")]
    public required long TreeSize { get; init; }

    /// <summary>
    /// Root hash at time of proof (lowercase hex).
    /// </summary>
    [JsonPropertyName("rootHash")]
    public required string RootHash { get; init; }

    /// <summary>
    /// Leaf hash (SHA-256 of 0x00 || entry body, lowercase hex).
    /// </summary>
    [JsonPropertyName("leafHash")]
    public required string LeafHash { get; init; }

    /// <summary>
    /// Inclusion proof hashes from leaf to root (lowercase hex, ordered).
    /// </summary>
    [JsonPropertyName("hashes")]
    public required IReadOnlyList<string> Hashes { get; init; }

    /// <summary>
    /// Checkpoint reference containing the signed tree head.
    /// </summary>
    [JsonPropertyName("checkpoint")]
    public string? CheckpointRef { get; init; }
}
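
// Illustrative sketch (not part of this change): the RFC 6962 / RFC 9162 root
// recomputation the proof above feeds into. Hashing and hex decoding are fully
// qualified so the sketch stands alone; a production verifier would sit behind
// IRekorReceiptVerifier instead.
internal static class InclusionProofMathExample
{
    // Recomputes the root from the leaf hash and sibling path and compares it to RootHash.
    public static bool RootMatches(RekorInclusionProofV2 proof)
    {
        var hash = System.Convert.FromHexString(proof.LeafHash);
        long fn = proof.LogIndex;
        long sn = proof.TreeSize - 1;

        foreach (var siblingHex in proof.Hashes)
        {
            if (sn == 0)
            {
                return false; // proof is longer than the path to the root
            }

            var sibling = System.Convert.FromHexString(siblingHex);
            if ((fn & 1) == 1 || fn == sn)
            {
                hash = HashChildren(sibling, hash);
                if ((fn & 1) == 0)
                {
                    // Skip levels where this node is the rightmost node of a partial tree.
                    while (fn != 0 && (fn & 1) == 0)
                    {
                        fn >>= 1;
                        sn >>= 1;
                    }
                }
            }
            else
            {
                hash = HashChildren(hash, sibling);
            }

            fn >>= 1;
            sn >>= 1;
        }

        return sn == 0 &&
               proof.RootHash.Equals(System.Convert.ToHexString(hash), System.StringComparison.OrdinalIgnoreCase);
    }

    // Interior node hash per RFC 6962: SHA-256(0x01 || left || right).
    private static byte[] HashChildren(byte[] left, byte[] right)
    {
        var buffer = new byte[1 + left.Length + right.Length];
        buffer[0] = 0x01;
        left.CopyTo(buffer, 1);
        right.CopyTo(buffer, 1 + left.Length);
        return System.Security.Cryptography.SHA256.HashData(buffer);
    }
}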

/// <summary>
/// Result of verifying a Rekor receipt.
/// </summary>
public sealed record RekorReceiptVerificationResult
{
    /// <summary>
    /// Whether the receipt is valid.
    /// </summary>
    public required bool IsValid { get; init; }

    /// <summary>
    /// Whether the checkpoint signature verified.
    /// </summary>
    public required bool CheckpointSignatureValid { get; init; }

    /// <summary>
    /// Whether the inclusion proof verified against the root.
    /// </summary>
    public required bool InclusionProofValid { get; init; }

    /// <summary>
    /// Whether the entry hash matches the leaf.
    /// </summary>
    public required bool EntryHashValid { get; init; }

    /// <summary>
    /// Time skew in seconds (positive = receipt ahead of local clock).
    /// </summary>
    public double TimeSkewSeconds { get; init; }

    /// <summary>
    /// Whether time skew is within acceptable bounds.
    /// </summary>
    public required bool TimeSkewAcceptable { get; init; }

    /// <summary>
    /// Any verification errors encountered.
    /// </summary>
    public IReadOnlyList<string> Errors { get; init; } = [];

    /// <summary>
    /// Verification diagnostics for debugging.
    /// </summary>
    public IReadOnlyDictionary<string, string> Diagnostics { get; init; } =
        new Dictionary<string, string>();

    /// <summary>
    /// When the verification was performed (UTC).
    /// </summary>
    public required DateTimeOffset VerifiedAt { get; init; }

    /// <summary>
    /// Whether this was verified in offline mode.
    /// </summary>
    public bool OfflineVerification { get; init; }
}

/// <summary>
/// Options for Rekor receipt verification.
/// </summary>
public sealed record RekorReceiptVerificationOptions
{
    /// <summary>
    /// Maximum allowed clock skew in seconds (default: 300 = 5 minutes).
    /// </summary>
    public int MaxClockSkewSeconds { get; init; } = 300;

    /// <summary>
    /// Whether to allow offline verification using cached checkpoints.
    /// </summary>
    public bool AllowOfflineVerification { get; init; } = true;

    /// <summary>
    /// Path to offline checkpoint bundle for air-gapped verification.
    /// </summary>
    public string? OfflineCheckpointBundlePath { get; init; }

    /// <summary>
    /// Maximum checkpoint age in hours for offline verification (default: 24).
    /// </summary>
    public int MaxOfflineCheckpointAgeHours { get; init; } = 24;

    /// <summary>
    /// Whether to require checkpoint signature verification.
    /// </summary>
    public bool RequireCheckpointSignature { get; init; } = true;

    /// <summary>
    /// Trusted public keys for checkpoint verification (PEM or base64 DER).
    /// </summary>
    public IReadOnlyList<string> TrustedPublicKeys { get; init; } = [];

    /// <summary>
    /// Trusted log IDs (if empty, all known logs are trusted).
    /// </summary>
    public IReadOnlyList<string> TrustedLogIds { get; init; } = [];
}

/// <summary>
/// Service for verifying Rekor receipts.
/// </summary>
public interface IRekorReceiptVerifier
{
    /// <summary>
    /// Verifies a Rekor receipt.
    /// </summary>
    Task<RekorReceiptVerificationResult> VerifyAsync(
        RekorReceipt receipt,
        RekorReceiptVerificationOptions? options = null,
        CancellationToken cancellationToken = default);

    /// <summary>
    /// Verifies the inclusion proof without network access.
    /// </summary>
    RekorReceiptVerificationResult VerifyInclusionProofOffline(
        RekorReceipt receipt,
        byte[] entryBody,
        RekorReceiptVerificationOptions? options = null);
}

/// <summary>
/// Factory for creating Rekor receipts from submission responses.
/// </summary>
public static class RekorReceiptFactory
{
    /// <summary>
    /// Creates a standardized receipt from a submission response.
    /// </summary>
    public static RekorReceipt FromSubmissionResponse(
        RekorSubmissionResponse response,
        string logId,
        string logUrl,
        string entryKind,
        string entryBodyHash,
        string? policyHash = null,
        string? graphRevision = null,
        string? idempotencyKey = null)
    {
        ArgumentNullException.ThrowIfNull(response);
        ArgumentException.ThrowIfNullOrEmpty(logId);
        ArgumentException.ThrowIfNullOrEmpty(logUrl);
        ArgumentException.ThrowIfNullOrEmpty(entryKind);
        ArgumentException.ThrowIfNullOrEmpty(entryBodyHash);

        if (response.Proof?.Checkpoint is null)
        {
            throw new ArgumentException("Response must include checkpoint proof", nameof(response));
        }

        if (response.Proof?.Inclusion is null)
        {
            throw new ArgumentException("Response must include inclusion proof", nameof(response));
        }

        return new RekorReceipt
        {
            Uuid = response.Uuid,
            LogIndex = response.Index ?? throw new ArgumentException("Response must include index"),
            LogId = logId,
            LogUrl = logUrl,
            IntegratedTime = response.IntegratedTime ?? throw new ArgumentException("Response must include integrated time"),
            EntryKind = entryKind,
            EntryBodyHash = entryBodyHash,
            Checkpoint = new RekorCheckpointV2
            {
                Origin = response.Proof.Checkpoint.Origin ?? logId,
                Size = response.Proof.Checkpoint.Size,
                RootHash = response.Proof.Checkpoint.RootHash ?? throw new ArgumentException("Checkpoint must include root hash"),
                Timestamp = response.Proof.Checkpoint.Timestamp?.ToUnixTimeSeconds() ?? response.IntegratedTime.Value,
                Signature = "" // Will be populated from actual response
            },
            InclusionProof = new RekorInclusionProofV2
            {
                LogIndex = response.Index.Value,
                TreeSize = response.Proof.Checkpoint.Size,
                RootHash = response.Proof.Checkpoint.RootHash,
                LeafHash = response.Proof.Inclusion.LeafHash ?? throw new ArgumentException("Inclusion proof must include leaf hash"),
                Hashes = response.Proof.Inclusion.Path
            },
            PolicyHash = policyHash,
            GraphRevision = graphRevision,
            IdempotencyKey = idempotencyKey
        };
    }
}
@@ -0,0 +1,46 @@
// -----------------------------------------------------------------------------
// TransparencyServiceExtensions.cs
// Description: DI extensions for transparency status services.
// -----------------------------------------------------------------------------

using Microsoft.Extensions.DependencyInjection;
using Microsoft.Extensions.DependencyInjection.Extensions;

namespace StellaOps.Attestor.Core.Transparency;

/// <summary>
/// Extension methods for registering transparency services.
/// </summary>
public static class TransparencyServiceExtensions
{
    /// <summary>
    /// Adds transparency status services to the service collection.
    /// </summary>
    /// <param name="services">The service collection.</param>
    /// <param name="configure">Optional configuration action.</param>
    /// <returns>The service collection for chaining.</returns>
    public static IServiceCollection AddTransparencyStatus(
        this IServiceCollection services,
        Action<TransparencyStatusOptions>? configure = null)
    {
        ArgumentNullException.ThrowIfNull(services);

        // Register options
        if (configure is not null)
        {
            services.Configure(configure);
        }
        else
        {
            services.AddOptions<TransparencyStatusOptions>();
        }

        // Register provider
        services.TryAddSingleton<ITransparencyStatusProvider, TransparencyStatusProvider>();

        // Ensure TimeProvider is available
        services.TryAddSingleton(TimeProvider.System);

        return services;
    }
}
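
// Illustrative usage sketch (not part of this change): wiring the provider into a host
// and reading the freshness status from a health endpoint. "services", "app" and "ct"
// are hypothetical host-builder variables.
//
//   services.AddTransparencyStatus();
//
//   var provider = app.Services.GetRequiredService<ITransparencyStatusProvider>();
//   var status = await provider.GetStatusAsync(ct);
//   // status.Status and status.LastSyncAgeHours drive the "last sync" freshness badge.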
@@ -0,0 +1,425 @@
// -----------------------------------------------------------------------------
// TransparencyStatus.cs
// Description: Transparency log freshness status for health endpoints.
// Implements "last sync" freshness badge from SBOM-VEX-policy advisory.
// -----------------------------------------------------------------------------

using System.Text.Json.Serialization;

namespace StellaOps.Attestor.Core.Transparency;

/// <summary>
/// Transparency log freshness status for health endpoints and status bars.
/// </summary>
/// <remarks>
/// Implements the "last sync" freshness badge pattern:
/// - Shows when transparency log was last successfully synced
/// - Indicates whether operations are using verified or degraded mode
/// - Provides clear guidance for air-gapped environments
/// </remarks>
public sealed record TransparencyStatus
{
    /// <summary>
    /// Overall transparency status.
    /// </summary>
    [JsonPropertyName("status")]
    public required TransparencyStatusLevel Status { get; init; }

    /// <summary>
    /// Human-readable status message.
    /// </summary>
    [JsonPropertyName("message")]
    public required string Message { get; init; }

    /// <summary>
    /// When the transparency log was last successfully synced (UTC).
    /// </summary>
    [JsonPropertyName("lastSyncAt")]
    public DateTimeOffset? LastSyncAt { get; init; }

    /// <summary>
    /// Age of the last sync in hours.
    /// </summary>
    [JsonPropertyName("lastSyncAgeHours")]
    public double? LastSyncAgeHours { get; init; }

    /// <summary>
    /// When the checkpoint was last verified (UTC).
    /// </summary>
    [JsonPropertyName("lastCheckpointVerifiedAt")]
    public DateTimeOffset? LastCheckpointVerifiedAt { get; init; }

    /// <summary>
    /// Latest verified checkpoint tree size.
    /// </summary>
    [JsonPropertyName("lastCheckpointTreeSize")]
    public long? LastCheckpointTreeSize { get; init; }

    /// <summary>
    /// Whether the service is operating in offline/air-gapped mode.
    /// </summary>
    [JsonPropertyName("offlineMode")]
    public bool OfflineMode { get; init; }

    /// <summary>
    /// Whether transparency verification is enforced (vs. best-effort).
    /// </summary>
    [JsonPropertyName("enforcementEnabled")]
    public bool EnforcementEnabled { get; init; }

    /// <summary>
    /// Configured maximum checkpoint age before warning (hours).
    /// </summary>
    [JsonPropertyName("maxCheckpointAgeHours")]
    public int MaxCheckpointAgeHours { get; init; }

    /// <summary>
    /// Primary Rekor backend URL.
    /// </summary>
    [JsonPropertyName("rekorBackend")]
    public string? RekorBackend { get; init; }

    /// <summary>
    /// Mirror Rekor backend URL (for air-gapped or fallback).
    /// </summary>
    [JsonPropertyName("rekorMirror")]
    public string? RekorMirror { get; init; }

    /// <summary>
    /// Submission queue depth (pending entries awaiting transparency anchoring).
    /// </summary>
    [JsonPropertyName("submissionQueueDepth")]
    public int SubmissionQueueDepth { get; init; }

    /// <summary>
    /// Number of entries in dead-letter queue (failed submissions).
    /// </summary>
    [JsonPropertyName("deadLetterQueueDepth")]
    public int DeadLetterQueueDepth { get; init; }

    /// <summary>
    /// Metrics for recent operations.
    /// </summary>
    [JsonPropertyName("metrics")]
    public TransparencyMetrics? Metrics { get; init; }

    /// <summary>
    /// Health check details for each backend.
    /// </summary>
    [JsonPropertyName("backends")]
    public IReadOnlyList<TransparencyBackendStatus> Backends { get; init; } = [];

    /// <summary>
    /// Whether the status indicates healthy operation.
    /// </summary>
    [JsonIgnore]
    public bool IsHealthy => Status is TransparencyStatusLevel.Healthy or TransparencyStatusLevel.Degraded;

    /// <summary>
    /// Whether the checkpoint is considered fresh.
    /// </summary>
    [JsonIgnore]
    public bool IsCheckpointFresh =>
        LastSyncAgeHours.HasValue && LastSyncAgeHours.Value <= MaxCheckpointAgeHours;
}
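
// Illustrative sketch (not part of this change): collapsing the record above into the
// one-line "last sync" freshness badge a status bar might show. Wording is an example.
internal static class FreshnessBadgeExample
{
    public static string ToBadge(TransparencyStatus status) => status switch
    {
        { LastSyncAt: null } => "transparency: never synced",
        { OfflineMode: true, IsCheckpointFresh: true } => "transparency: offline, checkpoint fresh",
        { IsCheckpointFresh: true } => $"transparency: synced {status.LastSyncAgeHours:F1}h ago",
        _ => $"transparency: stale, last sync {status.LastSyncAgeHours:F1}h ago",
    };
}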
|
||||
|
||||
/// <summary>
|
||||
/// Transparency status level for health indicators.
|
||||
/// </summary>
|
||||
public enum TransparencyStatusLevel
|
||||
{
|
||||
/// <summary>
|
||||
/// All transparency backends are healthy and synced.
|
||||
/// </summary>
|
||||
Healthy,
|
||||
|
||||
/// <summary>
|
||||
/// Operating with stale checkpoint or fallback backend.
|
||||
/// </summary>
|
||||
Degraded,
|
||||
|
||||
/// <summary>
|
||||
/// Operating in offline mode with acceptable checkpoint age.
|
||||
/// </summary>
|
||||
Offline,
|
||||
|
||||
/// <summary>
|
||||
/// Transparency verification is unavailable or severely degraded.
|
||||
/// </summary>
|
||||
Unhealthy,
|
||||
|
||||
/// <summary>
|
||||
/// Transparency status is unknown (not yet initialized).
|
||||
/// </summary>
|
||||
Unknown
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Metrics for transparency operations.
|
||||
/// </summary>
|
||||
public sealed record TransparencyMetrics
|
||||
{
|
||||
/// <summary>
|
||||
/// Total submissions in the last hour.
|
||||
/// </summary>
|
||||
[JsonPropertyName("submissionsLastHour")]
|
||||
public int SubmissionsLastHour { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Successful submissions in the last hour.
|
||||
/// </summary>
|
||||
[JsonPropertyName("successfulSubmissionsLastHour")]
|
||||
public int SuccessfulSubmissionsLastHour { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Failed submissions in the last hour.
|
||||
/// </summary>
|
||||
[JsonPropertyName("failedSubmissionsLastHour")]
|
||||
public int FailedSubmissionsLastHour { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Total verifications in the last hour.
|
||||
/// </summary>
|
||||
[JsonPropertyName("verificationsLastHour")]
|
||||
public int VerificationsLastHour { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Successful verifications in the last hour.
|
||||
/// </summary>
|
||||
[JsonPropertyName("successfulVerificationsLastHour")]
|
||||
public int SuccessfulVerificationsLastHour { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Average submission latency in milliseconds.
|
||||
/// </summary>
|
||||
[JsonPropertyName("avgSubmissionLatencyMs")]
|
||||
public double AvgSubmissionLatencyMs { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// P95 submission latency in milliseconds.
|
||||
/// </summary>
|
||||
[JsonPropertyName("p95SubmissionLatencyMs")]
|
||||
public double P95SubmissionLatencyMs { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Offline verifications in the last hour.
|
||||
/// </summary>
|
||||
[JsonPropertyName("offlineVerificationsLastHour")]
|
||||
public int OfflineVerificationsLastHour { get; init; }
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Status of a single transparency backend.
|
||||
/// </summary>
|
||||
public sealed record TransparencyBackendStatus
|
||||
{
|
||||
/// <summary>
|
||||
/// Backend identifier (e.g., "rekor.sigstore.dev", "rekor-mirror.internal").
|
||||
/// </summary>
|
||||
[JsonPropertyName("id")]
|
||||
public required string Id { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Backend URL.
|
||||
/// </summary>
|
||||
[JsonPropertyName("url")]
|
||||
public required string Url { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Whether this is the primary backend.
|
||||
/// </summary>
|
||||
[JsonPropertyName("primary")]
|
||||
public bool Primary { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Backend health status.
|
||||
/// </summary>
|
||||
[JsonPropertyName("status")]
|
||||
public required BackendHealthStatus Status { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// When the backend was last checked.
|
||||
/// </summary>
|
||||
[JsonPropertyName("lastCheckedAt")]
|
||||
public DateTimeOffset? LastCheckedAt { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Latest response latency in milliseconds.
|
||||
/// </summary>
|
||||
[JsonPropertyName("latencyMs")]
|
||||
public double? LatencyMs { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Error message if unhealthy.
|
||||
/// </summary>
|
||||
[JsonPropertyName("error")]
|
||||
public string? Error { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Latest checkpoint tree size from this backend.
|
||||
/// </summary>
|
||||
[JsonPropertyName("treeSize")]
|
||||
public long? TreeSize { get; init; }
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Health status of a backend.
|
||||
/// </summary>
|
||||
public enum BackendHealthStatus
|
||||
{
|
||||
/// <summary>
|
||||
/// Backend is healthy and responding.
|
||||
/// </summary>
|
||||
Healthy,
|
||||
|
||||
/// <summary>
|
||||
/// Backend is responding slowly.
|
||||
/// </summary>
|
||||
Slow,
|
||||
|
||||
/// <summary>
|
||||
/// Backend is unreachable or erroring.
|
||||
/// </summary>
|
||||
Unhealthy,
|
||||
|
||||
/// <summary>
|
||||
/// Backend status is unknown.
|
||||
/// </summary>
|
||||
Unknown
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Service for retrieving transparency status.
|
||||
/// </summary>
|
||||
public interface ITransparencyStatusProvider
|
||||
{
|
||||
/// <summary>
|
||||
/// Gets the current transparency status.
|
||||
/// </summary>
|
||||
Task<TransparencyStatus> GetStatusAsync(CancellationToken cancellationToken = default);
|
||||
|
||||
/// <summary>
|
||||
/// Forces a refresh of the transparency status (e.g., recheck backends).
|
||||
/// </summary>
|
||||
Task<TransparencyStatus> RefreshAsync(CancellationToken cancellationToken = default);
|
||||
|
||||
/// <summary>
|
||||
/// Records a successful submission for metrics.
|
||||
/// </summary>
|
||||
void RecordSubmission(bool success, TimeSpan latency);
|
||||
|
||||
/// <summary>
|
||||
/// Records a verification attempt for metrics.
|
||||
/// </summary>
|
||||
void RecordVerification(bool success, bool offline);
|
||||
|
||||
/// <summary>
|
||||
/// Updates the last sync timestamp.
|
||||
/// </summary>
|
||||
void RecordSync(DateTimeOffset syncTime, long treeSize);
|
||||
}
|
||||
|
||||
/// <summary>
/// Configuration for transparency status provider.
/// </summary>
public sealed record TransparencyStatusOptions
{
/// <summary>
/// Maximum checkpoint age in hours before status becomes degraded (default: 24).
/// </summary>
public int MaxCheckpointAgeHours { get; init; } = 24;

/// <summary>
/// Maximum checkpoint age in hours before status becomes unhealthy (default: 72).
/// </summary>
public int CriticalCheckpointAgeHours { get; init; } = 72;

/// <summary>
/// Backend health check interval in seconds (default: 60).
/// </summary>
public int HealthCheckIntervalSeconds { get; init; } = 60;

/// <summary>
/// Backend timeout in seconds (default: 10).
/// </summary>
public int BackendTimeoutSeconds { get; init; } = 10;

/// <summary>
/// Latency threshold for "slow" status in milliseconds (default: 2000).
/// </summary>
public int SlowLatencyThresholdMs { get; init; } = 2000;

/// <summary>
/// Whether to enable enforcement mode (fail operations without transparency).
/// </summary>
public bool EnforcementEnabled { get; init; } = false;

/// <summary>
/// Primary Rekor backend URL.
/// </summary>
public string? RekorBackendUrl { get; init; }

/// <summary>
/// Mirror Rekor backend URL.
/// </summary>
public string? RekorMirrorUrl { get; init; }
}

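// Illustrative binding sketch (the configuration section name below is an assumption made
// for this example, not something defined in this file):
//
//   services.Configure<TransparencyStatusOptions>(
//       configuration.GetSection("Attestor:Transparency"));
//
// With the defaults above, a checkpoint older than 24 h degrades the reported status and
// one older than 72 h marks it unhealthy; EnforcementEnabled = false keeps both signals
// advisory rather than blocking.
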
/// <summary>
/// Interface for checking transparency backend health.
/// Implemented in infrastructure layer with HTTP client support.
/// </summary>
public interface ITransparencyBackendHealthChecker
{
/// <summary>
/// Checks the health of a transparency backend.
/// </summary>
/// <param name="url">The backend URL to check.</param>
/// <param name="timeoutSeconds">Timeout in seconds.</param>
/// <param name="cancellationToken">Cancellation token.</param>
/// <returns>Health check result.</returns>
Task<BackendHealthCheckResult> CheckHealthAsync(
string url,
int timeoutSeconds,
CancellationToken cancellationToken = default);
}

/// <summary>
/// Result of a backend health check.
/// </summary>
public sealed record BackendHealthCheckResult
{
/// <summary>
/// Whether the backend is healthy.
/// </summary>
public required bool IsHealthy { get; init; }

/// <summary>
/// Response latency in milliseconds.
/// </summary>
public required double LatencyMs { get; init; }

/// <summary>
/// Error message if unhealthy.
/// </summary>
public string? Error { get; init; }

/// <summary>
/// Creates a healthy result.
/// </summary>
public static BackendHealthCheckResult Healthy(double latencyMs) => new()
{
IsHealthy = true,
LatencyMs = latencyMs
};

/// <summary>
/// Creates an unhealthy result.
/// </summary>
public static BackendHealthCheckResult Unhealthy(string error, double latencyMs = 0) => new()
{
IsHealthy = false,
LatencyMs = latencyMs,
Error = error
};
}
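// Minimal health-checker sketch (illustrative only; the real implementation lives in the
// infrastructure layer, and the plain GET-the-URL probe below is an assumption, not the
// documented contract):
//
//   public sealed class HttpTransparencyBackendHealthChecker : ITransparencyBackendHealthChecker
//   {
//       private readonly HttpClient _http;
//
//       public HttpTransparencyBackendHealthChecker(HttpClient http) => _http = http;
//
//       public async Task<BackendHealthCheckResult> CheckHealthAsync(
//           string url, int timeoutSeconds, CancellationToken cancellationToken = default)
//       {
//           var stopwatch = System.Diagnostics.Stopwatch.StartNew();
//           try
//           {
//               using var cts = CancellationTokenSource.CreateLinkedTokenSource(cancellationToken);
//               cts.CancelAfter(TimeSpan.FromSeconds(timeoutSeconds));
//               using var response = await _http.GetAsync(url, cts.Token);
//               stopwatch.Stop();
//               return response.IsSuccessStatusCode
//                   ? BackendHealthCheckResult.Healthy(stopwatch.Elapsed.TotalMilliseconds)
//                   : BackendHealthCheckResult.Unhealthy($"HTTP {(int)response.StatusCode}", stopwatch.Elapsed.TotalMilliseconds);
//           }
//           catch (Exception ex)
//           {
//               stopwatch.Stop();
//               return BackendHealthCheckResult.Unhealthy(ex.Message, stopwatch.Elapsed.TotalMilliseconds);
//           }
//       }
//   }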
@@ -0,0 +1,347 @@
|
||||
// -----------------------------------------------------------------------------
|
||||
// TransparencyStatusProvider.cs
|
||||
// Description: Default implementation of transparency status provider.
|
||||
// Tracks sync times, metrics, and backend health for freshness indicators.
|
||||
// -----------------------------------------------------------------------------
|
||||
|
||||
using System.Collections.Concurrent;
|
||||
using Microsoft.Extensions.Logging;
|
||||
using Microsoft.Extensions.Options;
|
||||
|
||||
namespace StellaOps.Attestor.Core.Transparency;
|
||||
|
||||
/// <summary>
|
||||
/// Default implementation of <see cref="ITransparencyStatusProvider"/>.
|
||||
/// </summary>
|
||||
public sealed class TransparencyStatusProvider : ITransparencyStatusProvider, IDisposable
|
||||
{
|
||||
private readonly ILogger<TransparencyStatusProvider> _logger;
|
||||
private readonly TransparencyStatusOptions _options;
|
||||
private readonly TimeProvider _timeProvider;
|
||||
private readonly ITransparencyBackendHealthChecker? _healthChecker;
|
||||
|
||||
private readonly object _lock = new();
|
||||
private DateTimeOffset? _lastSyncAt;
|
||||
private long _lastTreeSize;
|
||||
private DateTimeOffset? _lastCheckpointVerifiedAt;
|
||||
|
||||
// Metrics tracking (thread-safe)
|
||||
private readonly ConcurrentQueue<MetricEntry> _submissionMetrics = new();
|
||||
private readonly ConcurrentQueue<MetricEntry> _verificationMetrics = new();
|
||||
|
||||
// Backend status cache
|
||||
private readonly ConcurrentDictionary<string, TransparencyBackendStatus> _backendStatuses = new();
|
||||
private DateTimeOffset _lastHealthCheck = DateTimeOffset.MinValue;
|
||||
|
||||
// Queue depth tracking
|
||||
private int _submissionQueueDepth;
|
||||
private int _deadLetterQueueDepth;
|
||||
|
||||
public TransparencyStatusProvider(
|
||||
ILogger<TransparencyStatusProvider> logger,
|
||||
IOptions<TransparencyStatusOptions> options,
|
||||
TimeProvider timeProvider,
|
||||
ITransparencyBackendHealthChecker? healthChecker = null)
|
||||
{
|
||||
_logger = logger ?? throw new ArgumentNullException(nameof(logger));
|
||||
_options = options?.Value ?? throw new ArgumentNullException(nameof(options));
|
||||
_timeProvider = timeProvider ?? throw new ArgumentNullException(nameof(timeProvider));
|
||||
_healthChecker = healthChecker;
|
||||
}
|
||||
|
||||
/// <inheritdoc />
|
||||
public async Task<TransparencyStatus> GetStatusAsync(CancellationToken cancellationToken = default)
|
||||
{
|
||||
var now = _timeProvider.GetUtcNow();
|
||||
|
||||
// Check if we need to refresh backend health
|
||||
if (now - _lastHealthCheck > TimeSpan.FromSeconds(_options.HealthCheckIntervalSeconds))
|
||||
{
|
||||
await RefreshBackendHealthAsync(cancellationToken);
|
||||
}
|
||||
|
||||
return BuildStatus(now);
|
||||
}
|
||||
|
||||
/// <inheritdoc />
|
||||
public async Task<TransparencyStatus> RefreshAsync(CancellationToken cancellationToken = default)
|
||||
{
|
||||
await RefreshBackendHealthAsync(cancellationToken);
|
||||
return BuildStatus(_timeProvider.GetUtcNow());
|
||||
}
|
||||
|
||||
/// <inheritdoc />
|
||||
public void RecordSubmission(bool success, TimeSpan latency)
|
||||
{
|
||||
var entry = new MetricEntry(
|
||||
_timeProvider.GetUtcNow(),
|
||||
success,
|
||||
false,
|
||||
latency.TotalMilliseconds);
|
||||
|
||||
_submissionMetrics.Enqueue(entry);
|
||||
PruneOldMetrics(_submissionMetrics);
|
||||
|
||||
_logger.LogDebug(
|
||||
"Recorded transparency submission: success={Success}, latency={LatencyMs}ms",
|
||||
success,
|
||||
latency.TotalMilliseconds);
|
||||
}
|
||||
|
||||
/// <inheritdoc />
|
||||
public void RecordVerification(bool success, bool offline)
|
||||
{
|
||||
var entry = new MetricEntry(
|
||||
_timeProvider.GetUtcNow(),
|
||||
success,
|
||||
offline,
|
||||
0);
|
||||
|
||||
_verificationMetrics.Enqueue(entry);
|
||||
PruneOldMetrics(_verificationMetrics);
|
||||
|
||||
_logger.LogDebug(
|
||||
"Recorded transparency verification: success={Success}, offline={Offline}",
|
||||
success,
|
||||
offline);
|
||||
}
|
||||
|
||||
/// <inheritdoc />
|
||||
public void RecordSync(DateTimeOffset syncTime, long treeSize)
|
||||
{
|
||||
lock (_lock)
|
||||
{
|
||||
_lastSyncAt = syncTime;
|
||||
_lastTreeSize = treeSize;
|
||||
_lastCheckpointVerifiedAt = _timeProvider.GetUtcNow();
|
||||
}
|
||||
|
||||
_logger.LogInformation(
|
||||
"Recorded transparency sync: time={SyncTime}, treeSize={TreeSize}",
|
||||
syncTime,
|
||||
treeSize);
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Updates the queue depths for status reporting.
|
||||
/// </summary>
|
||||
public void UpdateQueueDepths(int submissionQueue, int deadLetterQueue)
|
||||
{
|
||||
Interlocked.Exchange(ref _submissionQueueDepth, submissionQueue);
|
||||
Interlocked.Exchange(ref _deadLetterQueueDepth, deadLetterQueue);
|
||||
}
|
||||
|
||||
private TransparencyStatus BuildStatus(DateTimeOffset now)
|
||||
{
|
||||
double? lastSyncAgeHours = null;
|
||||
DateTimeOffset? lastSync;
|
||||
long lastTreeSize;
|
||||
DateTimeOffset? lastCheckpointVerified;
|
||||
|
||||
lock (_lock)
|
||||
{
|
||||
lastSync = _lastSyncAt;
|
||||
lastTreeSize = _lastTreeSize;
|
||||
lastCheckpointVerified = _lastCheckpointVerifiedAt;
|
||||
|
||||
if (_lastSyncAt.HasValue)
|
||||
{
|
||||
lastSyncAgeHours = (now - _lastSyncAt.Value).TotalHours;
|
||||
}
|
||||
}
|
||||
|
||||
var (status, message) = DetermineStatus(lastSyncAgeHours);
|
||||
var metrics = CalculateMetrics(now);
|
||||
var backends = _backendStatuses.Values.ToList();
|
||||
|
||||
return new TransparencyStatus
|
||||
{
|
||||
Status = status,
|
||||
Message = message,
|
||||
LastSyncAt = lastSync,
|
||||
LastSyncAgeHours = lastSyncAgeHours,
|
||||
LastCheckpointVerifiedAt = lastCheckpointVerified,
|
||||
LastCheckpointTreeSize = lastTreeSize > 0 ? lastTreeSize : null,
|
||||
OfflineMode = string.IsNullOrEmpty(_options.RekorBackendUrl),
|
||||
EnforcementEnabled = _options.EnforcementEnabled,
|
||||
MaxCheckpointAgeHours = _options.MaxCheckpointAgeHours,
|
||||
RekorBackend = _options.RekorBackendUrl,
|
||||
RekorMirror = _options.RekorMirrorUrl,
|
||||
SubmissionQueueDepth = _submissionQueueDepth,
|
||||
DeadLetterQueueDepth = _deadLetterQueueDepth,
|
||||
Metrics = metrics,
|
||||
Backends = backends
|
||||
};
|
||||
}
|
||||
|
||||
private (TransparencyStatusLevel, string) DetermineStatus(double? lastSyncAgeHours)
{
// No backend configured - offline mode
if (string.IsNullOrEmpty(_options.RekorBackendUrl))
{
if (lastSyncAgeHours is null)
{
return (TransparencyStatusLevel.Offline, "Operating in offline mode - no checkpoint synced");
}

if (lastSyncAgeHours <= _options.MaxCheckpointAgeHours)
{
return (TransparencyStatusLevel.Offline, $"Operating in offline mode - checkpoint is {lastSyncAgeHours:F1}h old");
}

return (TransparencyStatusLevel.Unhealthy, $"Offline mode with stale checkpoint ({lastSyncAgeHours:F1}h old)");
}

// No sync ever
if (lastSyncAgeHours is null)
{
return (TransparencyStatusLevel.Unknown, "Transparency log never synced");
}

// Fresh checkpoint
if (lastSyncAgeHours <= _options.MaxCheckpointAgeHours)
{
return (TransparencyStatusLevel.Healthy, $"Transparency log synced {lastSyncAgeHours:F1}h ago");
}

// Stale but acceptable
if (lastSyncAgeHours <= _options.CriticalCheckpointAgeHours)
{
return (TransparencyStatusLevel.Degraded, $"Transparency log checkpoint is stale ({lastSyncAgeHours:F1}h old)");
}

// Critical staleness
return (TransparencyStatusLevel.Unhealthy, $"Transparency log checkpoint is critically stale ({lastSyncAgeHours:F1}h old)");
}
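// Worked example of the mapping above with the defaults (MaxCheckpointAgeHours = 24,
// CriticalCheckpointAgeHours = 72) and a Rekor backend configured:
//   lastSyncAgeHours = null -> Unknown   ("never synced")
//   lastSyncAgeHours = 3.0  -> Healthy
//   lastSyncAgeHours = 30.0 -> Degraded  (stale but below the critical threshold)
//   lastSyncAgeHours = 80.0 -> Unhealthy (critically stale)
// Without a configured backend, ages up to MaxCheckpointAgeHours map to Offline and
// anything older maps to Unhealthy.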
|
||||
|
||||
private TransparencyMetrics CalculateMetrics(DateTimeOffset now)
|
||||
{
|
||||
var oneHourAgo = now.AddHours(-1);
|
||||
|
||||
var recentSubmissions = _submissionMetrics
|
||||
.Where(m => m.Timestamp >= oneHourAgo)
|
||||
.ToList();
|
||||
|
||||
var recentVerifications = _verificationMetrics
|
||||
.Where(m => m.Timestamp >= oneHourAgo)
|
||||
.ToList();
|
||||
|
||||
var successfulSubmissions = recentSubmissions.Where(m => m.Success).ToList();
|
||||
var latencies = successfulSubmissions.Select(m => m.LatencyMs).OrderBy(l => l).ToList();
|
||||
|
||||
return new TransparencyMetrics
|
||||
{
|
||||
SubmissionsLastHour = recentSubmissions.Count,
|
||||
SuccessfulSubmissionsLastHour = successfulSubmissions.Count,
|
||||
FailedSubmissionsLastHour = recentSubmissions.Count - successfulSubmissions.Count,
|
||||
VerificationsLastHour = recentVerifications.Count,
|
||||
SuccessfulVerificationsLastHour = recentVerifications.Count(m => m.Success),
|
||||
AvgSubmissionLatencyMs = latencies.Count > 0 ? latencies.Average() : 0,
|
||||
P95SubmissionLatencyMs = latencies.Count > 0 ? Percentile(latencies, 95) : 0,
|
||||
OfflineVerificationsLastHour = recentVerifications.Count(m => m.Offline)
|
||||
};
|
||||
}
|
||||
|
||||
private async Task RefreshBackendHealthAsync(CancellationToken cancellationToken)
|
||||
{
|
||||
_lastHealthCheck = _timeProvider.GetUtcNow();
|
||||
|
||||
var tasks = new List<Task>();
|
||||
|
||||
if (!string.IsNullOrEmpty(_options.RekorBackendUrl))
|
||||
{
|
||||
tasks.Add(CheckBackendHealthAsync("primary", _options.RekorBackendUrl, true, cancellationToken));
|
||||
}
|
||||
|
||||
if (!string.IsNullOrEmpty(_options.RekorMirrorUrl))
|
||||
{
|
||||
tasks.Add(CheckBackendHealthAsync("mirror", _options.RekorMirrorUrl, false, cancellationToken));
|
||||
}
|
||||
|
||||
if (tasks.Count > 0)
|
||||
{
|
||||
await Task.WhenAll(tasks);
|
||||
}
|
||||
}
|
||||
|
||||
private async Task CheckBackendHealthAsync(
|
||||
string id,
|
||||
string url,
|
||||
bool primary,
|
||||
CancellationToken cancellationToken)
|
||||
{
|
||||
var status = new TransparencyBackendStatus
|
||||
{
|
||||
Id = id,
|
||||
Url = url,
|
||||
Primary = primary,
|
||||
Status = BackendHealthStatus.Unknown,
|
||||
LastCheckedAt = _timeProvider.GetUtcNow()
|
||||
};
|
||||
|
||||
if (_healthChecker is null)
|
||||
{
|
||||
_backendStatuses[id] = status;
|
||||
return;
|
||||
}
|
||||
|
||||
try
|
||||
{
|
||||
var result = await _healthChecker.CheckHealthAsync(
|
||||
url,
|
||||
_options.BackendTimeoutSeconds,
|
||||
cancellationToken);
|
||||
|
||||
var healthStatus = result.IsHealthy
|
||||
? (result.LatencyMs > _options.SlowLatencyThresholdMs ? BackendHealthStatus.Slow : BackendHealthStatus.Healthy)
|
||||
: BackendHealthStatus.Unhealthy;
|
||||
|
||||
status = status with
|
||||
{
|
||||
Status = healthStatus,
|
||||
LatencyMs = result.LatencyMs,
|
||||
Error = result.Error
|
||||
};
|
||||
}
|
||||
catch (Exception ex)
|
||||
{
|
||||
_logger.LogWarning(ex, "Failed to check transparency backend health: {Url}", url);
|
||||
status = status with
|
||||
{
|
||||
Status = BackendHealthStatus.Unhealthy,
|
||||
Error = ex.Message
|
||||
};
|
||||
}
|
||||
|
||||
_backendStatuses[id] = status;
|
||||
}
|
||||
|
||||
private void PruneOldMetrics(ConcurrentQueue<MetricEntry> queue)
|
||||
{
|
||||
var cutoff = _timeProvider.GetUtcNow().AddHours(-2);
|
||||
|
||||
while (queue.TryPeek(out var entry) && entry.Timestamp < cutoff)
|
||||
{
|
||||
queue.TryDequeue(out _);
|
||||
}
|
||||
}
|
||||
|
||||
private static double Percentile(List<double> values, int percentile)
{
if (values.Count == 0) return 0;

var index = (int)Math.Ceiling(percentile / 100.0 * values.Count) - 1;
return values[Math.Max(0, Math.Min(index, values.Count - 1))];
}
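// Example: Percentile([100, 200, 300, 400], 95) uses index ceil(0.95 * 4) - 1 = 3 and
// returns 400. This nearest-rank method avoids interpolation but is coarse for small samples.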
|
||||
|
||||
public void Dispose()
|
||||
{
|
||||
// No unmanaged resources to dispose
|
||||
}
|
||||
|
||||
private sealed record MetricEntry(
|
||||
DateTimeOffset Timestamp,
|
||||
bool Success,
|
||||
bool Offline,
|
||||
double LatencyMs);
|
||||
}
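// Registration sketch (assumed wiring; the host's composition root may differ):
//   services.AddSingleton(TimeProvider.System);
//   services.AddSingleton<TransparencyStatusProvider>();
//   services.AddSingleton<ITransparencyStatusProvider>(sp => sp.GetRequiredService<TransparencyStatusProvider>());
// The ITransparencyBackendHealthChecker constructor parameter is optional, so the provider
// still resolves when no health checker implementation is registered.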
|
||||
@@ -0,0 +1,469 @@
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.Globalization;
|
||||
using System.Net;
|
||||
using System.Net.Http;
|
||||
using System.Text;
|
||||
using System.Threading;
|
||||
using System.Threading.Tasks;
|
||||
using Microsoft.Extensions.Logging;
|
||||
using StellaOps.Attestor.Core.Rekor;
|
||||
|
||||
namespace StellaOps.Attestor.Infrastructure.Rekor;
|
||||
|
||||
/// <summary>
|
||||
/// HTTP client for fetching proofs from Rekor v2 tile-based logs.
|
||||
/// Implements the Sunlight/C2SP tlog-tiles specification.
|
||||
/// </summary>
|
||||
internal sealed class HttpRekorTileClient : IRekorTileClient
|
||||
{
|
||||
private const int TileHeight = 8; // Standard tile height (2^8 = 256 entries per tile)
|
||||
private const int TileWidth = 1 << TileHeight; // 256 entries per full tile
|
||||
private const int HashSize = 32; // SHA-256
|
||||
|
||||
private readonly HttpClient _httpClient;
|
||||
private readonly ILogger<HttpRekorTileClient> _logger;
|
||||
|
||||
public HttpRekorTileClient(HttpClient httpClient, ILogger<HttpRekorTileClient> logger)
|
||||
{
|
||||
_httpClient = httpClient;
|
||||
_logger = logger;
|
||||
}
|
||||
|
||||
/// <inheritdoc />
|
||||
public async Task<RekorTileCheckpoint?> GetCheckpointAsync(
|
||||
RekorBackend backend,
|
||||
CancellationToken cancellationToken = default)
|
||||
{
|
||||
ArgumentNullException.ThrowIfNull(backend);
|
||||
|
||||
var checkpointUrl = new Uri(backend.GetEffectiveTileBaseUrl(), "../checkpoint");
|
||||
_logger.LogDebug("Fetching checkpoint from {Url}", checkpointUrl);
|
||||
|
||||
try
|
||||
{
|
||||
using var request = new HttpRequestMessage(HttpMethod.Get, checkpointUrl);
|
||||
using var response = await _httpClient.SendAsync(request, cancellationToken).ConfigureAwait(false);
|
||||
|
||||
if (response.StatusCode == HttpStatusCode.NotFound)
|
||||
{
|
||||
_logger.LogDebug("Checkpoint not found at {Url}", checkpointUrl);
|
||||
return null;
|
||||
}
|
||||
|
||||
response.EnsureSuccessStatusCode();
|
||||
|
||||
var content = await response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false);
|
||||
return ParseCheckpoint(content);
|
||||
}
|
||||
catch (HttpRequestException ex)
|
||||
{
|
||||
_logger.LogWarning(ex, "Failed to fetch checkpoint from {Url}", checkpointUrl);
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
/// <inheritdoc />
|
||||
public async Task<RekorTileData?> GetTileAsync(
|
||||
RekorBackend backend,
|
||||
int level,
|
||||
long index,
|
||||
CancellationToken cancellationToken = default)
|
||||
{
|
||||
ArgumentNullException.ThrowIfNull(backend);
|
||||
|
||||
// Tile path format: tile/{level}/{index...} where index is split into directories
|
||||
var tilePath = FormatTilePath(level, index);
|
||||
var tileUrl = new Uri(backend.GetEffectiveTileBaseUrl(), tilePath);
|
||||
|
||||
_logger.LogDebug("Fetching tile at level {Level} index {Index} from {Url}", level, index, tileUrl);
|
||||
|
||||
try
|
||||
{
|
||||
using var request = new HttpRequestMessage(HttpMethod.Get, tileUrl);
|
||||
using var response = await _httpClient.SendAsync(request, cancellationToken).ConfigureAwait(false);
|
||||
|
||||
if (response.StatusCode == HttpStatusCode.NotFound)
|
||||
{
|
||||
_logger.LogDebug("Tile not found at {Url}", tileUrl);
|
||||
return null;
|
||||
}
|
||||
|
||||
response.EnsureSuccessStatusCode();
|
||||
|
||||
var data = await response.Content.ReadAsByteArrayAsync(cancellationToken).ConfigureAwait(false);
|
||||
var width = data.Length / HashSize;
|
||||
|
||||
return new RekorTileData
|
||||
{
|
||||
Level = level,
|
||||
Index = index,
|
||||
Width = width,
|
||||
Hashes = data
|
||||
};
|
||||
}
|
||||
catch (HttpRequestException ex)
|
||||
{
|
||||
_logger.LogWarning(ex, "Failed to fetch tile from {Url}", tileUrl);
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
/// <inheritdoc />
|
||||
public async Task<RekorTileEntry?> GetEntryAsync(
|
||||
RekorBackend backend,
|
||||
long logIndex,
|
||||
CancellationToken cancellationToken = default)
|
||||
{
|
||||
ArgumentNullException.ThrowIfNull(backend);
|
||||
|
||||
// Entry path format: tile/entries/{index...}
|
||||
var entryPath = FormatEntryPath(logIndex);
|
||||
var entryUrl = new Uri(backend.GetEffectiveTileBaseUrl(), entryPath);
|
||||
|
||||
_logger.LogDebug("Fetching entry at index {Index} from {Url}", logIndex, entryUrl);
|
||||
|
||||
try
|
||||
{
|
||||
using var request = new HttpRequestMessage(HttpMethod.Get, entryUrl);
|
||||
using var response = await _httpClient.SendAsync(request, cancellationToken).ConfigureAwait(false);
|
||||
|
||||
if (response.StatusCode == HttpStatusCode.NotFound)
|
||||
{
|
||||
_logger.LogDebug("Entry not found at {Url}", entryUrl);
|
||||
return null;
|
||||
}
|
||||
|
||||
response.EnsureSuccessStatusCode();
|
||||
|
||||
var data = await response.Content.ReadAsByteArrayAsync(cancellationToken).ConfigureAwait(false);
|
||||
|
||||
return new RekorTileEntry
|
||||
{
|
||||
LogIndex = logIndex,
|
||||
Data = data,
|
||||
IntegratedTime = null // Would need to parse from entry format
|
||||
};
|
||||
}
|
||||
catch (HttpRequestException ex)
|
||||
{
|
||||
_logger.LogWarning(ex, "Failed to fetch entry from {Url}", entryUrl);
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
/// <inheritdoc />
|
||||
public async Task<RekorTileInclusionProof?> ComputeInclusionProofAsync(
|
||||
RekorBackend backend,
|
||||
long logIndex,
|
||||
long treeSize,
|
||||
CancellationToken cancellationToken = default)
|
||||
{
|
||||
ArgumentNullException.ThrowIfNull(backend);
|
||||
|
||||
if (logIndex < 0 || logIndex >= treeSize)
|
||||
{
|
||||
_logger.LogWarning("Invalid log index {Index} for tree size {Size}", logIndex, treeSize);
|
||||
return null;
|
||||
}
|
||||
|
||||
_logger.LogDebug("Computing inclusion proof for index {Index} in tree of size {Size}", logIndex, treeSize);
|
||||
|
||||
try
|
||||
{
|
||||
// Fetch the leaf tile to get the leaf hash
|
||||
var leafTileIndex = logIndex / TileWidth;
|
||||
var leafTile = await GetTileAsync(backend, 0, leafTileIndex, cancellationToken).ConfigureAwait(false);
|
||||
|
||||
if (leafTile is null)
|
||||
{
|
||||
_logger.LogWarning("Failed to fetch leaf tile for index {Index}", logIndex);
|
||||
return null;
|
||||
}
|
||||
|
||||
var positionInTile = (int)(logIndex % TileWidth);
|
||||
if (positionInTile >= leafTile.Width)
|
||||
{
|
||||
_logger.LogWarning("Position {Position} exceeds tile width {Width}", positionInTile, leafTile.Width);
|
||||
return null;
|
||||
}
|
||||
|
||||
var leafHash = leafTile.GetHash(positionInTile);
|
||||
|
||||
// Compute the proof path by fetching required tiles
|
||||
var path = await ComputeProofPathAsync(backend, logIndex, treeSize, cancellationToken).ConfigureAwait(false);
|
||||
|
||||
if (path is null)
|
||||
{
|
||||
return null;
|
||||
}
|
||||
|
||||
// Compute expected root hash from path
|
||||
var rootHash = ComputeRootFromPath(leafHash, logIndex, treeSize, path);
|
||||
|
||||
return new RekorTileInclusionProof
|
||||
{
|
||||
LogIndex = logIndex,
|
||||
TreeSize = treeSize,
|
||||
LeafHash = leafHash,
|
||||
Path = path,
|
||||
RootHash = rootHash
|
||||
};
|
||||
}
|
||||
catch (Exception ex)
|
||||
{
|
||||
_logger.LogWarning(ex, "Failed to compute inclusion proof for index {Index}", logIndex);
|
||||
return null;
|
||||
}
|
||||
}
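// Typical call shape (illustrative):
//   var checkpoint = await tileClient.GetCheckpointAsync(backend, ct);
//   var proof = await tileClient.ComputeInclusionProofAsync(backend, logIndex, checkpoint!.TreeSize, ct);
//   // Inclusion holds when proof.RootHash matches checkpoint.RootHash; that comparison is left to the caller.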
|
||||
|
||||
private async Task<IReadOnlyList<byte[]>?> ComputeProofPathAsync(
|
||||
RekorBackend backend,
|
||||
long logIndex,
|
||||
long treeSize,
|
||||
CancellationToken cancellationToken)
|
||||
{
|
||||
var path = new List<byte[]>();
|
||||
var index = logIndex;
|
||||
var size = treeSize;
|
||||
var level = 0;
|
||||
|
||||
while (size > 1)
|
||||
{
|
||||
var siblingIndex = index ^ 1; // XOR to get sibling
|
||||
var tileIndex = siblingIndex / TileWidth;
|
||||
var positionInTile = (int)(siblingIndex % TileWidth);
|
||||
|
||||
// Only add sibling if it exists in the tree
|
||||
if (siblingIndex < size)
|
||||
{
|
||||
var tile = await GetTileAsync(backend, level, tileIndex, cancellationToken).ConfigureAwait(false);
|
||||
|
||||
if (tile is null || positionInTile >= tile.Width)
|
||||
{
|
||||
// For partial trees, compute ephemeral hash if needed
|
||||
_logger.LogDebug("Sibling at level {Level} index {Index} not in tile, tree may be partial", level, siblingIndex);
|
||||
|
||||
// For now, return null if we can't get the sibling
|
||||
// A full implementation would handle partial tiles
|
||||
return null;
|
||||
}
|
||||
|
||||
path.Add(tile.GetHash(positionInTile));
|
||||
}
|
||||
|
||||
index /= 2;
|
||||
size = (size + 1) / 2;
|
||||
level++;
|
||||
}
|
||||
|
||||
return path;
|
||||
}
|
||||
|
||||
private static byte[] ComputeRootFromPath(byte[] leafHash, long logIndex, long treeSize, IReadOnlyList<byte[]> path)
|
||||
{
|
||||
var current = leafHash;
|
||||
var index = logIndex;
|
||||
var size = treeSize;
|
||||
var pathIndex = 0;
|
||||
|
||||
while (size > 1 && pathIndex < path.Count)
|
||||
{
|
||||
var siblingIndex = index ^ 1;
|
||||
|
||||
if (siblingIndex < size)
|
||||
{
|
||||
var sibling = path[pathIndex++];
|
||||
|
||||
// Hash order depends on position
|
||||
current = (index & 1) == 0
|
||||
? HashPair(current, sibling)
|
||||
: HashPair(sibling, current);
|
||||
}
|
||||
|
||||
index /= 2;
|
||||
size = (size + 1) / 2;
|
||||
}
|
||||
|
||||
return current;
|
||||
}
|
||||
|
||||
private static byte[] HashPair(byte[] left, byte[] right)
{
using var sha256 = System.Security.Cryptography.SHA256.Create();

// RFC 6962: H(0x01 || left || right)
var input = new byte[1 + left.Length + right.Length];
input[0] = 0x01;
Array.Copy(left, 0, input, 1, left.Length);
Array.Copy(right, 0, input, 1 + left.Length, right.Length);

return sha256.ComputeHash(input);
}
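// Note: RFC 6962 distinguishes leaf hashes, H(0x00 || leaf), from interior node hashes,
// H(0x01 || left || right). Only the interior form is computed here because the leaf
// hashes are read directly from the level-0 tiles.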
|
||||
|
||||
private RekorTileCheckpoint? ParseCheckpoint(string content)
|
||||
{
|
||||
// Checkpoint format (Go signed note format):
|
||||
// <origin>
|
||||
// <tree_size>
|
||||
// <root_hash_base64>
|
||||
// [optional extension lines]
|
||||
//
|
||||
// <signature_line>...
|
||||
|
||||
var lines = content.Split('\n', StringSplitOptions.None);
|
||||
|
||||
if (lines.Length < 4)
|
||||
{
|
||||
_logger.LogWarning("Checkpoint has too few lines: {Count}", lines.Length);
|
||||
return null;
|
||||
}
|
||||
|
||||
var origin = lines[0];
|
||||
if (!long.TryParse(lines[1], NumberStyles.None, CultureInfo.InvariantCulture, out var treeSize))
|
||||
{
|
||||
_logger.LogWarning("Invalid tree size in checkpoint: {Line}", lines[1]);
|
||||
return null;
|
||||
}
|
||||
|
||||
byte[] rootHash;
|
||||
try
|
||||
{
|
||||
rootHash = Convert.FromBase64String(lines[2]);
|
||||
}
|
||||
catch (FormatException)
|
||||
{
|
||||
_logger.LogWarning("Invalid root hash base64 in checkpoint: {Line}", lines[2]);
|
||||
return null;
|
||||
}
|
||||
|
||||
// Find the blank line that separates checkpoint from signatures
|
||||
var signatureStartIndex = -1;
|
||||
for (var i = 3; i < lines.Length; i++)
|
||||
{
|
||||
if (string.IsNullOrWhiteSpace(lines[i]))
|
||||
{
|
||||
signatureStartIndex = i + 1;
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
var signatures = new List<RekorCheckpointSignature>();
|
||||
if (signatureStartIndex > 0)
|
||||
{
|
||||
for (var i = signatureStartIndex; i < lines.Length; i++)
|
||||
{
|
||||
var sigLine = lines[i];
|
||||
if (string.IsNullOrWhiteSpace(sigLine))
|
||||
{
|
||||
continue;
|
||||
}
|
||||
|
||||
// Signature format: <key_hint> <signature_base64>
|
||||
var parts = sigLine.Split(' ', 2);
|
||||
if (parts.Length >= 2)
|
||||
{
|
||||
try
|
||||
{
|
||||
signatures.Add(new RekorCheckpointSignature
|
||||
{
|
||||
KeyHint = parts[0],
|
||||
Signature = Convert.FromBase64String(parts[1])
|
||||
});
|
||||
}
|
||||
catch (FormatException)
|
||||
{
|
||||
_logger.LogDebug("Skipping invalid signature line: {Line}", sigLine);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Extract raw checkpoint (everything before signatures)
|
||||
var rawCheckpointEnd = signatureStartIndex > 0 ? signatureStartIndex - 1 : lines.Length;
|
||||
var rawCheckpoint = string.Join('\n', lines[..rawCheckpointEnd]);
|
||||
|
||||
return new RekorTileCheckpoint
|
||||
{
|
||||
Origin = origin,
|
||||
TreeSize = treeSize,
|
||||
RootHash = rootHash,
|
||||
RawCheckpoint = rawCheckpoint,
|
||||
Signatures = signatures
|
||||
};
|
||||
}
|
||||
|
||||
private static string FormatTilePath(int level, long index)
|
||||
{
|
||||
// Tile path uses a base-1000 directory split for scalability,
// e.g. "0/1/234" for level 0, index 1234 (relative to the tile base URL).
|
||||
var sb = new StringBuilder();
|
||||
sb.Append(level.ToString(CultureInfo.InvariantCulture));
|
||||
sb.Append('/');
|
||||
|
||||
if (index == 0)
|
||||
{
|
||||
sb.Append("000");
|
||||
}
|
||||
else
|
||||
{
|
||||
var parts = new List<string>();
|
||||
var remaining = index;
|
||||
while (remaining > 0)
|
||||
{
|
||||
parts.Add((remaining % 1000).ToString("D3", CultureInfo.InvariantCulture));
|
||||
remaining /= 1000;
|
||||
}
|
||||
|
||||
parts.Reverse();
|
||||
// First part doesn't need leading zeros padding to 3 digits if it's the most significant
|
||||
if (parts.Count > 0)
|
||||
{
|
||||
parts[0] = parts[0].TrimStart('0');
|
||||
if (string.IsNullOrEmpty(parts[0]))
|
||||
{
|
||||
parts[0] = "0";
|
||||
}
|
||||
}
|
||||
|
||||
sb.Append(string.Join('/', parts));
|
||||
}
|
||||
|
||||
return sb.ToString();
|
||||
}
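// Examples of paths produced by FormatTilePath (appended to the tile base URL):
//   level 0, index 0        -> "0/000"
//   level 0, index 567      -> "0/567"
//   level 0, index 1234     -> "0/1/234"
//   level 1, index 1000000  -> "1/1/000/000"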
|
||||
|
||||
private static string FormatEntryPath(long index)
|
||||
{
|
||||
// Entry path: entries/{index...}
|
||||
var sb = new StringBuilder("entries/");
|
||||
|
||||
if (index == 0)
|
||||
{
|
||||
sb.Append("000");
|
||||
}
|
||||
else
|
||||
{
|
||||
var parts = new List<string>();
|
||||
var remaining = index;
|
||||
while (remaining > 0)
|
||||
{
|
||||
parts.Add((remaining % 1000).ToString("D3", CultureInfo.InvariantCulture));
|
||||
remaining /= 1000;
|
||||
}
|
||||
|
||||
parts.Reverse();
|
||||
if (parts.Count > 0)
|
||||
{
|
||||
parts[0] = parts[0].TrimStart('0');
|
||||
if (string.IsNullOrEmpty(parts[0]))
|
||||
{
|
||||
parts[0] = "0";
|
||||
}
|
||||
}
|
||||
|
||||
sb.Append(string.Join('/', parts));
|
||||
}
|
||||
|
||||
return sb.ToString();
|
||||
}
|
||||
}
|
||||
@@ -47,9 +47,43 @@ internal static class RekorBackendResolver
|
||||
{
|
||||
Name = name,
|
||||
Url = new Uri(options.Url, UriKind.Absolute),
|
||||
Version = ParseLogVersion(options.Version),
|
||||
TileBaseUrl = string.IsNullOrWhiteSpace(options.TileBaseUrl)
|
||||
? null
|
||||
: new Uri(options.TileBaseUrl, UriKind.Absolute),
|
||||
LogId = options.LogId,
|
||||
PreferTileProofs = options.PreferTileProofs,
|
||||
ProofTimeout = TimeSpan.FromMilliseconds(options.ProofTimeoutMs),
|
||||
PollInterval = TimeSpan.FromMilliseconds(options.PollIntervalMs),
|
||||
MaxAttempts = options.MaxAttempts
|
||||
};
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Parses the log version string to the enum value.
|
||||
/// </summary>
|
||||
private static RekorLogVersion ParseLogVersion(string? version)
|
||||
{
|
||||
if (string.IsNullOrWhiteSpace(version))
|
||||
{
|
||||
return RekorLogVersion.Auto;
|
||||
}
|
||||
|
||||
return version.Trim().ToUpperInvariant() switch
|
||||
{
|
||||
"AUTO" => RekorLogVersion.Auto,
|
||||
"V1" or "1" => RekorLogVersion.V1,
|
||||
"V2" or "2" => RekorLogVersion.V2,
|
||||
_ => RekorLogVersion.Auto
|
||||
};
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Determines if the backend should use tile-based verification.
|
||||
/// </summary>
|
||||
public static bool ShouldUseTileProofs(RekorBackend backend)
|
||||
{
|
||||
return backend.Version == RekorLogVersion.V2 ||
|
||||
(backend.Version == RekorLogVersion.Auto && backend.PreferTileProofs);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -96,6 +96,20 @@ public static class ServiceCollectionExtensions
|
||||
});
|
||||
services.AddSingleton<IRekorClient>(sp => sp.GetRequiredService<HttpRekorClient>());
|
||||
|
||||
// Rekor v2 tile-based client for Sunlight/tile log format
|
||||
services.AddHttpClient<HttpRekorTileClient>((sp, client) =>
|
||||
{
|
||||
var options = sp.GetRequiredService<IOptions<AttestorOptions>>().Value;
|
||||
var timeoutMs = options.Rekor.Primary.ProofTimeoutMs;
|
||||
if (timeoutMs <= 0)
|
||||
{
|
||||
timeoutMs = 15_000;
|
||||
}
|
||||
|
||||
client.Timeout = TimeSpan.FromMilliseconds(timeoutMs);
|
||||
});
|
||||
services.AddSingleton<IRekorTileClient>(sp => sp.GetRequiredService<HttpRekorTileClient>());
|
||||
|
||||
services.AddHttpClient<HttpTransparencyWitnessClient>((sp, client) =>
|
||||
{
|
||||
var options = sp.GetRequiredService<IOptions<AttestorOptions>>().Value;
|
||||
|
||||
@@ -0,0 +1,313 @@
|
||||
using System;
|
||||
using System.Net;
|
||||
using System.Net.Http;
|
||||
using System.Text;
|
||||
using System.Threading;
|
||||
using System.Threading.Tasks;
|
||||
using FluentAssertions;
|
||||
using Microsoft.Extensions.Logging.Abstractions;
|
||||
using StellaOps.Attestor.Core.Rekor;
|
||||
using StellaOps.Attestor.Infrastructure.Rekor;
|
||||
using StellaOps.TestKit;
|
||||
using Xunit;
|
||||
|
||||
namespace StellaOps.Attestor.Infrastructure.Tests;
|
||||
|
||||
public sealed class HttpRekorTileClientTests
|
||||
{
|
||||
[Trait("Category", TestCategories.Unit)]
|
||||
[Fact]
|
||||
public async Task GetCheckpointAsync_ValidCheckpoint_ParsesCorrectly()
|
||||
{
|
||||
// Arrange
|
||||
var checkpoint = """
|
||||
rekor.sigstore.dev - 2605736670972794746
|
||||
12345678
|
||||
rMj3G9LfM9C6Xt0qpV3pHbM2q5lPvKjS0mOmV8jXwAk=
|
||||
|
||||
- rekor.sigstore.dev ABC123signature==
|
||||
""";
|
||||
|
||||
var client = CreateClient(new CheckpointHandler(checkpoint));
|
||||
var backend = CreateBackend();
|
||||
|
||||
// Act
|
||||
var result = await client.GetCheckpointAsync(backend, CancellationToken.None);
|
||||
|
||||
// Assert
|
||||
result.Should().NotBeNull();
|
||||
result!.Origin.Should().Be("rekor.sigstore.dev - 2605736670972794746");
|
||||
result.TreeSize.Should().Be(12345678);
|
||||
result.RootHash.Should().NotBeNullOrEmpty();
|
||||
result.Signatures.Should().HaveCount(1);
|
||||
}
|
||||
|
||||
[Trait("Category", TestCategories.Unit)]
|
||||
[Fact]
|
||||
public async Task GetCheckpointAsync_NotFound_ReturnsNull()
|
||||
{
|
||||
// Arrange
|
||||
var client = CreateClient(new NotFoundHandler());
|
||||
var backend = CreateBackend();
|
||||
|
||||
// Act
|
||||
var result = await client.GetCheckpointAsync(backend, CancellationToken.None);
|
||||
|
||||
// Assert
|
||||
result.Should().BeNull();
|
||||
}
|
||||
|
||||
[Trait("Category", TestCategories.Unit)]
|
||||
[Fact]
|
||||
public async Task GetTileAsync_ValidTile_ReturnsTileData()
|
||||
{
|
||||
// Arrange - 256 hashes (32 bytes each) = 8192 bytes
|
||||
var tileData = new byte[32 * 4]; // 4 hashes for simplicity
|
||||
Random.Shared.NextBytes(tileData);
|
||||
|
||||
var client = CreateClient(new TileHandler(tileData));
|
||||
var backend = CreateBackend();
|
||||
|
||||
// Act
|
||||
var result = await client.GetTileAsync(backend, level: 0, index: 0, CancellationToken.None);
|
||||
|
||||
// Assert
|
||||
result.Should().NotBeNull();
|
||||
result!.Level.Should().Be(0);
|
||||
result.Index.Should().Be(0);
|
||||
result.Width.Should().Be(4);
|
||||
result.Hashes.Should().Equal(tileData);
|
||||
}
|
||||
|
||||
[Trait("Category", TestCategories.Unit)]
|
||||
[Fact]
|
||||
public async Task GetTileAsync_NotFound_ReturnsNull()
|
||||
{
|
||||
// Arrange
|
||||
var client = CreateClient(new NotFoundHandler());
|
||||
var backend = CreateBackend();
|
||||
|
||||
// Act
|
||||
var result = await client.GetTileAsync(backend, level: 0, index: 999999, CancellationToken.None);
|
||||
|
||||
// Assert
|
||||
result.Should().BeNull();
|
||||
}
|
||||
|
||||
[Trait("Category", TestCategories.Unit)]
|
||||
[Fact]
|
||||
public void RekorTileData_GetHash_ReturnsCorrectHash()
|
||||
{
|
||||
// Arrange
|
||||
var hash1 = new byte[32];
|
||||
var hash2 = new byte[32];
|
||||
Random.Shared.NextBytes(hash1);
|
||||
Random.Shared.NextBytes(hash2);
|
||||
|
||||
var hashes = new byte[64];
|
||||
Array.Copy(hash1, 0, hashes, 0, 32);
|
||||
Array.Copy(hash2, 0, hashes, 32, 32);
|
||||
|
||||
var tile = new RekorTileData
|
||||
{
|
||||
Level = 0,
|
||||
Index = 0,
|
||||
Width = 2,
|
||||
Hashes = hashes
|
||||
};
|
||||
|
||||
// Act & Assert
|
||||
tile.GetHash(0).Should().Equal(hash1);
|
||||
tile.GetHash(1).Should().Equal(hash2);
|
||||
}
|
||||
|
||||
[Trait("Category", TestCategories.Unit)]
|
||||
[Fact]
|
||||
public void RekorTileData_GetHash_OutOfRange_Throws()
|
||||
{
|
||||
// Arrange
|
||||
var tile = new RekorTileData
|
||||
{
|
||||
Level = 0,
|
||||
Index = 0,
|
||||
Width = 2,
|
||||
Hashes = new byte[64]
|
||||
};
|
||||
|
||||
// Act & Assert
|
||||
var action = () => tile.GetHash(2);
|
||||
action.Should().Throw<ArgumentOutOfRangeException>();
|
||||
}
|
||||
|
||||
[Trait("Category", TestCategories.Unit)]
|
||||
[Fact]
|
||||
public void RekorBackend_GetEffectiveTileBaseUrl_WithoutConfig_ReturnsDefault()
|
||||
{
|
||||
// Arrange
|
||||
var backend = new RekorBackend
|
||||
{
|
||||
Name = "test",
|
||||
Url = new Uri("https://rekor.sigstore.dev"),
|
||||
Version = RekorLogVersion.V2
|
||||
};
|
||||
|
||||
// Act
|
||||
var result = backend.GetEffectiveTileBaseUrl();
|
||||
|
||||
// Assert
|
||||
result.Should().Be(new Uri("https://rekor.sigstore.dev/tile/"));
|
||||
}
|
||||
|
||||
[Trait("Category", TestCategories.Unit)]
|
||||
[Fact]
|
||||
public void RekorBackend_GetEffectiveTileBaseUrl_WithConfig_ReturnsConfigured()
|
||||
{
|
||||
// Arrange
|
||||
var backend = new RekorBackend
|
||||
{
|
||||
Name = "test",
|
||||
Url = new Uri("https://rekor.sigstore.dev"),
|
||||
Version = RekorLogVersion.V2,
|
||||
TileBaseUrl = new Uri("https://tiles.rekor.sigstore.dev/")
|
||||
};
|
||||
|
||||
// Act
|
||||
var result = backend.GetEffectiveTileBaseUrl();
|
||||
|
||||
// Assert
|
||||
result.Should().Be(new Uri("https://tiles.rekor.sigstore.dev/"));
|
||||
}
|
||||
|
||||
[Trait("Category", TestCategories.Unit)]
|
||||
[Theory]
|
||||
[InlineData(RekorLogVersion.V2, false, true)]
|
||||
[InlineData(RekorLogVersion.V1, false, false)]
|
||||
[InlineData(RekorLogVersion.V1, true, false)]
|
||||
[InlineData(RekorLogVersion.Auto, false, false)]
|
||||
[InlineData(RekorLogVersion.Auto, true, true)]
|
||||
public void ShouldUseTileProofs_ReturnsExpected(RekorLogVersion version, bool preferTiles, bool expected)
|
||||
{
|
||||
// Arrange
|
||||
var backend = new RekorBackend
|
||||
{
|
||||
Name = "test",
|
||||
Url = new Uri("https://rekor.sigstore.dev"),
|
||||
Version = version,
|
||||
PreferTileProofs = preferTiles
|
||||
};
|
||||
|
||||
// Act
|
||||
var result = RekorBackendResolver.ShouldUseTileProofs(backend);
|
||||
|
||||
// Assert
|
||||
result.Should().Be(expected);
|
||||
}
|
||||
|
||||
[Trait("Category", TestCategories.Unit)]
|
||||
[Fact]
|
||||
public async Task GetEntryAsync_NotFound_ReturnsNull()
|
||||
{
|
||||
// Arrange
|
||||
var client = CreateClient(new NotFoundHandler());
|
||||
var backend = CreateBackend();
|
||||
|
||||
// Act
|
||||
var result = await client.GetEntryAsync(backend, logIndex: 12345, CancellationToken.None);
|
||||
|
||||
// Assert
|
||||
result.Should().BeNull();
|
||||
}
|
||||
|
||||
[Trait("Category", TestCategories.Unit)]
|
||||
[Fact]
|
||||
public async Task ComputeInclusionProofAsync_InvalidIndex_ReturnsNull()
|
||||
{
|
||||
// Arrange
|
||||
var client = CreateClient(new NotFoundHandler());
|
||||
var backend = CreateBackend();
|
||||
|
||||
// Act - index >= treeSize
|
||||
var result = await client.ComputeInclusionProofAsync(backend, logIndex: 100, treeSize: 50, CancellationToken.None);
|
||||
|
||||
// Assert
|
||||
result.Should().BeNull();
|
||||
}
|
||||
|
||||
private static HttpRekorTileClient CreateClient(HttpMessageHandler handler)
|
||||
{
|
||||
var httpClient = new HttpClient(handler)
|
||||
{
|
||||
BaseAddress = new Uri("https://rekor.sigstore.dev")
|
||||
};
|
||||
|
||||
return new HttpRekorTileClient(httpClient, NullLogger<HttpRekorTileClient>.Instance);
|
||||
}
|
||||
|
||||
private static RekorBackend CreateBackend()
|
||||
{
|
||||
return new RekorBackend
|
||||
{
|
||||
Name = "primary",
|
||||
Url = new Uri("https://rekor.sigstore.dev"),
|
||||
Version = RekorLogVersion.V2
|
||||
};
|
||||
}
|
||||
|
||||
private sealed class CheckpointHandler : HttpMessageHandler
|
||||
{
|
||||
private readonly string _checkpoint;
|
||||
|
||||
public CheckpointHandler(string checkpoint)
|
||||
{
|
||||
_checkpoint = checkpoint;
|
||||
}
|
||||
|
||||
protected override Task<HttpResponseMessage> SendAsync(HttpRequestMessage request, CancellationToken cancellationToken)
|
||||
{
|
||||
var path = request.RequestUri?.AbsolutePath ?? string.Empty;
|
||||
|
||||
if (path.Contains("checkpoint", StringComparison.OrdinalIgnoreCase))
|
||||
{
|
||||
return Task.FromResult(new HttpResponseMessage(HttpStatusCode.OK)
|
||||
{
|
||||
Content = new StringContent(_checkpoint, Encoding.UTF8, "text/plain")
|
||||
});
|
||||
}
|
||||
|
||||
return Task.FromResult(new HttpResponseMessage(HttpStatusCode.NotFound));
|
||||
}
|
||||
}
|
||||
|
||||
private sealed class TileHandler : HttpMessageHandler
|
||||
{
|
||||
private readonly byte[] _tileData;
|
||||
|
||||
public TileHandler(byte[] tileData)
|
||||
{
|
||||
_tileData = tileData;
|
||||
}
|
||||
|
||||
protected override Task<HttpResponseMessage> SendAsync(HttpRequestMessage request, CancellationToken cancellationToken)
|
||||
{
|
||||
var path = request.RequestUri?.AbsolutePath ?? string.Empty;
|
||||
|
||||
if (path.Contains("tile/", StringComparison.OrdinalIgnoreCase) && !path.Contains("checkpoint", StringComparison.OrdinalIgnoreCase))
|
||||
{
|
||||
return Task.FromResult(new HttpResponseMessage(HttpStatusCode.OK)
|
||||
{
|
||||
Content = new ByteArrayContent(_tileData)
|
||||
});
|
||||
}
|
||||
|
||||
return Task.FromResult(new HttpResponseMessage(HttpStatusCode.NotFound));
|
||||
}
|
||||
}
|
||||
|
||||
private sealed class NotFoundHandler : HttpMessageHandler
|
||||
{
|
||||
protected override Task<HttpResponseMessage> SendAsync(HttpRequestMessage request, CancellationToken cancellationToken)
|
||||
{
|
||||
return Task.FromResult(new HttpResponseMessage(HttpStatusCode.NotFound));
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -1,6 +1,7 @@
|
||||
using System;
|
||||
using FluentAssertions;
|
||||
using StellaOps.Attestor.Core.Options;
|
||||
using StellaOps.Attestor.Core.Rekor;
|
||||
using StellaOps.Attestor.Infrastructure.Rekor;
|
||||
using StellaOps.TestKit;
|
||||
using Xunit;
|
||||
@@ -35,6 +36,155 @@ public sealed class RekorBackendResolverTests
|
||||
backend.Url.Should().Be(new Uri("https://rekor.primary.example"));
|
||||
}
|
||||
|
||||
[Trait("Category", TestCategories.Unit)]
|
||||
[Theory]
|
||||
[InlineData("Auto", RekorLogVersion.Auto)]
|
||||
[InlineData("auto", RekorLogVersion.Auto)]
|
||||
[InlineData("V1", RekorLogVersion.V1)]
|
||||
[InlineData("v1", RekorLogVersion.V1)]
|
||||
[InlineData("1", RekorLogVersion.V1)]
|
||||
[InlineData("V2", RekorLogVersion.V2)]
|
||||
[InlineData("v2", RekorLogVersion.V2)]
|
||||
[InlineData("2", RekorLogVersion.V2)]
|
||||
[InlineData("", RekorLogVersion.Auto)]
|
||||
[InlineData(null, RekorLogVersion.Auto)]
|
||||
[InlineData("invalid", RekorLogVersion.Auto)]
|
||||
public void ResolveBackend_ParsesVersionCorrectly(string? versionString, RekorLogVersion expected)
|
||||
{
|
||||
var options = new AttestorOptions
|
||||
{
|
||||
Rekor = new AttestorOptions.RekorOptions
|
||||
{
|
||||
Primary = new AttestorOptions.RekorBackendOptions
|
||||
{
|
||||
Url = "https://rekor.sigstore.dev",
|
||||
Version = versionString ?? "Auto"
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
var backend = RekorBackendResolver.ResolveBackend(options, "primary", allowFallbackToPrimary: false);
|
||||
|
||||
backend.Version.Should().Be(expected);
|
||||
}
|
||||
|
||||
[Trait("Category", TestCategories.Unit)]
|
||||
[Fact]
|
||||
public void ResolveBackend_WithTileBaseUrl_SetsProperty()
|
||||
{
|
||||
var options = new AttestorOptions
|
||||
{
|
||||
Rekor = new AttestorOptions.RekorOptions
|
||||
{
|
||||
Primary = new AttestorOptions.RekorBackendOptions
|
||||
{
|
||||
Url = "https://rekor.sigstore.dev",
|
||||
Version = "V2",
|
||||
TileBaseUrl = "https://rekor.sigstore.dev/tile/"
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
var backend = RekorBackendResolver.ResolveBackend(options, "primary", allowFallbackToPrimary: false);
|
||||
|
||||
backend.Version.Should().Be(RekorLogVersion.V2);
|
||||
backend.TileBaseUrl.Should().Be(new Uri("https://rekor.sigstore.dev/tile/"));
|
||||
}
|
||||
|
||||
[Trait("Category", TestCategories.Unit)]
|
||||
[Fact]
|
||||
public void ResolveBackend_WithLogId_SetsProperty()
|
||||
{
|
||||
var options = new AttestorOptions
|
||||
{
|
||||
Rekor = new AttestorOptions.RekorOptions
|
||||
{
|
||||
Primary = new AttestorOptions.RekorBackendOptions
|
||||
{
|
||||
Url = "https://rekor.sigstore.dev",
|
||||
LogId = RekorBackend.SigstoreProductionLogId
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
var backend = RekorBackendResolver.ResolveBackend(options, "primary", allowFallbackToPrimary: false);
|
||||
|
||||
backend.LogId.Should().Be(RekorBackend.SigstoreProductionLogId);
|
||||
}
|
||||
|
||||
[Trait("Category", TestCategories.Unit)]
|
||||
[Fact]
|
||||
public void ResolveBackend_WithPreferTileProofs_SetsProperty()
|
||||
{
|
||||
var options = new AttestorOptions
|
||||
{
|
||||
Rekor = new AttestorOptions.RekorOptions
|
||||
{
|
||||
Primary = new AttestorOptions.RekorBackendOptions
|
||||
{
|
||||
Url = "https://rekor.sigstore.dev",
|
||||
PreferTileProofs = true
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
var backend = RekorBackendResolver.ResolveBackend(options, "primary", allowFallbackToPrimary: false);
|
||||
|
||||
backend.PreferTileProofs.Should().BeTrue();
|
||||
}
|
||||
|
||||
[Trait("Category", TestCategories.Unit)]
|
||||
[Theory]
|
||||
[InlineData(RekorLogVersion.V2, false, true)]
|
||||
[InlineData(RekorLogVersion.V1, true, false)]
|
||||
[InlineData(RekorLogVersion.Auto, true, true)]
|
||||
[InlineData(RekorLogVersion.Auto, false, false)]
|
||||
public void ShouldUseTileProofs_ReturnsCorrectValue(RekorLogVersion version, bool preferTileProofs, bool expected)
|
||||
{
|
||||
var backend = new RekorBackend
|
||||
{
|
||||
Name = "test",
|
||||
Url = new Uri("https://rekor.sigstore.dev"),
|
||||
Version = version,
|
||||
PreferTileProofs = preferTileProofs
|
||||
};
|
||||
|
||||
var result = RekorBackendResolver.ShouldUseTileProofs(backend);
|
||||
|
||||
result.Should().Be(expected);
|
||||
}
|
||||
|
||||
[Trait("Category", TestCategories.Unit)]
|
||||
[Fact]
|
||||
public void GetEffectiveTileBaseUrl_WithoutTileBaseUrl_ReturnsDefault()
|
||||
{
|
||||
var backend = new RekorBackend
|
||||
{
|
||||
Name = "test",
|
||||
Url = new Uri("https://rekor.sigstore.dev")
|
||||
};
|
||||
|
||||
var result = backend.GetEffectiveTileBaseUrl();
|
||||
|
||||
result.Should().Be(new Uri("https://rekor.sigstore.dev/tile/"));
|
||||
}
|
||||
|
||||
[Trait("Category", TestCategories.Unit)]
|
||||
[Fact]
|
||||
public void GetEffectiveTileBaseUrl_WithTileBaseUrl_ReturnsConfigured()
|
||||
{
|
||||
var backend = new RekorBackend
|
||||
{
|
||||
Name = "test",
|
||||
Url = new Uri("https://rekor.sigstore.dev"),
|
||||
TileBaseUrl = new Uri("https://custom.tile.endpoint/v2/tile/")
|
||||
};
|
||||
|
||||
var result = backend.GetEffectiveTileBaseUrl();
|
||||
|
||||
result.Should().Be(new Uri("https://custom.tile.endpoint/v2/tile/"));
|
||||
}
|
||||
|
||||
[Trait("Category", TestCategories.Unit)]
|
||||
[Fact]
|
||||
public void ResolveBackend_UnknownBackend_ThrowsWhenFallbackDisabled()
|
||||
|
||||
@@ -147,7 +147,7 @@ public static partial class ChangelogParser
|
||||
}
|
||||
|
||||
currentDate = ParseRpmDate(headerMatch.Groups[1].Value);
|
||||
-currentVersion = headerMatch.Groups[2].Value;
+currentVersion = headerMatch.Groups[2].Value.Trim();
|
||||
currentCves.Clear();
|
||||
currentBugs.Clear();
|
||||
currentDescription.Clear();
|
||||
|
||||
@@ -0,0 +1,161 @@
|
||||
using Microsoft.Extensions.Configuration;
|
||||
using StellaOps.Doctor.Models;
|
||||
using StellaOps.Doctor.Plugins;
|
||||
|
||||
namespace StellaOps.Doctor.Plugin.Notify.Checks;
|
||||
|
||||
/// <summary>
|
||||
/// Checks if email (SMTP) notification channel is properly configured.
|
||||
/// </summary>
|
||||
public sealed class EmailConfiguredCheck : IDoctorCheck
|
||||
{
|
||||
private const string PluginId = "stellaops.doctor.notify";
|
||||
private const string CategoryName = "Notifications";
|
||||
|
||||
/// <inheritdoc />
|
||||
public string CheckId => "check.notify.email.configured";
|
||||
|
||||
/// <inheritdoc />
|
||||
public string Name => "Email Configuration";
|
||||
|
||||
/// <inheritdoc />
|
||||
public string Description => "Verify email (SMTP) notification channel is properly configured";
|
||||
|
||||
/// <inheritdoc />
|
||||
public DoctorSeverity DefaultSeverity => DoctorSeverity.Warn;
|
||||
|
||||
/// <inheritdoc />
|
||||
public IReadOnlyList<string> Tags => ["notify", "email", "smtp", "quick", "configuration"];
|
||||
|
||||
/// <inheritdoc />
|
||||
public TimeSpan EstimatedDuration => TimeSpan.FromMilliseconds(100);
|
||||
|
||||
/// <inheritdoc />
|
||||
public bool CanRun(DoctorPluginContext context)
|
||||
{
|
||||
var emailConfig = context.Configuration.GetSection("Notify:Channels:Email");
|
||||
return emailConfig.Exists();
|
||||
}
|
||||
|
||||
/// <inheritdoc />
|
||||
public Task<DoctorCheckResult> RunAsync(DoctorPluginContext context, CancellationToken ct)
|
||||
{
|
||||
var builder = context.CreateResult(CheckId, PluginId, CategoryName);
|
||||
var emailConfig = context.Configuration.GetSection("Notify:Channels:Email");
|
||||
|
||||
var smtpHost = emailConfig["SmtpHost"] ?? emailConfig["Host"];
|
||||
var smtpPort = emailConfig.GetValue<int?>("SmtpPort") ?? emailConfig.GetValue<int?>("Port") ?? 0;
|
||||
var fromAddress = emailConfig["FromAddress"] ?? emailConfig["From"];
|
||||
var enabled = emailConfig.GetValue<bool>("Enabled", true);
|
||||
var useSsl = emailConfig.GetValue<bool>("UseSsl", true);
|
||||
var username = emailConfig["Username"];
|
||||
|
||||
var hasHost = !string.IsNullOrWhiteSpace(smtpHost);
|
||||
var hasFrom = !string.IsNullOrWhiteSpace(fromAddress);
|
||||
var hasValidPort = smtpPort > 0 && smtpPort <= 65535;
|
||||
|
||||
if (!hasHost)
|
||||
{
|
||||
return Task.FromResult(builder
|
||||
.Fail("SMTP host is not configured")
|
||||
.WithEvidence("Email configuration status", eb => eb
|
||||
.Add("SmtpHost", "(not set)")
|
||||
.Add("SmtpPort", smtpPort > 0 ? smtpPort.ToString() : "(not set)")
|
||||
.Add("FromAddress", hasFrom ? fromAddress! : "(not set)")
|
||||
.Add("Enabled", enabled.ToString()))
|
||||
.WithCauses(
|
||||
"SMTP host not set in configuration",
|
||||
"Missing Notify:Channels:Email:SmtpHost setting")
|
||||
.WithRemediation(rb => rb
|
||||
.AddStep(1, "Add SMTP configuration",
|
||||
"# Add to appsettings.json:\n" +
|
||||
"# \"Notify\": { \"Channels\": { \"Email\": {\n" +
|
||||
"# \"SmtpHost\": \"smtp.example.com\",\n" +
|
||||
"# \"SmtpPort\": 587,\n" +
|
||||
"# \"FromAddress\": \"noreply@example.com\",\n" +
|
||||
"# \"UseSsl\": true\n" +
|
||||
"# } } }",
|
||||
CommandType.FileEdit)
|
||||
.AddStep(2, "Or set via environment variables",
|
||||
"export Notify__Channels__Email__SmtpHost=\"smtp.example.com\"\n" +
|
||||
"export Notify__Channels__Email__SmtpPort=\"587\"\n" +
|
||||
"export Notify__Channels__Email__FromAddress=\"noreply@example.com\"",
|
||||
CommandType.Shell))
|
||||
.WithVerification($"stella doctor --check {CheckId}")
|
||||
.Build());
|
||||
}
|
||||
|
||||
if (!hasValidPort)
|
||||
{
|
||||
return Task.FromResult(builder
|
||||
.Warn("SMTP port is not configured or invalid")
|
||||
.WithEvidence("Email configuration status", eb => eb
|
||||
.Add("SmtpHost", smtpHost!)
|
||||
.Add("SmtpPort", smtpPort > 0 ? smtpPort.ToString() : "(not set or invalid)")
|
||||
.Add("FromAddress", hasFrom ? fromAddress! : "(not set)")
|
||||
.Add("Enabled", enabled.ToString())
|
||||
.Add("Note", "Common ports: 25 (unencrypted), 465 (SSL), 587 (TLS/STARTTLS)"))
|
||||
.WithCauses(
|
||||
"SMTP port not specified",
|
||||
"Invalid port number")
|
||||
.WithRemediation(rb => rb
|
||||
.AddStep(1, "Set SMTP port",
|
||||
"# Common SMTP ports:\n# 25 - Standard SMTP (often blocked)\n# 465 - SMTP over SSL\n# 587 - SMTP with STARTTLS (recommended)",
|
||||
CommandType.Manual))
|
||||
.WithVerification($"stella doctor --check {CheckId}")
|
||||
.Build());
|
||||
}
|
||||
|
||||
if (!hasFrom)
|
||||
{
|
||||
return Task.FromResult(builder
|
||||
.Warn("From address is not configured")
|
||||
.WithEvidence("Email configuration status", eb => eb
|
||||
.Add("SmtpHost", smtpHost!)
|
||||
.Add("SmtpPort", smtpPort.ToString())
|
||||
.Add("FromAddress", "(not set)")
|
||||
.Add("Enabled", enabled.ToString()))
|
||||
.WithCauses(
|
||||
"From address not configured",
|
||||
"Emails may be rejected without a valid sender")
|
||||
.WithRemediation(rb => rb
|
||||
.AddStep(1, "Set from address",
|
||||
"# Add Notify:Channels:Email:FromAddress to configuration",
|
||||
CommandType.FileEdit))
|
||||
.WithVerification($"stella doctor --check {CheckId}")
|
||||
.Build());
|
||||
}
|
||||
|
||||
if (!enabled)
|
||||
{
|
||||
return Task.FromResult(builder
|
||||
.Warn("Email channel is configured but disabled")
|
||||
.WithEvidence("Email configuration status", eb => eb
|
||||
.Add("SmtpHost", smtpHost!)
|
||||
.Add("SmtpPort", smtpPort.ToString())
|
||||
.Add("FromAddress", fromAddress!)
|
||||
.Add("Enabled", "false")
|
||||
.Add("UseSsl", useSsl.ToString())
|
||||
.Add("HasCredentials", !string.IsNullOrWhiteSpace(username) ? "yes" : "no"))
|
||||
.WithCauses(
|
||||
"Email notifications explicitly disabled")
|
||||
.WithRemediation(rb => rb
|
||||
.AddStep(1, "Enable email notifications",
|
||||
"# Set Notify:Channels:Email:Enabled to true",
|
||||
CommandType.FileEdit))
|
||||
.WithVerification($"stella doctor --check {CheckId}")
|
||||
.Build());
|
||||
}
|
||||
|
||||
return Task.FromResult(builder
|
||||
.Pass("Email notification channel is properly configured")
|
||||
.WithEvidence("Email configuration status", eb => eb
|
||||
.Add("SmtpHost", smtpHost!)
|
||||
.Add("SmtpPort", smtpPort.ToString())
|
||||
.Add("FromAddress", fromAddress!)
|
||||
.Add("Enabled", "true")
|
||||
.Add("UseSsl", useSsl.ToString())
|
||||
.Add("HasCredentials", !string.IsNullOrWhiteSpace(username) ? "yes" : "no"))
|
||||
.Build());
|
||||
}
|
||||
}
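// Example of a configuration shape that satisfies this check (illustrative; the shorter
// Host/Port/From key aliases are also accepted, per the lookups above):
//
//   "Notify": {
//     "Channels": {
//       "Email": {
//         "SmtpHost": "smtp.example.com",
//         "SmtpPort": 587,
//         "FromAddress": "noreply@example.com",
//         "Enabled": true,
//         "UseSsl": true
//       }
//     }
//   }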
|
||||
@@ -0,0 +1,186 @@
|
||||
using System.Globalization;
|
||||
using System.Net.Sockets;
|
||||
using Microsoft.Extensions.Configuration;
|
||||
using StellaOps.Doctor.Models;
|
||||
using StellaOps.Doctor.Plugins;
|
||||
|
||||
namespace StellaOps.Doctor.Plugin.Notify.Checks;
|
||||
|
||||
/// <summary>
|
||||
/// Checks if the configured SMTP server is reachable.
|
||||
/// </summary>
|
||||
public sealed class EmailConnectivityCheck : IDoctorCheck
|
||||
{
|
||||
private const string PluginId = "stellaops.doctor.notify";
|
||||
private const string CategoryName = "Notifications";
|
||||
|
||||
/// <inheritdoc />
|
||||
public string CheckId => "check.notify.email.connectivity";
|
||||
|
||||
/// <inheritdoc />
|
||||
public string Name => "Email Connectivity";
|
||||
|
||||
/// <inheritdoc />
|
||||
public string Description => "Verify SMTP server is reachable";
|
||||
|
||||
/// <inheritdoc />
|
||||
public DoctorSeverity DefaultSeverity => DoctorSeverity.Warn;
|
||||
|
||||
/// <inheritdoc />
|
||||
public IReadOnlyList<string> Tags => ["notify", "email", "smtp", "connectivity", "network"];
|
||||
|
||||
/// <inheritdoc />
|
||||
public TimeSpan EstimatedDuration => TimeSpan.FromSeconds(5);
|
||||
|
||||
/// <inheritdoc />
|
||||
public bool CanRun(DoctorPluginContext context)
|
||||
{
|
||||
var emailConfig = context.Configuration.GetSection("Notify:Channels:Email");
|
||||
var smtpHost = emailConfig["SmtpHost"] ?? emailConfig["Host"];
|
||||
var smtpPort = emailConfig.GetValue<int?>("SmtpPort") ?? emailConfig.GetValue<int?>("Port") ?? 0;
|
||||
|
||||
return !string.IsNullOrWhiteSpace(smtpHost) && smtpPort > 0;
|
||||
}
|
||||
|
||||
/// <inheritdoc />
|
||||
public async Task<DoctorCheckResult> RunAsync(DoctorPluginContext context, CancellationToken ct)
|
||||
{
|
||||
var emailConfig = context.Configuration.GetSection("Notify:Channels:Email");
|
||||
var smtpHost = emailConfig["SmtpHost"] ?? emailConfig["Host"]!;
|
||||
var smtpPort = emailConfig.GetValue<int?>("SmtpPort") ?? emailConfig.GetValue<int?>("Port") ?? 587;
|
||||
|
||||
var builder = context.CreateResult(CheckId, PluginId, CategoryName);
|
||||
|
||||
try
|
||||
{
|
||||
using var tcpClient = new TcpClient();
|
||||
|
||||
// Set connection timeout
|
||||
using var timeoutCts = CancellationTokenSource.CreateLinkedTokenSource(ct);
|
||||
timeoutCts.CancelAfter(TimeSpan.FromSeconds(10));
|
||||
|
||||
await tcpClient.ConnectAsync(smtpHost, smtpPort, timeoutCts.Token);
|
||||
|
||||
if (tcpClient.Connected)
|
||||
{
|
||||
// Try to read the SMTP banner
|
||||
using var stream = tcpClient.GetStream();
|
||||
stream.ReadTimeout = 5000;
|
||||
|
||||
var buffer = new byte[1024];
|
||||
string? banner = null;
|
||||
|
||||
try
|
||||
{
|
||||
var bytesRead = await stream.ReadAsync(buffer, timeoutCts.Token);
|
||||
if (bytesRead > 0)
|
||||
{
|
||||
banner = System.Text.Encoding.ASCII.GetString(buffer, 0, bytesRead).Trim();
|
||||
}
|
||||
}
|
||||
catch
|
||||
{
|
||||
// Banner read failed, but connection succeeded
|
||||
}
|
||||
|
||||
var isSmtp = banner?.StartsWith("220", StringComparison.Ordinal) == true;
|
||||
|
||||
if (isSmtp)
|
||||
{
|
||||
return builder
|
||||
.Pass("SMTP server is reachable and responding")
|
||||
.WithEvidence("SMTP connectivity test", eb => eb
|
||||
.Add("SmtpHost", smtpHost)
|
||||
.Add("SmtpPort", smtpPort.ToString(CultureInfo.InvariantCulture))
|
||||
.Add("Banner", banner?.Length > 100 ? banner[..100] + "..." : banner ?? "(none)"))
|
||||
.Build();
|
||||
}
|
||||
|
||||
return builder
|
||||
.Info("Connection to SMTP port succeeded but banner not recognized")
|
||||
.WithEvidence("SMTP connectivity test", eb => eb
|
||||
.Add("SmtpHost", smtpHost)
|
||||
.Add("SmtpPort", smtpPort.ToString(CultureInfo.InvariantCulture))
|
||||
.Add("Banner", banner ?? "(none)")
|
||||
.Add("Note", "Connection succeeded but response doesn't look like SMTP"))
|
||||
.Build();
|
||||
}
|
||||
|
||||
return builder
|
||||
.Fail("Failed to connect to SMTP server")
|
||||
.WithEvidence("SMTP connectivity test", eb => eb
|
||||
.Add("SmtpHost", smtpHost)
|
||||
.Add("SmtpPort", smtpPort.ToString(CultureInfo.InvariantCulture)))
|
||||
.WithCauses(
|
||||
"SMTP server not running",
|
||||
"Wrong host or port",
|
||||
"Firewall blocking connection")
|
||||
.WithRemediation(rb => rb
|
||||
.AddStep(1, "Test port connectivity",
|
||||
$"nc -zv {smtpHost} {smtpPort}",
|
||||
CommandType.Shell)
|
||||
.AddStep(2, "Test with telnet",
|
||||
$"telnet {smtpHost} {smtpPort}",
|
||||
CommandType.Shell))
|
||||
.WithVerification($"stella doctor --check {CheckId}")
|
||||
.Build();
|
||||
}
|
||||
catch (OperationCanceledException) when (ct.IsCancellationRequested)
|
||||
{
|
||||
throw;
|
||||
}
|
||||
catch (OperationCanceledException)
|
||||
{
|
||||
return builder
|
||||
.Fail("SMTP connection timed out")
|
||||
.WithEvidence("SMTP connectivity test", eb => eb
|
||||
.Add("SmtpHost", smtpHost)
|
||||
.Add("SmtpPort", smtpPort.ToString(CultureInfo.InvariantCulture))
|
||||
.Add("Error", "Connection timeout (10s)"))
|
||||
.WithCauses(
|
||||
"SMTP server not responding",
|
||||
"Network latency too high",
|
||||
"Firewall blocking connection",
|
||||
"Wrong host or port")
|
||||
.WithRemediation(rb => rb
|
||||
.AddStep(1, "Test DNS resolution",
|
||||
$"nslookup {smtpHost}",
|
||||
CommandType.Shell)
|
||||
.AddStep(2, "Test port connectivity",
|
||||
$"nc -zv -w 10 {smtpHost} {smtpPort}",
|
||||
CommandType.Shell)
|
||||
.AddStep(3, "Check firewall rules",
|
||||
"# Ensure outbound connections to SMTP ports are allowed",
|
||||
CommandType.Manual))
|
||||
.WithVerification($"stella doctor --check {CheckId}")
|
||||
.Build();
|
||||
}
|
||||
catch (SocketException ex)
|
||||
{
|
||||
return builder
|
||||
.Fail($"Cannot connect to SMTP server: {ex.Message}")
|
||||
.WithEvidence("SMTP connectivity test", eb => eb
|
||||
.Add("SmtpHost", smtpHost)
|
||||
.Add("SmtpPort", smtpPort.ToString(CultureInfo.InvariantCulture))
|
||||
.Add("SocketError", ex.SocketErrorCode.ToString())
|
||||
.Add("Error", ex.Message))
|
||||
.WithCauses(
|
||||
"DNS resolution failure",
|
||||
"SMTP server not running on specified port",
|
||||
"Network connectivity issue",
|
||||
"Firewall blocking connection")
|
||||
.WithRemediation(rb => rb
|
||||
.AddStep(1, "Test DNS resolution",
|
||||
$"nslookup {smtpHost}",
|
||||
CommandType.Shell)
|
||||
.AddStep(2, "Test port connectivity",
|
||||
$"nc -zv {smtpHost} {smtpPort}",
|
||||
CommandType.Shell)
|
||||
.AddStep(3, "Verify SMTP host and port settings",
|
||||
"# Common SMTP ports: 25, 465 (SSL), 587 (STARTTLS)",
|
||||
CommandType.Manual))
|
||||
.WithVerification($"stella doctor --check {CheckId}")
|
||||
.Build();
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,232 @@
|
||||
using Microsoft.Extensions.DependencyInjection;
|
||||
using Microsoft.Extensions.Diagnostics.HealthChecks;
|
||||
using StellaOps.Doctor.Models;
|
||||
using StellaOps.Doctor.Plugins;
|
||||
|
||||
namespace StellaOps.Doctor.Plugin.Notify.Checks;
|
||||
|
||||
/// <summary>
|
||||
/// Checks if the notification queue (Redis or NATS) is healthy.
|
||||
/// </summary>
|
||||
public sealed class NotifyQueueHealthCheck : IDoctorCheck
|
||||
{
|
||||
private const string PluginId = "stellaops.doctor.notify";
|
||||
private const string CategoryName = "Notifications";
|
||||
|
||||
/// <inheritdoc />
|
||||
public string CheckId => "check.notify.queue.health";
|
||||
|
||||
/// <inheritdoc />
|
||||
public string Name => "Notification Queue Health";
|
||||
|
||||
/// <inheritdoc />
|
||||
public string Description => "Verify notification event and delivery queues are healthy";
|
||||
|
||||
/// <inheritdoc />
|
||||
public DoctorSeverity DefaultSeverity => DoctorSeverity.Fail;
|
||||
|
||||
/// <inheritdoc />
|
||||
public IReadOnlyList<string> Tags => ["notify", "queue", "redis", "nats", "infrastructure"];
|
||||
|
||||
/// <inheritdoc />
|
||||
public TimeSpan EstimatedDuration => TimeSpan.FromSeconds(5);
|
||||
|
||||
/// <inheritdoc />
|
||||
public bool CanRun(DoctorPluginContext context)
|
||||
{
|
||||
// Check if any queue configuration exists
|
||||
var queueConfig = context.Configuration.GetSection("Notify:Queue");
|
||||
var transportKind = queueConfig["Transport"] ?? queueConfig["Kind"];
|
||||
return !string.IsNullOrWhiteSpace(transportKind);
|
||||
}
|
||||
|
||||
/// <inheritdoc />
|
||||
public async Task<DoctorCheckResult> RunAsync(DoctorPluginContext context, CancellationToken ct)
|
||||
{
|
||||
var builder = context.CreateResult(CheckId, PluginId, CategoryName);
|
||||
var queueConfig = context.Configuration.GetSection("Notify:Queue");
|
||||
var transportKind = queueConfig["Transport"] ?? queueConfig["Kind"] ?? "unknown";
|
||||
|
||||
// Try to get the event queue health check from DI
|
||||
var eventQueueHealthCheck = context.Services.GetService<StellaOps.Notify.Queue.NotifyQueueHealthCheck>();
|
||||
var deliveryQueueHealthCheck = context.Services.GetService<StellaOps.Notify.Queue.NotifyDeliveryQueueHealthCheck>();
|
||||
|
||||
if (eventQueueHealthCheck == null && deliveryQueueHealthCheck == null)
|
||||
{
|
||||
return builder
|
||||
.Skip("No notification queue health checks registered")
|
||||
.WithEvidence("Queue health check status", eb => eb
|
||||
.Add("Transport", transportKind)
|
||||
.Add("EventQueueHealthCheck", "not registered")
|
||||
.Add("DeliveryQueueHealthCheck", "not registered"))
|
||||
.Build();
|
||||
}
|
||||
|
||||
var results = new List<(string Name, HealthCheckResult Result)>();
|
||||
|
||||
// Check event queue
|
||||
if (eventQueueHealthCheck != null)
|
||||
{
|
||||
try
|
||||
{
|
||||
var eventContext = new HealthCheckContext
|
||||
{
|
||||
Registration = new HealthCheckRegistration(
|
||||
"notify-event-queue",
|
||||
eventQueueHealthCheck,
|
||||
HealthStatus.Unhealthy,
|
||||
null)
|
||||
};
|
||||
|
||||
var eventResult = await eventQueueHealthCheck.CheckHealthAsync(eventContext, ct);
|
||||
results.Add(("EventQueue", eventResult));
|
||||
}
|
||||
catch (Exception ex)
|
||||
{
|
||||
results.Add(("EventQueue", new HealthCheckResult(
|
||||
HealthStatus.Unhealthy,
|
||||
"Event queue health check threw exception",
|
||||
ex)));
|
||||
}
|
||||
}
|
||||
|
||||
// Check delivery queue
|
||||
if (deliveryQueueHealthCheck != null)
|
||||
{
|
||||
try
|
||||
{
|
||||
var deliveryContext = new HealthCheckContext
|
||||
{
|
||||
Registration = new HealthCheckRegistration(
|
||||
"notify-delivery-queue",
|
||||
deliveryQueueHealthCheck,
|
||||
HealthStatus.Unhealthy,
|
||||
null)
|
||||
};
|
||||
|
||||
var deliveryResult = await deliveryQueueHealthCheck.CheckHealthAsync(deliveryContext, ct);
|
||||
results.Add(("DeliveryQueue", deliveryResult));
|
||||
}
|
||||
catch (Exception ex)
|
||||
{
|
||||
results.Add(("DeliveryQueue", new HealthCheckResult(
|
||||
HealthStatus.Unhealthy,
|
||||
"Delivery queue health check threw exception",
|
||||
ex)));
|
||||
}
|
||||
}
|
||||
|
||||
// Aggregate results
|
||||
var allHealthy = results.All(r => r.Result.Status == HealthStatus.Healthy);
|
||||
var anyUnhealthy = results.Any(r => r.Result.Status == HealthStatus.Unhealthy);
|
||||
|
||||
if (allHealthy)
|
||||
{
|
||||
return builder
|
||||
.Pass($"Notification queue ({transportKind}) is healthy")
|
||||
.WithEvidence("Queue health check results", eb =>
|
||||
{
|
||||
eb.Add("Transport", transportKind);
|
||||
foreach (var (name, result) in results)
|
||||
{
|
||||
eb.Add($"{name}Status", result.Status.ToString());
|
||||
if (!string.IsNullOrEmpty(result.Description))
|
||||
{
|
||||
eb.Add($"{name}Message", result.Description);
|
||||
}
|
||||
}
|
||||
})
|
||||
.Build();
|
||||
}
|
||||
|
||||
if (anyUnhealthy)
|
||||
{
|
||||
var unhealthyQueues = results
|
||||
.Where(r => r.Result.Status == HealthStatus.Unhealthy)
|
||||
.Select(r => r.Name)
|
||||
.ToList();
|
||||
|
||||
return builder
|
||||
.Fail($"Notification queue unhealthy: {string.Join(", ", unhealthyQueues)}")
|
||||
.WithEvidence("Queue health check results", eb =>
|
||||
{
|
||||
eb.Add("Transport", transportKind);
|
||||
foreach (var (name, result) in results)
|
||||
{
|
||||
eb.Add($"{name}Status", result.Status.ToString());
|
||||
if (!string.IsNullOrEmpty(result.Description))
|
||||
{
|
||||
eb.Add($"{name}Message", result.Description);
|
||||
}
|
||||
}
|
||||
})
|
||||
.WithCauses(
|
||||
"Queue server not running",
|
||||
"Network connectivity issues",
|
||||
"Authentication failure",
|
||||
"Incorrect connection string")
|
||||
.WithRemediation(rb =>
|
||||
{
|
||||
if (transportKind.Equals("redis", StringComparison.OrdinalIgnoreCase) ||
|
||||
transportKind.Equals("valkey", StringComparison.OrdinalIgnoreCase))
|
||||
{
|
||||
rb.AddStep(1, "Check Redis/Valkey server status",
|
||||
"redis-cli ping",
|
||||
CommandType.Shell)
|
||||
.AddStep(2, "Verify Redis connection settings",
|
||||
"# Check Notify:Queue:Redis:ConnectionString in configuration",
|
||||
CommandType.Manual)
|
||||
.AddStep(3, "Check Redis server logs",
|
||||
"docker logs <redis-container-name>",
|
||||
CommandType.Shell);
|
||||
}
|
||||
else if (transportKind.Equals("nats", StringComparison.OrdinalIgnoreCase))
|
||||
{
|
||||
rb.AddStep(1, "Check NATS server status",
|
||||
"nats server ping",
|
||||
CommandType.Shell)
|
||||
.AddStep(2, "Verify NATS connection settings",
|
||||
"# Check Notify:Queue:Nats:Url in configuration",
|
||||
CommandType.Manual)
|
||||
.AddStep(3, "Check NATS server logs",
|
||||
"docker logs <nats-container-name>",
|
||||
CommandType.Shell);
|
||||
}
|
||||
else
|
||||
{
|
||||
rb.AddStep(1, "Verify queue transport configuration",
|
||||
"# Check Notify:Queue:Transport setting",
|
||||
CommandType.Manual);
|
||||
}
|
||||
})
|
||||
.WithVerification($"stella doctor --check {CheckId}")
|
||||
.Build();
|
||||
}
|
||||
|
||||
// Degraded state
|
||||
return builder
|
||||
.Warn("Notification queue in degraded state")
|
||||
.WithEvidence("Queue health check results", eb =>
|
||||
{
|
||||
eb.Add("Transport", transportKind);
|
||||
foreach (var (name, result) in results)
|
||||
{
|
||||
eb.Add($"{name}Status", result.Status.ToString());
|
||||
if (!string.IsNullOrEmpty(result.Description))
|
||||
{
|
||||
eb.Add($"{name}Message", result.Description);
|
||||
}
|
||||
}
|
||||
})
|
||||
.WithCauses(
|
||||
"Queue server experiencing issues",
|
||||
"High latency",
|
||||
"Resource constraints")
|
||||
.WithRemediation(rb => rb
|
||||
.AddStep(1, "Check queue server health",
|
||||
"# Review queue server metrics and logs",
|
||||
CommandType.Manual))
|
||||
.WithVerification($"stella doctor --check {CheckId}")
|
||||
.Build();
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,109 @@
using System.Globalization;
using Microsoft.Extensions.Configuration;
using Microsoft.Extensions.DependencyInjection;
using StellaOps.Doctor.Models;
using StellaOps.Doctor.Plugins;
using StellaOps.Notify.Engine;
using StellaOps.Notify.Models;

namespace StellaOps.Doctor.Plugin.Notify.Checks;

/// <summary>
/// Checks if Slack notification channels are properly configured.
/// </summary>
public sealed class SlackConfiguredCheck : IDoctorCheck
{
    private const string PluginId = "stellaops.doctor.notify";
    private const string CategoryName = "Notifications";

    /// <inheritdoc />
    public string CheckId => "check.notify.slack.configured";

    /// <inheritdoc />
    public string Name => "Slack Configuration";

    /// <inheritdoc />
    public string Description => "Verify Slack notification channel is properly configured";

    /// <inheritdoc />
    public DoctorSeverity DefaultSeverity => DoctorSeverity.Warn;

    /// <inheritdoc />
    public IReadOnlyList<string> Tags => ["notify", "slack", "quick", "configuration"];

    /// <inheritdoc />
    public TimeSpan EstimatedDuration => TimeSpan.FromMilliseconds(100);

    /// <inheritdoc />
    public bool CanRun(DoctorPluginContext context)
    {
        // Check if Slack is configured in settings
        var slackConfig = context.Configuration.GetSection("Notify:Channels:Slack");
        return slackConfig.Exists();
    }

    /// <inheritdoc />
    public Task<DoctorCheckResult> RunAsync(DoctorPluginContext context, CancellationToken ct)
    {
        var builder = context.CreateResult(CheckId, PluginId, CategoryName);
        var slackConfig = context.Configuration.GetSection("Notify:Channels:Slack");

        var webhookUrl = slackConfig["WebhookUrl"];
        var channel = slackConfig["Channel"];
        var enabled = slackConfig.GetValue<bool>("Enabled", true);

        var hasWebhook = !string.IsNullOrWhiteSpace(webhookUrl);
        var hasChannel = !string.IsNullOrWhiteSpace(channel);

        if (!hasWebhook)
        {
            return Task.FromResult(builder
                .Fail("Slack webhook URL is not configured")
                .WithEvidence("Slack configuration status", eb => eb
                    .Add("WebhookUrl", "(not set)")
                    .Add("Channel", hasChannel ? channel! : "(not set)")
                    .Add("Enabled", enabled.ToString()))
                .WithCauses(
                    "Slack webhook URL not set in configuration",
                    "Missing Notify:Channels:Slack:WebhookUrl setting",
                    "Environment variable not bound to configuration")
                .WithRemediation(rb => rb
                    .AddStep(1, "Add Slack webhook URL to configuration",
                        "# Add to appsettings.json or environment:\n" +
                        "# \"Notify\": { \"Channels\": { \"Slack\": { \"WebhookUrl\": \"https://hooks.slack.com/services/...\" } } }",
                        CommandType.FileEdit)
                    .AddStep(2, "Or set via environment variable",
                        "export Notify__Channels__Slack__WebhookUrl=\"https://hooks.slack.com/services/YOUR/WEBHOOK/URL\"",
                        CommandType.Shell)
                    .WithSafetyNote("Slack webhook URLs are secrets - store in a secrets manager"))
                .WithVerification($"stella doctor --check {CheckId}")
                .Build());
        }

        if (!enabled)
        {
            return Task.FromResult(builder
                .Warn("Slack channel is configured but disabled")
                .WithEvidence("Slack configuration status", eb => eb
                    .Add("WebhookUrl", DoctorPluginContext.Redact(webhookUrl))
                    .Add("Channel", hasChannel ? channel! : "(default)")
                    .Add("Enabled", "false"))
                .WithCauses(
                    "Slack notifications explicitly disabled in configuration")
                .WithRemediation(rb => rb
                    .AddStep(1, "Enable Slack notifications",
                        "# Set Notify:Channels:Slack:Enabled to true in configuration",
                        CommandType.FileEdit))
                .WithVerification($"stella doctor --check {CheckId}")
                .Build());
        }

        return Task.FromResult(builder
            .Pass("Slack notification channel is properly configured")
            .WithEvidence("Slack configuration status", eb => eb
                .Add("WebhookUrl", DoctorPluginContext.Redact(webhookUrl))
                .Add("Channel", hasChannel ? channel! : "(default)")
                .Add("Enabled", "true"))
            .Build());
    }
}
@@ -0,0 +1,153 @@
|
||||
using System.Globalization;
|
||||
using System.Net.Http;
|
||||
using System.Text;
|
||||
using System.Text.Json;
|
||||
using Microsoft.Extensions.DependencyInjection;
|
||||
using StellaOps.Doctor.Models;
|
||||
using StellaOps.Doctor.Plugins;
|
||||
|
||||
namespace StellaOps.Doctor.Plugin.Notify.Checks;
|
||||
|
||||
/// <summary>
|
||||
/// Checks if the configured Slack webhook endpoint is reachable.
|
||||
/// </summary>
|
||||
public sealed class SlackConnectivityCheck : IDoctorCheck
|
||||
{
|
||||
private const string PluginId = "stellaops.doctor.notify";
|
||||
private const string CategoryName = "Notifications";
|
||||
|
||||
/// <inheritdoc />
|
||||
public string CheckId => "check.notify.slack.connectivity";
|
||||
|
||||
/// <inheritdoc />
|
||||
public string Name => "Slack Connectivity";
|
||||
|
||||
/// <inheritdoc />
|
||||
public string Description => "Verify Slack webhook endpoint is reachable";
|
||||
|
||||
/// <inheritdoc />
|
||||
public DoctorSeverity DefaultSeverity => DoctorSeverity.Warn;
|
||||
|
||||
/// <inheritdoc />
|
||||
public IReadOnlyList<string> Tags => ["notify", "slack", "connectivity", "network"];
|
||||
|
||||
/// <inheritdoc />
|
||||
public TimeSpan EstimatedDuration => TimeSpan.FromSeconds(5);
|
||||
|
||||
/// <inheritdoc />
|
||||
public bool CanRun(DoctorPluginContext context)
|
||||
{
|
||||
var webhookUrl = context.Configuration["Notify:Channels:Slack:WebhookUrl"];
|
||||
return !string.IsNullOrWhiteSpace(webhookUrl) &&
|
||||
Uri.TryCreate(webhookUrl, UriKind.Absolute, out _);
|
||||
}
|
||||
|
||||
/// <inheritdoc />
|
||||
public async Task<DoctorCheckResult> RunAsync(DoctorPluginContext context, CancellationToken ct)
|
||||
{
|
||||
var webhookUrl = context.Configuration["Notify:Channels:Slack:WebhookUrl"]!;
|
||||
var builder = context.CreateResult(CheckId, PluginId, CategoryName);
|
||||
|
||||
try
|
||||
{
|
||||
var httpClientFactory = context.Services.GetRequiredService<IHttpClientFactory>();
|
||||
var httpClient = httpClientFactory.CreateClient("DoctorHealthCheck");
|
||||
httpClient.Timeout = TimeSpan.FromSeconds(10);
|
||||
|
||||
// Send a minimal test payload to Slack
|
||||
// Note: This won't actually post a message if the payload is invalid,
|
||||
// but it will verify the endpoint is reachable and responds
|
||||
var testPayload = new { text = "" }; // Empty text won't post but validates endpoint
|
||||
var content = new StringContent(
|
||||
JsonSerializer.Serialize(testPayload),
|
||||
Encoding.UTF8,
|
||||
"application/json");
|
||||
|
||||
var response = await httpClient.PostAsync(webhookUrl, content, ct);
|
||||
var responseBody = await response.Content.ReadAsStringAsync(ct);
|
||||
|
||||
// Slack returns "no_text" for empty messages, which proves connectivity
|
||||
if (response.IsSuccessStatusCode || responseBody.Contains("no_text", StringComparison.OrdinalIgnoreCase))
|
||||
{
|
||||
return builder
|
||||
.Pass("Slack webhook endpoint is reachable")
|
||||
.WithEvidence("Slack connectivity test", eb => eb
|
||||
.Add("WebhookUrl", DoctorPluginContext.Redact(webhookUrl))
|
||||
.Add("StatusCode", ((int)response.StatusCode).ToString(CultureInfo.InvariantCulture))
|
||||
.Add("Response", responseBody.Length > 100 ? responseBody[..100] + "..." : responseBody))
|
||||
.Build();
|
||||
}
|
||||
|
||||
return builder
|
||||
.Warn($"Slack webhook returned unexpected response: {response.StatusCode}")
|
||||
.WithEvidence("Slack connectivity test", eb => eb
|
||||
.Add("WebhookUrl", DoctorPluginContext.Redact(webhookUrl))
|
||||
.Add("StatusCode", ((int)response.StatusCode).ToString(CultureInfo.InvariantCulture))
|
||||
.Add("Response", responseBody.Length > 200 ? responseBody[..200] + "..." : responseBody))
|
||||
.WithCauses(
|
||||
"Invalid or expired webhook URL",
|
||||
"Slack workspace configuration changed",
|
||||
"Webhook URL revoked or regenerated",
|
||||
"Rate limiting by Slack")
|
||||
.WithRemediation(rb => rb
|
||||
.AddStep(1, "Verify webhook URL in Slack App settings",
|
||||
"# Go to https://api.slack.com/apps -> Your App -> Incoming Webhooks",
|
||||
CommandType.Manual)
|
||||
.AddStep(2, "Test webhook manually",
|
||||
$"curl -X POST -H 'Content-type: application/json' --data '{{\"text\":\"Doctor test\"}}' '{DoctorPluginContext.Redact(webhookUrl)}'",
|
||||
CommandType.Shell)
|
||||
.AddStep(3, "Regenerate webhook if needed",
|
||||
"# Create a new webhook URL in Slack and update configuration",
|
||||
CommandType.Manual))
|
||||
.WithVerification($"stella doctor --check {CheckId}")
|
||||
.Build();
|
||||
}
|
||||
catch (TaskCanceledException)
|
||||
{
|
||||
return builder
|
||||
.Fail("Slack webhook connection timed out")
|
||||
.WithEvidence("Slack connectivity test", eb => eb
|
||||
.Add("WebhookUrl", DoctorPluginContext.Redact(webhookUrl))
|
||||
.Add("Error", "Connection timeout (10s)"))
|
||||
.WithCauses(
|
||||
"Network connectivity issue to Slack",
|
||||
"Firewall blocking outbound HTTPS",
|
||||
"Proxy configuration required",
|
||||
"Slack service degradation")
|
||||
.WithRemediation(rb => rb
|
||||
.AddStep(1, "Check network connectivity",
|
||||
"curl -v https://hooks.slack.com/",
|
||||
CommandType.Shell)
|
||||
.AddStep(2, "Check Slack status",
|
||||
"# Visit https://status.slack.com for service status",
|
||||
CommandType.Manual)
|
||||
.AddStep(3, "Verify proxy settings if applicable",
|
||||
"echo $HTTP_PROXY $HTTPS_PROXY",
|
||||
CommandType.Shell))
|
||||
.WithVerification($"stella doctor --check {CheckId}")
|
||||
.Build();
|
||||
}
|
||||
catch (HttpRequestException ex)
|
||||
{
|
||||
return builder
|
||||
.Fail($"Cannot reach Slack webhook: {ex.Message}")
|
||||
.WithEvidence("Slack connectivity test", eb => eb
|
||||
.Add("WebhookUrl", DoctorPluginContext.Redact(webhookUrl))
|
||||
.Add("Error", ex.Message))
|
||||
.WithCauses(
|
||||
"DNS resolution failure",
|
||||
"Network connectivity issue",
|
||||
"TLS/SSL certificate problem",
|
||||
"Firewall blocking connection")
|
||||
.WithRemediation(rb => rb
|
||||
.AddStep(1, "Test DNS resolution",
|
||||
"nslookup hooks.slack.com",
|
||||
CommandType.Shell)
|
||||
.AddStep(2, "Test HTTPS connectivity",
|
||||
"curl -v https://hooks.slack.com/",
|
||||
CommandType.Shell))
|
||||
.WithVerification($"stella doctor --check {CheckId}")
|
||||
.Build();
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,125 @@
|
||||
using Microsoft.Extensions.Configuration;
|
||||
using StellaOps.Doctor.Models;
|
||||
using StellaOps.Doctor.Plugins;
|
||||
|
||||
namespace StellaOps.Doctor.Plugin.Notify.Checks;
|
||||
|
||||
/// <summary>
|
||||
/// Checks if Microsoft Teams notification channels are properly configured.
|
||||
/// </summary>
|
||||
public sealed class TeamsConfiguredCheck : IDoctorCheck
|
||||
{
|
||||
private const string PluginId = "stellaops.doctor.notify";
|
||||
private const string CategoryName = "Notifications";
|
||||
|
||||
/// <inheritdoc />
|
||||
public string CheckId => "check.notify.teams.configured";
|
||||
|
||||
/// <inheritdoc />
|
||||
public string Name => "Teams Configuration";
|
||||
|
||||
/// <inheritdoc />
|
||||
public string Description => "Verify Microsoft Teams notification channel is properly configured";
|
||||
|
||||
/// <inheritdoc />
|
||||
public DoctorSeverity DefaultSeverity => DoctorSeverity.Warn;
|
||||
|
||||
/// <inheritdoc />
|
||||
public IReadOnlyList<string> Tags => ["notify", "teams", "quick", "configuration"];
|
||||
|
||||
/// <inheritdoc />
|
||||
public TimeSpan EstimatedDuration => TimeSpan.FromMilliseconds(100);
|
||||
|
||||
/// <inheritdoc />
|
||||
public bool CanRun(DoctorPluginContext context)
|
||||
{
|
||||
var teamsConfig = context.Configuration.GetSection("Notify:Channels:Teams");
|
||||
return teamsConfig.Exists();
|
||||
}
|
||||
|
||||
/// <inheritdoc />
|
||||
public Task<DoctorCheckResult> RunAsync(DoctorPluginContext context, CancellationToken ct)
|
||||
{
|
||||
var builder = context.CreateResult(CheckId, PluginId, CategoryName);
|
||||
var teamsConfig = context.Configuration.GetSection("Notify:Channels:Teams");
|
||||
|
||||
var webhookUrl = teamsConfig["WebhookUrl"];
|
||||
var enabled = teamsConfig.GetValue<bool>("Enabled", true);
|
||||
|
||||
var hasWebhook = !string.IsNullOrWhiteSpace(webhookUrl);
|
||||
var isValidUrl = hasWebhook && Uri.TryCreate(webhookUrl, UriKind.Absolute, out var uri) &&
|
||||
(uri.Host.Contains("webhook.office.com", StringComparison.OrdinalIgnoreCase) ||
|
||||
uri.Host.Contains("microsoft.com", StringComparison.OrdinalIgnoreCase));
|
||||
|
||||
if (!hasWebhook)
|
||||
{
|
||||
return Task.FromResult(builder
|
||||
.Fail("Teams webhook URL is not configured")
|
||||
.WithEvidence("Teams configuration status", eb => eb
|
||||
.Add("WebhookUrl", "(not set)")
|
||||
.Add("Enabled", enabled.ToString()))
|
||||
.WithCauses(
|
||||
"Teams webhook URL not set in configuration",
|
||||
"Missing Notify:Channels:Teams:WebhookUrl setting",
|
||||
"Environment variable not bound to configuration")
|
||||
.WithRemediation(rb => rb
|
||||
.AddStep(1, "Create Teams Incoming Webhook",
|
||||
"# In Teams: Channel > Connectors > Incoming Webhook > Create",
|
||||
CommandType.Manual)
|
||||
.AddStep(2, "Add webhook URL to configuration",
|
||||
"# Add to appsettings.json:\n" +
|
||||
"# \"Notify\": { \"Channels\": { \"Teams\": { \"WebhookUrl\": \"https://...webhook.office.com/...\" } } }",
|
||||
CommandType.FileEdit)
|
||||
.AddStep(3, "Or set via environment variable",
|
||||
"export Notify__Channels__Teams__WebhookUrl=\"https://YOUR_WEBHOOK_URL\"",
|
||||
CommandType.Shell)
|
||||
.WithSafetyNote("Teams webhook URLs are secrets - store securely"))
|
||||
.WithVerification($"stella doctor --check {CheckId}")
|
||||
.Build());
|
||||
}
|
||||
|
||||
if (!isValidUrl)
|
||||
{
|
||||
return Task.FromResult(builder
|
||||
.Warn("Teams webhook URL format appears invalid")
|
||||
.WithEvidence("Teams configuration status", eb => eb
|
||||
.Add("WebhookUrl", DoctorPluginContext.Redact(webhookUrl))
|
||||
.Add("Enabled", enabled.ToString())
|
||||
.Add("ValidationNote", "Expected webhook.office.com or microsoft.com domain"))
|
||||
.WithCauses(
|
||||
"Webhook URL is not from Microsoft domain",
|
||||
"Malformed URL in configuration",
|
||||
"Legacy webhook URL format")
|
||||
.WithRemediation(rb => rb
|
||||
.AddStep(1, "Verify webhook URL",
|
||||
"# Teams webhook URLs typically look like:\n# https://YOUR_TENANT.webhook.office.com/webhookb2/...",
|
||||
CommandType.Manual))
|
||||
.WithVerification($"stella doctor --check {CheckId}")
|
||||
.Build());
|
||||
}
|
||||
|
||||
if (!enabled)
|
||||
{
|
||||
return Task.FromResult(builder
|
||||
.Warn("Teams channel is configured but disabled")
|
||||
.WithEvidence("Teams configuration status", eb => eb
|
||||
.Add("WebhookUrl", DoctorPluginContext.Redact(webhookUrl))
|
||||
.Add("Enabled", "false"))
|
||||
.WithCauses(
|
||||
"Teams notifications explicitly disabled in configuration")
|
||||
.WithRemediation(rb => rb
|
||||
.AddStep(1, "Enable Teams notifications",
|
||||
"# Set Notify:Channels:Teams:Enabled to true in configuration",
|
||||
CommandType.FileEdit))
|
||||
.WithVerification($"stella doctor --check {CheckId}")
|
||||
.Build());
|
||||
}
|
||||
|
||||
return Task.FromResult(builder
|
||||
.Pass("Teams notification channel is properly configured")
|
||||
.WithEvidence("Teams configuration status", eb => eb
|
||||
.Add("WebhookUrl", DoctorPluginContext.Redact(webhookUrl))
|
||||
.Add("Enabled", "true"))
|
||||
.Build());
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,169 @@
|
||||
using System.Globalization;
|
||||
using System.Net.Http;
|
||||
using System.Text;
|
||||
using System.Text.Json;
|
||||
using Microsoft.Extensions.DependencyInjection;
|
||||
using StellaOps.Doctor.Models;
|
||||
using StellaOps.Doctor.Plugins;
|
||||
|
||||
namespace StellaOps.Doctor.Plugin.Notify.Checks;
|
||||
|
||||
/// <summary>
|
||||
/// Checks if the configured Microsoft Teams webhook endpoint is reachable.
|
||||
/// </summary>
|
||||
public sealed class TeamsConnectivityCheck : IDoctorCheck
|
||||
{
|
||||
private const string PluginId = "stellaops.doctor.notify";
|
||||
private const string CategoryName = "Notifications";
|
||||
|
||||
/// <inheritdoc />
|
||||
public string CheckId => "check.notify.teams.connectivity";
|
||||
|
||||
/// <inheritdoc />
|
||||
public string Name => "Teams Connectivity";
|
||||
|
||||
/// <inheritdoc />
|
||||
public string Description => "Verify Microsoft Teams webhook endpoint is reachable";
|
||||
|
||||
/// <inheritdoc />
|
||||
public DoctorSeverity DefaultSeverity => DoctorSeverity.Warn;
|
||||
|
||||
/// <inheritdoc />
|
||||
public IReadOnlyList<string> Tags => ["notify", "teams", "connectivity", "network"];
|
||||
|
||||
/// <inheritdoc />
|
||||
public TimeSpan EstimatedDuration => TimeSpan.FromSeconds(5);
|
||||
|
||||
/// <inheritdoc />
|
||||
public bool CanRun(DoctorPluginContext context)
|
||||
{
|
||||
var webhookUrl = context.Configuration["Notify:Channels:Teams:WebhookUrl"];
|
||||
return !string.IsNullOrWhiteSpace(webhookUrl) &&
|
||||
Uri.TryCreate(webhookUrl, UriKind.Absolute, out _);
|
||||
}
|
||||
|
||||
/// <inheritdoc />
|
||||
public async Task<DoctorCheckResult> RunAsync(DoctorPluginContext context, CancellationToken ct)
|
||||
{
|
||||
var webhookUrl = context.Configuration["Notify:Channels:Teams:WebhookUrl"]!;
|
||||
var builder = context.CreateResult(CheckId, PluginId, CategoryName);
|
||||
|
||||
try
|
||||
{
|
||||
var httpClientFactory = context.Services.GetRequiredService<IHttpClientFactory>();
|
||||
var httpClient = httpClientFactory.CreateClient("DoctorHealthCheck");
|
||||
httpClient.Timeout = TimeSpan.FromSeconds(10);
|
||||
|
||||
// Teams Adaptive Card format for connectivity test
|
||||
// Using a minimal card that validates the endpoint
|
||||
var testPayload = new
|
||||
{
|
||||
type = "message",
|
||||
attachments = new[]
|
||||
{
|
||||
new
|
||||
{
|
||||
contentType = "application/vnd.microsoft.card.adaptive",
|
||||
contentUrl = (string?)null,
|
||||
content = new
|
||||
{
|
||||
type = "AdaptiveCard",
|
||||
body = Array.Empty<object>(),
|
||||
version = "1.0"
|
||||
}
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
var content = new StringContent(
|
||||
JsonSerializer.Serialize(testPayload),
|
||||
Encoding.UTF8,
|
||||
"application/json");
|
||||
|
||||
var response = await httpClient.PostAsync(webhookUrl, content, ct);
|
||||
var responseBody = await response.Content.ReadAsStringAsync(ct);
|
||||
|
||||
if (response.IsSuccessStatusCode)
|
||||
{
|
||||
return builder
|
||||
.Pass("Teams webhook endpoint is reachable")
|
||||
.WithEvidence("Teams connectivity test", eb => eb
|
||||
.Add("WebhookUrl", DoctorPluginContext.Redact(webhookUrl))
|
||||
.Add("StatusCode", ((int)response.StatusCode).ToString(CultureInfo.InvariantCulture))
|
||||
.Add("Response", responseBody.Length > 100 ? responseBody[..100] + "..." : responseBody))
|
||||
.Build();
|
||||
}
|
||||
|
||||
return builder
|
||||
.Warn($"Teams webhook returned unexpected response: {response.StatusCode}")
|
||||
.WithEvidence("Teams connectivity test", eb => eb
|
||||
.Add("WebhookUrl", DoctorPluginContext.Redact(webhookUrl))
|
||||
.Add("StatusCode", ((int)response.StatusCode).ToString(CultureInfo.InvariantCulture))
|
||||
.Add("Response", responseBody.Length > 200 ? responseBody[..200] + "..." : responseBody))
|
||||
.WithCauses(
|
||||
"Invalid or expired webhook URL",
|
||||
"Teams connector disabled or deleted",
|
||||
"Webhook URL revoked",
|
||||
"Microsoft 365 tenant configuration changed")
|
||||
.WithRemediation(rb => rb
|
||||
.AddStep(1, "Verify webhook in Teams",
|
||||
"# Go to Teams channel > Connectors > Configured > Incoming Webhook",
|
||||
CommandType.Manual)
|
||||
.AddStep(2, "Test webhook manually",
|
||||
$"curl -H 'Content-Type: application/json' -d '{{\"text\":\"Doctor test\"}}' '{DoctorPluginContext.Redact(webhookUrl)}'",
|
||||
CommandType.Shell)
|
||||
.AddStep(3, "Recreate webhook if needed",
|
||||
"# Delete and recreate the Incoming Webhook connector in Teams",
|
||||
CommandType.Manual))
|
||||
.WithVerification($"stella doctor --check {CheckId}")
|
||||
.Build();
|
||||
}
|
||||
catch (TaskCanceledException)
|
||||
{
|
||||
return builder
|
||||
.Fail("Teams webhook connection timed out")
|
||||
.WithEvidence("Teams connectivity test", eb => eb
|
||||
.Add("WebhookUrl", DoctorPluginContext.Redact(webhookUrl))
|
||||
.Add("Error", "Connection timeout (10s)"))
|
||||
.WithCauses(
|
||||
"Network connectivity issue to Microsoft",
|
||||
"Firewall blocking outbound HTTPS",
|
||||
"Proxy configuration required",
|
||||
"Microsoft 365 service degradation")
|
||||
.WithRemediation(rb => rb
|
||||
.AddStep(1, "Check network connectivity",
|
||||
"curl -v https://webhook.office.com/",
|
||||
CommandType.Shell)
|
||||
.AddStep(2, "Check Microsoft 365 status",
|
||||
"# Visit https://status.office.com for service status",
|
||||
CommandType.Manual)
|
||||
.AddStep(3, "Verify proxy settings if applicable",
|
||||
"echo $HTTP_PROXY $HTTPS_PROXY",
|
||||
CommandType.Shell))
|
||||
.WithVerification($"stella doctor --check {CheckId}")
|
||||
.Build();
|
||||
}
|
||||
catch (HttpRequestException ex)
|
||||
{
|
||||
return builder
|
||||
.Fail($"Cannot reach Teams webhook: {ex.Message}")
|
||||
.WithEvidence("Teams connectivity test", eb => eb
|
||||
.Add("WebhookUrl", DoctorPluginContext.Redact(webhookUrl))
|
||||
.Add("Error", ex.Message))
|
||||
.WithCauses(
|
||||
"DNS resolution failure",
|
||||
"Network connectivity issue",
|
||||
"TLS/SSL certificate problem",
|
||||
"Firewall blocking connection")
|
||||
.WithRemediation(rb => rb
|
||||
.AddStep(1, "Test DNS resolution",
|
||||
"nslookup webhook.office.com",
|
||||
CommandType.Shell)
|
||||
.AddStep(2, "Test HTTPS connectivity",
|
||||
"curl -v https://webhook.office.com/",
|
||||
CommandType.Shell))
|
||||
.WithVerification($"stella doctor --check {CheckId}")
|
||||
.Build();
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,128 @@
|
||||
using Microsoft.Extensions.Configuration;
|
||||
using StellaOps.Doctor.Models;
|
||||
using StellaOps.Doctor.Plugins;
|
||||
|
||||
namespace StellaOps.Doctor.Plugin.Notify.Checks;
|
||||
|
||||
/// <summary>
|
||||
/// Checks if generic webhook notification channels are properly configured.
|
||||
/// </summary>
|
||||
public sealed class WebhookConfiguredCheck : IDoctorCheck
|
||||
{
|
||||
private const string PluginId = "stellaops.doctor.notify";
|
||||
private const string CategoryName = "Notifications";
|
||||
|
||||
/// <inheritdoc />
|
||||
public string CheckId => "check.notify.webhook.configured";
|
||||
|
||||
/// <inheritdoc />
|
||||
public string Name => "Webhook Configuration";
|
||||
|
||||
/// <inheritdoc />
|
||||
public string Description => "Verify generic webhook notification channel is properly configured";
|
||||
|
||||
/// <inheritdoc />
|
||||
public DoctorSeverity DefaultSeverity => DoctorSeverity.Warn;
|
||||
|
||||
/// <inheritdoc />
|
||||
public IReadOnlyList<string> Tags => ["notify", "webhook", "quick", "configuration"];
|
||||
|
||||
/// <inheritdoc />
|
||||
public TimeSpan EstimatedDuration => TimeSpan.FromMilliseconds(100);
|
||||
|
||||
/// <inheritdoc />
|
||||
public bool CanRun(DoctorPluginContext context)
|
||||
{
|
||||
var webhookConfig = context.Configuration.GetSection("Notify:Channels:Webhook");
|
||||
return webhookConfig.Exists();
|
||||
}
|
||||
|
||||
/// <inheritdoc />
|
||||
public Task<DoctorCheckResult> RunAsync(DoctorPluginContext context, CancellationToken ct)
|
||||
{
|
||||
var builder = context.CreateResult(CheckId, PluginId, CategoryName);
|
||||
var webhookConfig = context.Configuration.GetSection("Notify:Channels:Webhook");
|
||||
|
||||
var url = webhookConfig["Url"] ?? webhookConfig["Endpoint"];
|
||||
var enabled = webhookConfig.GetValue<bool>("Enabled", true);
|
||||
var method = webhookConfig["Method"] ?? "POST";
|
||||
var contentType = webhookConfig["ContentType"] ?? "application/json";
|
||||
|
||||
var hasUrl = !string.IsNullOrWhiteSpace(url);
|
||||
var isValidUrl = hasUrl && Uri.TryCreate(url, UriKind.Absolute, out var uri) &&
|
||||
(uri.Scheme == Uri.UriSchemeHttp || uri.Scheme == Uri.UriSchemeHttps);
|
||||
|
||||
if (!hasUrl)
|
||||
{
|
||||
return Task.FromResult(builder
|
||||
.Fail("Webhook URL is not configured")
|
||||
.WithEvidence("Webhook configuration status", eb => eb
|
||||
.Add("Url", "(not set)")
|
||||
.Add("Enabled", enabled.ToString())
|
||||
.Add("Method", method)
|
||||
.Add("ContentType", contentType))
|
||||
.WithCauses(
|
||||
"Webhook URL not set in configuration",
|
||||
"Missing Notify:Channels:Webhook:Url setting",
|
||||
"Environment variable not bound to configuration")
|
||||
.WithRemediation(rb => rb
|
||||
.AddStep(1, "Add webhook URL to configuration",
|
||||
"# Add to appsettings.json:\n" +
|
||||
"# \"Notify\": { \"Channels\": { \"Webhook\": { \"Url\": \"https://your-endpoint/webhook\" } } }",
|
||||
CommandType.FileEdit)
|
||||
.AddStep(2, "Or set via environment variable",
|
||||
"export Notify__Channels__Webhook__Url=\"https://your-endpoint/webhook\"",
|
||||
CommandType.Shell))
|
||||
.WithVerification($"stella doctor --check {CheckId}")
|
||||
.Build());
|
||||
}
|
||||
|
||||
if (!isValidUrl)
|
||||
{
|
||||
return Task.FromResult(builder
|
||||
.Fail("Webhook URL format is invalid")
|
||||
.WithEvidence("Webhook configuration status", eb => eb
|
||||
.Add("Url", url!)
|
||||
.Add("Enabled", enabled.ToString())
|
||||
.Add("ValidationError", "URL must be a valid HTTP or HTTPS URL"))
|
||||
.WithCauses(
|
||||
"Malformed URL in configuration",
|
||||
"Missing protocol (http:// or https://)",
|
||||
"Invalid characters in URL")
|
||||
.WithRemediation(rb => rb
|
||||
.AddStep(1, "Fix URL format",
|
||||
"# Ensure URL starts with http:// or https:// and is properly encoded",
|
||||
CommandType.Manual))
|
||||
.WithVerification($"stella doctor --check {CheckId}")
|
||||
.Build());
|
||||
}
|
||||
|
||||
if (!enabled)
|
||||
{
|
||||
return Task.FromResult(builder
|
||||
.Warn("Webhook channel is configured but disabled")
|
||||
.WithEvidence("Webhook configuration status", eb => eb
|
||||
.Add("Url", DoctorPluginContext.Redact(url))
|
||||
.Add("Enabled", "false")
|
||||
.Add("Method", method)
|
||||
.Add("ContentType", contentType))
|
||||
.WithCauses(
|
||||
"Webhook notifications explicitly disabled in configuration")
|
||||
.WithRemediation(rb => rb
|
||||
.AddStep(1, "Enable webhook notifications",
|
||||
"# Set Notify:Channels:Webhook:Enabled to true in configuration",
|
||||
CommandType.FileEdit))
|
||||
.WithVerification($"stella doctor --check {CheckId}")
|
||||
.Build());
|
||||
}
|
||||
|
||||
return Task.FromResult(builder
|
||||
.Pass("Webhook notification channel is properly configured")
|
||||
.WithEvidence("Webhook configuration status", eb => eb
|
||||
.Add("Url", DoctorPluginContext.Redact(url))
|
||||
.Add("Enabled", "true")
|
||||
.Add("Method", method)
|
||||
.Add("ContentType", contentType))
|
||||
.Build());
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,166 @@
|
||||
using System.Globalization;
|
||||
using System.Net.Http;
|
||||
using Microsoft.Extensions.DependencyInjection;
|
||||
using StellaOps.Doctor.Models;
|
||||
using StellaOps.Doctor.Plugins;
|
||||
|
||||
namespace StellaOps.Doctor.Plugin.Notify.Checks;
|
||||
|
||||
/// <summary>
|
||||
/// Checks if the configured webhook endpoint is reachable.
|
||||
/// </summary>
|
||||
public sealed class WebhookConnectivityCheck : IDoctorCheck
|
||||
{
|
||||
private const string PluginId = "stellaops.doctor.notify";
|
||||
private const string CategoryName = "Notifications";
|
||||
|
||||
/// <inheritdoc />
|
||||
public string CheckId => "check.notify.webhook.connectivity";
|
||||
|
||||
/// <inheritdoc />
|
||||
public string Name => "Webhook Connectivity";
|
||||
|
||||
/// <inheritdoc />
|
||||
public string Description => "Verify generic webhook endpoint is reachable";
|
||||
|
||||
/// <inheritdoc />
|
||||
public DoctorSeverity DefaultSeverity => DoctorSeverity.Warn;
|
||||
|
||||
/// <inheritdoc />
|
||||
public IReadOnlyList<string> Tags => ["notify", "webhook", "connectivity", "network"];
|
||||
|
||||
/// <inheritdoc />
|
||||
public TimeSpan EstimatedDuration => TimeSpan.FromSeconds(5);
|
||||
|
||||
/// <inheritdoc />
|
||||
public bool CanRun(DoctorPluginContext context)
|
||||
{
|
||||
var url = context.Configuration["Notify:Channels:Webhook:Url"] ??
|
||||
context.Configuration["Notify:Channels:Webhook:Endpoint"];
|
||||
return !string.IsNullOrWhiteSpace(url) &&
|
||||
Uri.TryCreate(url, UriKind.Absolute, out _);
|
||||
}
|
||||
|
||||
/// <inheritdoc />
|
||||
public async Task<DoctorCheckResult> RunAsync(DoctorPluginContext context, CancellationToken ct)
|
||||
{
|
||||
var url = context.Configuration["Notify:Channels:Webhook:Url"] ??
|
||||
context.Configuration["Notify:Channels:Webhook:Endpoint"]!;
|
||||
var builder = context.CreateResult(CheckId, PluginId, CategoryName);
|
||||
|
||||
try
|
||||
{
|
||||
var httpClientFactory = context.Services.GetRequiredService<IHttpClientFactory>();
|
||||
var httpClient = httpClientFactory.CreateClient("DoctorHealthCheck");
|
||||
httpClient.Timeout = TimeSpan.FromSeconds(10);
|
||||
|
||||
// Use HEAD request first to avoid side effects, fall back to GET
|
||||
var uri = new Uri(url);
|
||||
HttpResponseMessage? response = null;
|
||||
|
||||
try
|
||||
{
|
||||
var headRequest = new HttpRequestMessage(HttpMethod.Head, uri);
|
||||
response = await httpClient.SendAsync(headRequest, ct);
|
||||
}
|
||||
catch (HttpRequestException)
|
||||
{
|
||||
// HEAD might not be supported, try OPTIONS
|
||||
var optionsRequest = new HttpRequestMessage(HttpMethod.Options, uri);
|
||||
response = await httpClient.SendAsync(optionsRequest, ct);
|
||||
}
|
||||
|
||||
// For connectivity test, any response (even 4xx for auth required) means endpoint is reachable
|
||||
var isReachable = (int)response.StatusCode < 500;
|
||||
|
||||
if (isReachable)
|
||||
{
|
||||
var diagnosis = response.IsSuccessStatusCode
|
||||
? "Webhook endpoint is reachable and responding"
|
||||
: $"Webhook endpoint is reachable (status: {response.StatusCode})";
|
||||
|
||||
var severity = response.IsSuccessStatusCode ? DoctorSeverity.Pass : DoctorSeverity.Info;
|
||||
|
||||
return builder
|
||||
.WithSeverity(severity, diagnosis)
|
||||
.WithEvidence("Webhook connectivity test", eb => eb
|
||||
.Add("Url", DoctorPluginContext.Redact(url))
|
||||
.Add("StatusCode", ((int)response.StatusCode).ToString(CultureInfo.InvariantCulture))
|
||||
.Add("TestMethod", "HEAD/OPTIONS")
|
||||
.Add("Note", response.IsSuccessStatusCode
|
||||
? "Endpoint responding normally"
|
||||
: "Endpoint reachable but may require authentication"))
|
||||
.Build();
|
||||
}
|
||||
|
||||
return builder
|
||||
.Warn($"Webhook endpoint returned server error: {response.StatusCode}")
|
||||
.WithEvidence("Webhook connectivity test", eb => eb
|
||||
.Add("Url", DoctorPluginContext.Redact(url))
|
||||
.Add("StatusCode", ((int)response.StatusCode).ToString(CultureInfo.InvariantCulture)))
|
||||
.WithCauses(
|
||||
"Webhook endpoint server is experiencing issues",
|
||||
"Endpoint service is down",
|
||||
"Backend service unavailable")
|
||||
.WithRemediation(rb => rb
|
||||
.AddStep(1, "Check webhook endpoint status",
|
||||
$"curl -I {DoctorPluginContext.Redact(url)}",
|
||||
CommandType.Shell)
|
||||
.AddStep(2, "Verify endpoint service is running",
|
||||
"# Check the service hosting your webhook endpoint",
|
||||
CommandType.Manual)
|
||||
.AddStep(3, "Check endpoint logs",
|
||||
"# Review logs on the webhook endpoint server",
|
||||
CommandType.Manual))
|
||||
.WithVerification($"stella doctor --check {CheckId}")
|
||||
.Build();
|
||||
}
|
||||
catch (TaskCanceledException)
|
||||
{
|
||||
return builder
|
||||
.Fail("Webhook endpoint connection timed out")
|
||||
.WithEvidence("Webhook connectivity test", eb => eb
|
||||
.Add("Url", DoctorPluginContext.Redact(url))
|
||||
.Add("Error", "Connection timeout (10s)"))
|
||||
.WithCauses(
|
||||
"Endpoint server not responding",
|
||||
"Network connectivity issue",
|
||||
"Firewall blocking connection",
|
||||
"DNS resolution slow or failing")
|
||||
.WithRemediation(rb => rb
|
||||
.AddStep(1, "Test basic connectivity",
|
||||
$"curl -v --max-time 10 {DoctorPluginContext.Redact(url)}",
|
||||
CommandType.Shell)
|
||||
.AddStep(2, "Check DNS resolution",
|
||||
$"nslookup {new Uri(url).Host}",
|
||||
CommandType.Shell)
|
||||
.AddStep(3, "Test port connectivity",
|
||||
$"nc -zv {new Uri(url).Host} {(new Uri(url).Port > 0 ? new Uri(url).Port : (new Uri(url).Scheme == "https" ? 443 : 80))}",
|
||||
CommandType.Shell))
|
||||
.WithVerification($"stella doctor --check {CheckId}")
|
||||
.Build();
|
||||
}
|
||||
catch (HttpRequestException ex)
|
||||
{
|
||||
return builder
|
||||
.Fail($"Cannot reach webhook endpoint: {ex.Message}")
|
||||
.WithEvidence("Webhook connectivity test", eb => eb
|
||||
.Add("Url", DoctorPluginContext.Redact(url))
|
||||
.Add("Error", ex.Message))
|
||||
.WithCauses(
|
||||
"DNS resolution failure",
|
||||
"Network connectivity issue",
|
||||
"TLS/SSL certificate problem",
|
||||
"Invalid URL")
|
||||
.WithRemediation(rb => rb
|
||||
.AddStep(1, "Test DNS resolution",
|
||||
$"nslookup {new Uri(url).Host}",
|
||||
CommandType.Shell)
|
||||
.AddStep(2, "Test connectivity",
|
||||
$"curl -v {DoctorPluginContext.Redact(url)}",
|
||||
CommandType.Shell))
|
||||
.WithVerification($"stella doctor --check {CheckId}")
|
||||
.Build();
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,71 @@
using Microsoft.Extensions.DependencyInjection;
using StellaOps.Doctor.Plugin.Notify.Checks;
using StellaOps.Doctor.Plugins;
using StellaOps.Notify.Engine;

namespace StellaOps.Doctor.Plugin.Notify;

/// <summary>
/// Doctor plugin for notification channel diagnostics (Slack, Teams, Email, Webhooks, Queue).
/// </summary>
public sealed class NotifyDoctorPlugin : IDoctorPlugin
{
    private static readonly Version PluginVersion = new(1, 0, 0);
    private static readonly Version MinVersion = new(1, 0, 0);

    /// <inheritdoc />
    public string PluginId => "stellaops.doctor.notify";

    /// <inheritdoc />
    public string DisplayName => "Notifications";

    /// <inheritdoc />
    public DoctorCategory Category => DoctorCategory.Notify;

    /// <inheritdoc />
    public Version Version => PluginVersion;

    /// <inheritdoc />
    public Version MinEngineVersion => MinVersion;

    /// <inheritdoc />
    public bool IsAvailable(IServiceProvider services)
    {
        // Plugin is available if any notification health providers are registered
        var providers = services.GetService<IEnumerable<INotifyChannelHealthProvider>>();
        return providers?.Any() == true;
    }

    /// <inheritdoc />
    public IReadOnlyList<IDoctorCheck> GetChecks(DoctorPluginContext context)
    {
        return new IDoctorCheck[]
        {
            // Slack checks
            new SlackConfiguredCheck(),
            new SlackConnectivityCheck(),

            // Teams checks
            new TeamsConfiguredCheck(),
            new TeamsConnectivityCheck(),

            // Webhook checks
            new WebhookConfiguredCheck(),
            new WebhookConnectivityCheck(),

            // Email checks
            new EmailConfiguredCheck(),
            new EmailConnectivityCheck(),

            // Queue health
            new NotifyQueueHealthCheck()
        };
    }

    /// <inheritdoc />
    public Task InitializeAsync(DoctorPluginContext context, CancellationToken ct)
    {
        // No initialization required
        return Task.CompletedTask;
    }
}
@@ -0,0 +1,25 @@
<Project Sdk="Microsoft.NET.Sdk">

  <PropertyGroup>
    <TargetFramework>net10.0</TargetFramework>
    <ImplicitUsings>enable</ImplicitUsings>
    <Nullable>enable</Nullable>
    <LangVersion>preview</LangVersion>
    <TreatWarningsAsErrors>true</TreatWarningsAsErrors>
    <RootNamespace>StellaOps.Doctor.Plugin.Notify</RootNamespace>
    <Description>Notification channel checks for Stella Ops Doctor diagnostics - Slack, Teams, Email, Webhooks, Queue</Description>
  </PropertyGroup>

  <ItemGroup>
    <ProjectReference Include="..\..\..\__Libraries\StellaOps.Doctor\StellaOps.Doctor.csproj" />
    <ProjectReference Include="..\..\..\Notify\__Libraries\StellaOps.Notify.Engine\StellaOps.Notify.Engine.csproj" />
    <ProjectReference Include="..\..\..\Notify\__Libraries\StellaOps.Notify.Models\StellaOps.Notify.Models.csproj" />
    <ProjectReference Include="..\..\..\Notify\__Libraries\StellaOps.Notify.Queue\StellaOps.Notify.Queue.csproj" />
  </ItemGroup>

  <ItemGroup>
    <PackageReference Include="Microsoft.Extensions.Configuration.Binder" />
    <PackageReference Include="Microsoft.Extensions.Http" />
  </ItemGroup>

</Project>
src/Doctor/__Plugins/StellaOps.Doctor.Plugin.Notify/TASKS.md
@@ -0,0 +1,70 @@
# StellaOps.Doctor.Plugin.Notify

## Overview

Doctor plugin for notification channel diagnostics - validates and tests Slack, Teams, Email, Webhook, and Queue configurations.

## Checks

| Check ID | Name | Description | Severity |
|----------|------|-------------|----------|
| `check.notify.slack.configured` | Slack Configuration | Validates Slack webhook URL and settings | Warn |
| `check.notify.slack.connectivity` | Slack Connectivity | Tests actual connectivity to Slack webhook | Warn |
| `check.notify.teams.configured` | Teams Configuration | Validates Teams webhook URL and settings | Warn |
| `check.notify.teams.connectivity` | Teams Connectivity | Tests actual connectivity to Teams webhook | Warn |
| `check.notify.webhook.configured` | Webhook Configuration | Validates generic webhook URL and settings | Warn |
| `check.notify.webhook.connectivity` | Webhook Connectivity | Tests actual connectivity to webhook endpoint | Warn |
| `check.notify.email.configured` | Email Configuration | Validates SMTP host, port, and sender settings | Warn |
| `check.notify.email.connectivity` | Email Connectivity | Tests TCP connectivity to SMTP server | Warn |
| `check.notify.queue.health` | Queue Health | Wraps existing Notify queue health checks | Fail |

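Each check can be run on its own through the Doctor CLI; the `stella doctor --check <check-id>` form below is the same verification command the checks emit in their remediation output (any other CLI flags are outside the scope of this plugin):

```bash
# Validate the Slack channel configuration only
stella doctor --check check.notify.slack.configured

# Probe TCP connectivity to the configured SMTP server
stella doctor --check check.notify.email.connectivity
```
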
## Configuration Paths

### Slack
- `Notify:Channels:Slack:WebhookUrl` - Slack incoming webhook URL
- `Notify:Channels:Slack:Enabled` - Enable/disable channel
- `Notify:Channels:Slack:Channel` - Default channel override

### Teams
- `Notify:Channels:Teams:WebhookUrl` - Teams incoming webhook URL
- `Notify:Channels:Teams:Enabled` - Enable/disable channel

### Webhook
- `Notify:Channels:Webhook:Url` or `Endpoint` - Webhook endpoint URL
- `Notify:Channels:Webhook:Enabled` - Enable/disable channel
- `Notify:Channels:Webhook:Method` - HTTP method (default: POST)
- `Notify:Channels:Webhook:ContentType` - Content type (default: application/json)

### Email
- `Notify:Channels:Email:SmtpHost` or `Host` - SMTP server hostname
- `Notify:Channels:Email:SmtpPort` or `Port` - SMTP port (25/465/587)
- `Notify:Channels:Email:FromAddress` or `From` - Sender email address
- `Notify:Channels:Email:Enabled` - Enable/disable channel
- `Notify:Channels:Email:UseSsl` - Use SSL/TLS
- `Notify:Channels:Email:Username` - SMTP credentials

### Queue
- `Notify:Queue:Transport` or `Kind` - Queue transport type (redis/nats)
- `Notify:Queue:Redis:ConnectionString` - Redis connection string
- `Notify:Queue:Nats:Url` - NATS server URL

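The paths above follow the standard .NET configuration hierarchy, so they can be supplied via `appsettings.json` or environment variables (double underscores as separators, e.g. `Notify__Channels__Slack__WebhookUrl`, as used in the remediation steps emitted by the checks). A minimal illustrative sketch - the host names, URLs, and connection string are placeholders, not shipped defaults:

```json
{
  "Notify": {
    "Channels": {
      "Slack": { "WebhookUrl": "https://hooks.slack.com/services/YOUR/WEBHOOK/URL", "Enabled": true },
      "Email": { "SmtpHost": "smtp.example.com", "SmtpPort": 587, "FromAddress": "noreply@example.com", "UseSsl": true }
    },
    "Queue": {
      "Transport": "redis",
      "Redis": { "ConnectionString": "localhost:6379" }
    }
  }
}
```
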
## Dependencies

- `StellaOps.Doctor` - Core Doctor plugin infrastructure
- `StellaOps.Notify.Engine` - Notify channel health provider interfaces
- `StellaOps.Notify.Models` - Notify data models
- `StellaOps.Notify.Queue` - Queue health check implementations

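Because the checks have no constructor dependencies, they can also be exercised outside the Doctor engine by building a `DoctorPluginContext` by hand, as the unit tests do. A minimal sketch (the context initializer mirrors the shape used in the test fixtures; adjust if the actual record differs):

```csharp
using Microsoft.Extensions.Configuration;
using Microsoft.Extensions.DependencyInjection;
using Microsoft.Extensions.Logging.Abstractions;
using StellaOps.Doctor.Plugin.Notify.Checks;
using StellaOps.Doctor.Plugins;

// In-memory configuration that enables the Slack channel (placeholder webhook URL).
var config = new ConfigurationBuilder()
    .AddInMemoryCollection(new Dictionary<string, string?>
    {
        ["Notify:Channels:Slack:WebhookUrl"] = "https://hooks.slack.com/services/YOUR/WEBHOOK/URL"
    })
    .Build();

var context = new DoctorPluginContext
{
    Services = new ServiceCollection().BuildServiceProvider(),
    Configuration = config,
    TimeProvider = TimeProvider.System,
    Logger = NullLogger.Instance,
    EnvironmentName = "Local",
    PluginConfig = config.GetSection("Doctor:Plugins")
};

var check = new SlackConfiguredCheck();
if (check.CanRun(context))
{
    // RunAsync produces a DoctorCheckResult carrying diagnosis, evidence, and remediation steps.
    var result = await check.RunAsync(context, CancellationToken.None);
    Console.WriteLine($"{check.CheckId}: {result.Severity} - {result.Diagnosis}");
}
```
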
## Status

- [x] Plugin skeleton
- [x] Slack configuration check
- [x] Slack connectivity check
- [x] Teams configuration check
- [x] Teams connectivity check
- [x] Webhook configuration check
- [x] Webhook connectivity check
- [x] Email configuration check
- [x] Email connectivity check
- [x] Queue health check wrapper
- [x] Unit tests
@@ -11,7 +11,7 @@
  </PropertyGroup>

  <ItemGroup>
    <ProjectReference Include="..\..\..\..\__Libraries\StellaOps.Doctor\StellaOps.Doctor.csproj" />
    <ProjectReference Include="..\..\..\__Libraries\StellaOps.Doctor\StellaOps.Doctor.csproj" />
  </ItemGroup>

  <ItemGroup>
@@ -0,0 +1,192 @@
|
||||
using FluentAssertions;
|
||||
using Microsoft.Extensions.Configuration;
|
||||
using Microsoft.Extensions.DependencyInjection;
|
||||
using Microsoft.Extensions.Logging.Abstractions;
|
||||
using StellaOps.Doctor.Models;
|
||||
using StellaOps.Doctor.Plugin.Notify.Checks;
|
||||
using StellaOps.Doctor.Plugins;
|
||||
using Xunit;
|
||||
|
||||
namespace StellaOps.Doctor.Plugin.Notify.Tests.Checks;
|
||||
|
||||
[Trait("Category", "Unit")]
|
||||
public class EmailConfiguredCheckTests
|
||||
{
|
||||
private readonly EmailConfiguredCheck _check = new();
|
||||
|
||||
[Fact]
|
||||
public void CheckId_ReturnsExpectedValue()
|
||||
{
|
||||
// Assert
|
||||
_check.CheckId.Should().Be("check.notify.email.configured");
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void CanRun_ReturnsFalse_WhenEmailNotConfigured()
|
||||
{
|
||||
// Arrange
|
||||
var context = CreateContext(new Dictionary<string, string?>());
|
||||
|
||||
// Act & Assert
|
||||
_check.CanRun(context).Should().BeFalse();
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void CanRun_ReturnsTrue_WhenEmailSectionExists()
|
||||
{
|
||||
// Arrange
|
||||
var context = CreateContext(new Dictionary<string, string?>
|
||||
{
|
||||
["Notify:Channels:Email:SmtpHost"] = "smtp.example.com"
|
||||
});
|
||||
|
||||
// Act & Assert
|
||||
_check.CanRun(context).Should().BeTrue();
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task RunAsync_Fails_WhenSmtpHostNotSet()
|
||||
{
|
||||
// Arrange
|
||||
var context = CreateContext(new Dictionary<string, string?>
|
||||
{
|
||||
["Notify:Channels:Email:SmtpPort"] = "587"
|
||||
});
|
||||
|
||||
// Act
|
||||
var result = await _check.RunAsync(context, CancellationToken.None);
|
||||
|
||||
// Assert
|
||||
result.Severity.Should().Be(DoctorSeverity.Fail);
|
||||
result.Diagnosis.Should().Contain("host");
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task RunAsync_Warns_WhenSmtpPortInvalid()
|
||||
{
|
||||
// Arrange
|
||||
var context = CreateContext(new Dictionary<string, string?>
|
||||
{
|
||||
["Notify:Channels:Email:SmtpHost"] = "smtp.example.com",
|
||||
["Notify:Channels:Email:SmtpPort"] = "0",
|
||||
["Notify:Channels:Email:FromAddress"] = "noreply@example.com"
|
||||
});
|
||||
|
||||
// Act
|
||||
var result = await _check.RunAsync(context, CancellationToken.None);
|
||||
|
||||
// Assert
|
||||
result.Severity.Should().Be(DoctorSeverity.Warn);
|
||||
result.Diagnosis.Should().Contain("port");
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task RunAsync_Warns_WhenFromAddressMissing()
|
||||
{
|
||||
// Arrange
|
||||
var context = CreateContext(new Dictionary<string, string?>
|
||||
{
|
||||
["Notify:Channels:Email:SmtpHost"] = "smtp.example.com",
|
||||
["Notify:Channels:Email:SmtpPort"] = "587"
|
||||
});
|
||||
|
||||
// Act
|
||||
var result = await _check.RunAsync(context, CancellationToken.None);
|
||||
|
||||
// Assert
|
||||
result.Severity.Should().Be(DoctorSeverity.Warn);
|
||||
result.Diagnosis.Should().Contain("From");
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task RunAsync_Warns_WhenDisabled()
|
||||
{
|
||||
// Arrange
|
||||
var context = CreateContext(new Dictionary<string, string?>
|
||||
{
|
||||
["Notify:Channels:Email:SmtpHost"] = "smtp.example.com",
|
||||
["Notify:Channels:Email:SmtpPort"] = "587",
|
||||
["Notify:Channels:Email:FromAddress"] = "noreply@example.com",
|
||||
["Notify:Channels:Email:Enabled"] = "false"
|
||||
});
|
||||
|
||||
// Act
|
||||
var result = await _check.RunAsync(context, CancellationToken.None);
|
||||
|
||||
// Assert
|
||||
result.Severity.Should().Be(DoctorSeverity.Warn);
|
||||
result.Diagnosis.Should().Contain("disabled");
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task RunAsync_Passes_WhenProperlyConfigured()
|
||||
{
|
||||
// Arrange
|
||||
var context = CreateContext(new Dictionary<string, string?>
|
||||
{
|
||||
["Notify:Channels:Email:SmtpHost"] = "smtp.example.com",
|
||||
["Notify:Channels:Email:SmtpPort"] = "587",
|
||||
["Notify:Channels:Email:FromAddress"] = "noreply@example.com",
|
||||
["Notify:Channels:Email:Enabled"] = "true",
|
||||
["Notify:Channels:Email:UseSsl"] = "true"
|
||||
});
|
||||
|
||||
// Act
|
||||
var result = await _check.RunAsync(context, CancellationToken.None);
|
||||
|
||||
// Assert
|
||||
result.Severity.Should().Be(DoctorSeverity.Pass);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task RunAsync_SupportsAlternativeHostKey()
|
||||
{
|
||||
// Arrange
|
||||
var context = CreateContext(new Dictionary<string, string?>
|
||||
{
|
||||
["Notify:Channels:Email:Host"] = "smtp.example.com",
|
||||
["Notify:Channels:Email:Port"] = "587",
|
||||
["Notify:Channels:Email:From"] = "noreply@example.com"
|
||||
});
|
||||
|
||||
// Act
|
||||
var result = await _check.RunAsync(context, CancellationToken.None);
|
||||
|
||||
// Assert
|
||||
result.Severity.Should().Be(DoctorSeverity.Pass);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void Tags_ContainsExpectedValues()
|
||||
{
|
||||
// Assert
|
||||
_check.Tags.Should().Contain("notify");
|
||||
_check.Tags.Should().Contain("email");
|
||||
_check.Tags.Should().Contain("smtp");
|
||||
_check.Tags.Should().Contain("configuration");
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void DefaultSeverity_IsWarn()
|
||||
{
|
||||
// Assert
|
||||
_check.DefaultSeverity.Should().Be(DoctorSeverity.Warn);
|
||||
}
|
||||
|
||||
private static DoctorPluginContext CreateContext(Dictionary<string, string?> configValues)
|
||||
{
|
||||
var config = new ConfigurationBuilder()
|
||||
.AddInMemoryCollection(configValues)
|
||||
.Build();
|
||||
|
||||
return new DoctorPluginContext
|
||||
{
|
||||
Services = new ServiceCollection().BuildServiceProvider(),
|
||||
Configuration = config,
|
||||
TimeProvider = TimeProvider.System,
|
||||
Logger = NullLogger.Instance,
|
||||
EnvironmentName = "Test",
|
||||
PluginConfig = config.GetSection("Doctor:Plugins")
|
||||
};
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,133 @@
|
||||
using FluentAssertions;
|
||||
using Microsoft.Extensions.Configuration;
|
||||
using Microsoft.Extensions.DependencyInjection;
|
||||
using Microsoft.Extensions.Logging.Abstractions;
|
||||
using StellaOps.Doctor.Models;
|
||||
using StellaOps.Doctor.Plugin.Notify.Checks;
|
||||
using StellaOps.Doctor.Plugins;
|
||||
using Xunit;
|
||||
|
||||
namespace StellaOps.Doctor.Plugin.Notify.Tests.Checks;
|
||||
|
||||
[Trait("Category", "Unit")]
|
||||
public class NotifyQueueHealthCheckTests
|
||||
{
|
||||
private readonly NotifyQueueHealthCheck _check = new();
|
||||
|
||||
[Fact]
|
||||
public void CheckId_ReturnsExpectedValue()
|
||||
{
|
||||
// Assert
|
||||
_check.CheckId.Should().Be("check.notify.queue.health");
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void CanRun_ReturnsFalse_WhenQueueNotConfigured()
|
||||
{
|
||||
// Arrange
|
||||
var context = CreateContext(new Dictionary<string, string?>());
|
||||
|
||||
// Act & Assert
|
||||
_check.CanRun(context).Should().BeFalse();
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void CanRun_ReturnsTrue_WhenQueueTransportConfigured()
|
||||
{
|
||||
// Arrange
|
||||
var context = CreateContext(new Dictionary<string, string?>
|
||||
{
|
||||
["Notify:Queue:Transport"] = "redis"
|
||||
});
|
||||
|
||||
// Act & Assert
|
||||
_check.CanRun(context).Should().BeTrue();
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void CanRun_ReturnsTrue_WhenQueueKindConfigured()
|
||||
{
|
||||
// Arrange
|
||||
var context = CreateContext(new Dictionary<string, string?>
|
||||
{
|
||||
["Notify:Queue:Kind"] = "nats"
|
||||
});
|
||||
|
||||
// Act & Assert
|
||||
_check.CanRun(context).Should().BeTrue();
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task RunAsync_Skips_WhenNoHealthChecksRegistered()
|
||||
{
|
||||
// Arrange
|
||||
var context = CreateContext(new Dictionary<string, string?>
|
||||
{
|
||||
["Notify:Queue:Transport"] = "redis"
|
||||
});
|
||||
|
||||
// Act
|
||||
var result = await _check.RunAsync(context, CancellationToken.None);
|
||||
|
||||
// Assert
|
||||
result.Severity.Should().Be(DoctorSeverity.Skip);
|
||||
result.Diagnosis.Should().Contain("registered");
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void Tags_ContainsExpectedValues()
|
||||
{
|
||||
// Assert
|
||||
_check.Tags.Should().Contain("notify");
|
||||
_check.Tags.Should().Contain("queue");
|
||||
_check.Tags.Should().Contain("redis");
|
||||
_check.Tags.Should().Contain("nats");
|
||||
_check.Tags.Should().Contain("infrastructure");
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void DefaultSeverity_IsFail()
|
||||
{
|
||||
// Assert
|
||||
_check.DefaultSeverity.Should().Be(DoctorSeverity.Fail);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void EstimatedDuration_IsReasonable()
|
||||
{
|
||||
// Assert
|
||||
_check.EstimatedDuration.Should().BeGreaterThan(TimeSpan.Zero);
|
||||
_check.EstimatedDuration.Should().BeLessThanOrEqualTo(TimeSpan.FromSeconds(10));
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void Name_IsNotEmpty()
|
||||
{
|
||||
// Assert
|
||||
_check.Name.Should().NotBeNullOrEmpty();
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void Description_IsNotEmpty()
|
||||
{
|
||||
// Assert
|
||||
_check.Description.Should().NotBeNullOrEmpty();
|
||||
}
|
||||
|
||||
private static DoctorPluginContext CreateContext(Dictionary<string, string?> configValues)
|
||||
{
|
||||
var config = new ConfigurationBuilder()
|
||||
.AddInMemoryCollection(configValues)
|
||||
.Build();
|
||||
|
||||
return new DoctorPluginContext
|
||||
{
|
||||
Services = new ServiceCollection().BuildServiceProvider(),
|
||||
Configuration = config,
|
||||
TimeProvider = TimeProvider.System,
|
||||
Logger = NullLogger.Instance,
|
||||
EnvironmentName = "Test",
|
||||
PluginConfig = config.GetSection("Doctor:Plugins")
|
||||
};
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,155 @@
|
||||
using FluentAssertions;
|
||||
using Microsoft.Extensions.Configuration;
|
||||
using Microsoft.Extensions.DependencyInjection;
|
||||
using Microsoft.Extensions.Logging.Abstractions;
|
||||
using StellaOps.Doctor.Models;
|
||||
using StellaOps.Doctor.Plugin.Notify.Checks;
|
||||
using StellaOps.Doctor.Plugins;
|
||||
using Xunit;
|
||||
|
||||
namespace StellaOps.Doctor.Plugin.Notify.Tests.Checks;
|
||||
|
||||
[Trait("Category", "Unit")]
|
||||
public class SlackConfiguredCheckTests
|
||||
{
|
||||
private readonly SlackConfiguredCheck _check = new();
|
||||
|
||||
[Fact]
|
||||
public void CheckId_ReturnsExpectedValue()
|
||||
{
|
||||
// Assert
|
||||
_check.CheckId.Should().Be("check.notify.slack.configured");
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void CanRun_ReturnsFalse_WhenSlackNotConfigured()
|
||||
{
|
||||
// Arrange
|
||||
var context = CreateContext(new Dictionary<string, string?>());
|
||||
|
||||
// Act & Assert
|
||||
_check.CanRun(context).Should().BeFalse();
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void CanRun_ReturnsTrue_WhenSlackSectionExists()
|
||||
{
|
||||
// Arrange
|
||||
var context = CreateContext(new Dictionary<string, string?>
|
||||
{
|
||||
["Notify:Channels:Slack:WebhookUrl"] = "https://hooks.slack.com/services/T00000000/B00000000/XXXXXXXX"
|
||||
});
|
||||
|
||||
// Act & Assert
|
||||
_check.CanRun(context).Should().BeTrue();
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task RunAsync_Fails_WhenWebhookUrlNotSet()
|
||||
{
|
||||
// Arrange
|
||||
var context = CreateContext(new Dictionary<string, string?>
|
||||
{
|
||||
["Notify:Channels:Slack:Enabled"] = "true"
|
||||
});
|
||||
|
||||
// Act
|
||||
var result = await _check.RunAsync(context, CancellationToken.None);
|
||||
|
||||
// Assert
|
||||
result.Severity.Should().Be(DoctorSeverity.Fail);
|
||||
result.Diagnosis.Should().Contain("not configured");
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task RunAsync_Passes_WhenWebhookUrlSet()
|
||||
{
|
||||
// Arrange - note: SlackConfiguredCheck doesn't validate URL format, only presence
|
||||
var context = CreateContext(new Dictionary<string, string?>
|
||||
{
|
||||
["Notify:Channels:Slack:WebhookUrl"] = "any-non-empty-value"
|
||||
});
|
||||
|
||||
// Act
|
||||
var result = await _check.RunAsync(context, CancellationToken.None);
|
||||
|
||||
// Assert - passes because webhook URL is set (format validation is done by connectivity check)
|
||||
result.Severity.Should().Be(DoctorSeverity.Pass);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task RunAsync_Warns_WhenDisabled()
|
||||
{
|
||||
// Arrange
|
||||
var context = CreateContext(new Dictionary<string, string?>
|
||||
{
|
||||
["Notify:Channels:Slack:WebhookUrl"] = "https://hooks.slack.com/services/T00000000/B00000000/XXXXXXXX",
|
||||
["Notify:Channels:Slack:Enabled"] = "false"
|
||||
});
|
||||
|
||||
// Act
|
||||
var result = await _check.RunAsync(context, CancellationToken.None);
|
||||
|
||||
// Assert
|
||||
result.Severity.Should().Be(DoctorSeverity.Warn);
|
||||
result.Diagnosis.Should().Contain("disabled");
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task RunAsync_Passes_WhenProperlyConfigured()
|
||||
{
|
||||
// Arrange
|
||||
var context = CreateContext(new Dictionary<string, string?>
|
||||
{
|
||||
["Notify:Channels:Slack:WebhookUrl"] = "https://hooks.slack.com/services/T00000000/B00000000/XXXXXXXX",
|
||||
["Notify:Channels:Slack:Enabled"] = "true",
|
||||
["Notify:Channels:Slack:Channel"] = "#alerts"
|
||||
});
|
||||
|
||||
// Act
|
||||
var result = await _check.RunAsync(context, CancellationToken.None);
|
||||
|
||||
// Assert
|
||||
result.Severity.Should().Be(DoctorSeverity.Pass);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void Tags_ContainsExpectedValues()
|
||||
{
|
||||
// Assert
|
||||
_check.Tags.Should().Contain("notify");
|
||||
_check.Tags.Should().Contain("slack");
|
||||
_check.Tags.Should().Contain("configuration");
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void DefaultSeverity_IsWarn()
|
||||
{
|
||||
// Assert
|
||||
_check.DefaultSeverity.Should().Be(DoctorSeverity.Warn);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void EstimatedDuration_IsQuick()
|
||||
{
|
||||
// Assert
|
||||
_check.EstimatedDuration.Should().BeLessThanOrEqualTo(TimeSpan.FromSeconds(1));
|
||||
}
|
||||
|
||||
private static DoctorPluginContext CreateContext(Dictionary<string, string?> configValues)
|
||||
{
|
||||
var config = new ConfigurationBuilder()
|
||||
.AddInMemoryCollection(configValues)
|
||||
.Build();
|
||||
|
||||
return new DoctorPluginContext
|
||||
{
|
||||
Services = new ServiceCollection().BuildServiceProvider(),
|
||||
Configuration = config,
|
||||
TimeProvider = TimeProvider.System,
|
||||
Logger = NullLogger.Instance,
|
||||
EnvironmentName = "Test",
|
||||
PluginConfig = config.GetSection("Doctor:Plugins")
|
||||
};
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,147 @@
|
||||
using FluentAssertions;
|
||||
using Microsoft.Extensions.Configuration;
|
||||
using Microsoft.Extensions.DependencyInjection;
|
||||
using Microsoft.Extensions.Logging.Abstractions;
|
||||
using StellaOps.Doctor.Models;
|
||||
using StellaOps.Doctor.Plugin.Notify.Checks;
|
||||
using StellaOps.Doctor.Plugins;
|
||||
using Xunit;
|
||||
|
||||
namespace StellaOps.Doctor.Plugin.Notify.Tests.Checks;
|
||||
|
||||
[Trait("Category", "Unit")]
|
||||
public class TeamsConfiguredCheckTests
|
||||
{
|
||||
private readonly TeamsConfiguredCheck _check = new();
|
||||
|
||||
[Fact]
|
||||
public void CheckId_ReturnsExpectedValue()
|
||||
{
|
||||
// Assert
|
||||
_check.CheckId.Should().Be("check.notify.teams.configured");
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void CanRun_ReturnsFalse_WhenTeamsNotConfigured()
|
||||
{
|
||||
// Arrange
|
||||
var context = CreateContext(new Dictionary<string, string?>());
|
||||
|
||||
// Act & Assert
|
||||
_check.CanRun(context).Should().BeFalse();
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void CanRun_ReturnsTrue_WhenTeamsSectionExists()
|
||||
{
|
||||
// Arrange
|
||||
var context = CreateContext(new Dictionary<string, string?>
|
||||
{
|
||||
["Notify:Channels:Teams:WebhookUrl"] = "https://webhook.office.com/..."
|
||||
});
|
||||
|
||||
// Act & Assert
|
||||
_check.CanRun(context).Should().BeTrue();
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task RunAsync_Fails_WhenWebhookUrlNotSet()
|
||||
{
|
||||
// Arrange
|
||||
var context = CreateContext(new Dictionary<string, string?>
|
||||
{
|
||||
["Notify:Channels:Teams:Enabled"] = "true"
|
||||
});
|
||||
|
||||
// Act
|
||||
var result = await _check.RunAsync(context, CancellationToken.None);
|
||||
|
||||
// Assert
|
||||
result.Severity.Should().Be(DoctorSeverity.Fail);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task RunAsync_Warns_WhenNotOfficeComDomain()
|
||||
{
|
||||
// Arrange
|
||||
var context = CreateContext(new Dictionary<string, string?>
|
||||
{
|
||||
["Notify:Channels:Teams:WebhookUrl"] = "https://example.com/webhook"
|
||||
});
|
||||
|
||||
// Act
|
||||
var result = await _check.RunAsync(context, CancellationToken.None);
|
||||
|
||||
// Assert
|
||||
result.Severity.Should().Be(DoctorSeverity.Warn);
|
||||
result.Diagnosis.Should().Contain("invalid");
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task RunAsync_Warns_WhenDisabled()
|
||||
{
|
||||
// Arrange
|
||||
var context = CreateContext(new Dictionary<string, string?>
|
||||
{
|
||||
["Notify:Channels:Teams:WebhookUrl"] = "https://webhook.office.com/webhookb2/xxx",
|
||||
["Notify:Channels:Teams:Enabled"] = "false"
|
||||
});
|
||||
|
||||
// Act
|
||||
var result = await _check.RunAsync(context, CancellationToken.None);
|
||||
|
||||
// Assert
|
||||
result.Severity.Should().Be(DoctorSeverity.Warn);
|
||||
result.Diagnosis.Should().Contain("disabled");
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task RunAsync_Passes_WhenProperlyConfigured()
|
||||
{
|
||||
// Arrange
|
||||
var context = CreateContext(new Dictionary<string, string?>
|
||||
{
|
||||
["Notify:Channels:Teams:WebhookUrl"] = "https://webhook.office.com/webhookb2/xxx@xxx/IncomingWebhook/xxx/xxx",
|
||||
["Notify:Channels:Teams:Enabled"] = "true"
|
||||
});
|
||||
|
||||
// Act
|
||||
var result = await _check.RunAsync(context, CancellationToken.None);
|
||||
|
||||
// Assert
|
||||
result.Severity.Should().Be(DoctorSeverity.Pass);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void Tags_ContainsExpectedValues()
|
||||
{
|
||||
// Assert
|
||||
_check.Tags.Should().Contain("notify");
|
||||
_check.Tags.Should().Contain("teams");
|
||||
_check.Tags.Should().Contain("configuration");
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void DefaultSeverity_IsWarn()
|
||||
{
|
||||
// Assert
|
||||
_check.DefaultSeverity.Should().Be(DoctorSeverity.Warn);
|
||||
}
|
||||
|
||||
private static DoctorPluginContext CreateContext(Dictionary<string, string?> configValues)
|
||||
{
|
||||
var config = new ConfigurationBuilder()
|
||||
.AddInMemoryCollection(configValues)
|
||||
.Build();
|
||||
|
||||
return new DoctorPluginContext
|
||||
{
|
||||
Services = new ServiceCollection().BuildServiceProvider(),
|
||||
Configuration = config,
|
||||
TimeProvider = TimeProvider.System,
|
||||
Logger = NullLogger.Instance,
|
||||
EnvironmentName = "Test",
|
||||
PluginConfig = config.GetSection("Doctor:Plugins")
|
||||
};
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,165 @@
|
||||
using FluentAssertions;
|
||||
using Microsoft.Extensions.Configuration;
|
||||
using Microsoft.Extensions.DependencyInjection;
|
||||
using Microsoft.Extensions.Logging.Abstractions;
|
||||
using StellaOps.Doctor.Models;
|
||||
using StellaOps.Doctor.Plugin.Notify.Checks;
|
||||
using StellaOps.Doctor.Plugins;
|
||||
using Xunit;
|
||||
|
||||
namespace StellaOps.Doctor.Plugin.Notify.Tests.Checks;
|
||||
|
||||
[Trait("Category", "Unit")]
|
||||
public class WebhookConfiguredCheckTests
|
||||
{
|
||||
private readonly WebhookConfiguredCheck _check = new();
|
||||
|
||||
[Fact]
|
||||
public void CheckId_ReturnsExpectedValue()
|
||||
{
|
||||
// Assert
|
||||
_check.CheckId.Should().Be("check.notify.webhook.configured");
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void CanRun_ReturnsFalse_WhenWebhookNotConfigured()
|
||||
{
|
||||
// Arrange
|
||||
var context = CreateContext(new Dictionary<string, string?>());
|
||||
|
||||
// Act & Assert
|
||||
_check.CanRun(context).Should().BeFalse();
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void CanRun_ReturnsTrue_WhenWebhookSectionExists()
|
||||
{
|
||||
// Arrange
|
||||
var context = CreateContext(new Dictionary<string, string?>
|
||||
{
|
||||
["Notify:Channels:Webhook:Url"] = "https://example.com/webhook"
|
||||
});
|
||||
|
||||
// Act & Assert
|
||||
_check.CanRun(context).Should().BeTrue();
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task RunAsync_Fails_WhenUrlNotSet()
|
||||
{
|
||||
// Arrange
|
||||
var context = CreateContext(new Dictionary<string, string?>
|
||||
{
|
||||
["Notify:Channels:Webhook:Enabled"] = "true"
|
||||
});
|
||||
|
||||
// Act
|
||||
var result = await _check.RunAsync(context, CancellationToken.None);
|
||||
|
||||
// Assert
|
||||
result.Severity.Should().Be(DoctorSeverity.Fail);
|
||||
result.Diagnosis.Should().Contain("URL");
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task RunAsync_Fails_WhenUrlInvalid()
|
||||
{
|
||||
// Arrange
|
||||
var context = CreateContext(new Dictionary<string, string?>
|
||||
{
|
||||
["Notify:Channels:Webhook:Url"] = "not-a-valid-url"
|
||||
});
|
||||
|
||||
// Act
|
||||
var result = await _check.RunAsync(context, CancellationToken.None);
|
||||
|
||||
// Assert
|
||||
result.Severity.Should().Be(DoctorSeverity.Fail);
|
||||
result.Diagnosis.Should().Contain("format");
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task RunAsync_Warns_WhenDisabled()
|
||||
{
|
||||
// Arrange
|
||||
var context = CreateContext(new Dictionary<string, string?>
|
||||
{
|
||||
["Notify:Channels:Webhook:Url"] = "https://example.com/webhook",
|
||||
["Notify:Channels:Webhook:Enabled"] = "false"
|
||||
});
|
||||
|
||||
// Act
|
||||
var result = await _check.RunAsync(context, CancellationToken.None);
|
||||
|
||||
// Assert
|
||||
result.Severity.Should().Be(DoctorSeverity.Warn);
|
||||
result.Diagnosis.Should().Contain("disabled");
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task RunAsync_Passes_WhenProperlyConfigured()
|
||||
{
|
||||
// Arrange
|
||||
var context = CreateContext(new Dictionary<string, string?>
|
||||
{
|
||||
["Notify:Channels:Webhook:Url"] = "https://example.com/webhook",
|
||||
["Notify:Channels:Webhook:Enabled"] = "true",
|
||||
["Notify:Channels:Webhook:Method"] = "POST"
|
||||
});
|
||||
|
||||
// Act
|
||||
var result = await _check.RunAsync(context, CancellationToken.None);
|
||||
|
||||
// Assert
|
||||
result.Severity.Should().Be(DoctorSeverity.Pass);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task RunAsync_SupportsEndpointAlternativeKey()
|
||||
{
|
||||
// Arrange
|
||||
var context = CreateContext(new Dictionary<string, string?>
|
||||
{
|
||||
["Notify:Channels:Webhook:Endpoint"] = "https://example.com/webhook"
|
||||
});
|
||||
|
||||
// Act
|
||||
var result = await _check.RunAsync(context, CancellationToken.None);
|
||||
|
||||
// Assert
|
||||
result.Severity.Should().Be(DoctorSeverity.Pass);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void Tags_ContainsExpectedValues()
|
||||
{
|
||||
// Assert
|
||||
_check.Tags.Should().Contain("notify");
|
||||
_check.Tags.Should().Contain("webhook");
|
||||
_check.Tags.Should().Contain("configuration");
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void DefaultSeverity_IsWarn()
|
||||
{
|
||||
// Assert
|
||||
_check.DefaultSeverity.Should().Be(DoctorSeverity.Warn);
|
||||
}
|
||||
|
||||
private static DoctorPluginContext CreateContext(Dictionary<string, string?> configValues)
|
||||
{
|
||||
var config = new ConfigurationBuilder()
|
||||
.AddInMemoryCollection(configValues)
|
||||
.Build();
|
||||
|
||||
return new DoctorPluginContext
|
||||
{
|
||||
Services = new ServiceCollection().BuildServiceProvider(),
|
||||
Configuration = config,
|
||||
TimeProvider = TimeProvider.System,
|
||||
Logger = NullLogger.Instance,
|
||||
EnvironmentName = "Test",
|
||||
PluginConfig = config.GetSection("Doctor:Plugins")
|
||||
};
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,178 @@
|
||||
using FluentAssertions;
|
||||
using Microsoft.Extensions.Configuration;
|
||||
using Microsoft.Extensions.DependencyInjection;
|
||||
using Microsoft.Extensions.Logging.Abstractions;
|
||||
using Moq;
|
||||
using StellaOps.Doctor.Plugins;
|
||||
using StellaOps.Notify.Engine;
|
||||
using Xunit;
|
||||
|
||||
namespace StellaOps.Doctor.Plugin.Notify.Tests;
|
||||
|
||||
[Trait("Category", "Unit")]
|
||||
public class NotifyDoctorPluginTests
|
||||
{
|
||||
private readonly NotifyDoctorPlugin _plugin = new();
|
||||
|
||||
[Fact]
|
||||
public void PluginId_ReturnsExpectedValue()
|
||||
{
|
||||
// Assert
|
||||
_plugin.PluginId.Should().Be("stellaops.doctor.notify");
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void Category_IsNotify()
|
||||
{
|
||||
// Assert
|
||||
_plugin.Category.Should().Be(DoctorCategory.Notify);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void DisplayName_IsNotifications()
|
||||
{
|
||||
// Assert
|
||||
_plugin.DisplayName.Should().Be("Notifications");
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void IsAvailable_ReturnsFalse_WhenNoHealthProvidersRegistered()
|
||||
{
|
||||
// Arrange
|
||||
var services = new ServiceCollection().BuildServiceProvider();
|
||||
|
||||
// Act & Assert
|
||||
_plugin.IsAvailable(services).Should().BeFalse();
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void IsAvailable_ReturnsTrue_WhenHealthProvidersRegistered()
|
||||
{
|
||||
// Arrange
|
||||
var mockProvider = new Mock<INotifyChannelHealthProvider>();
|
||||
var services = new ServiceCollection()
|
||||
.AddSingleton(mockProvider.Object)
|
||||
.BuildServiceProvider();
|
||||
|
||||
// Act & Assert
|
||||
_plugin.IsAvailable(services).Should().BeTrue();
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void GetChecks_ReturnsNineChecks()
|
||||
{
|
||||
// Arrange
|
||||
var context = CreateContext();
|
||||
|
||||
// Act
|
||||
var checks = _plugin.GetChecks(context);
|
||||
|
||||
// Assert
|
||||
checks.Should().HaveCount(9);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void GetChecks_ContainsSlackChecks()
|
||||
{
|
||||
// Arrange
|
||||
var context = CreateContext();
|
||||
|
||||
// Act
|
||||
var checks = _plugin.GetChecks(context);
|
||||
|
||||
// Assert
|
||||
checks.Select(c => c.CheckId).Should().Contain("check.notify.slack.configured");
|
||||
checks.Select(c => c.CheckId).Should().Contain("check.notify.slack.connectivity");
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void GetChecks_ContainsTeamsChecks()
|
||||
{
|
||||
// Arrange
|
||||
var context = CreateContext();
|
||||
|
||||
// Act
|
||||
var checks = _plugin.GetChecks(context);
|
||||
|
||||
// Assert
|
||||
checks.Select(c => c.CheckId).Should().Contain("check.notify.teams.configured");
|
||||
checks.Select(c => c.CheckId).Should().Contain("check.notify.teams.connectivity");
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void GetChecks_ContainsWebhookChecks()
|
||||
{
|
||||
// Arrange
|
||||
var context = CreateContext();
|
||||
|
||||
// Act
|
||||
var checks = _plugin.GetChecks(context);
|
||||
|
||||
// Assert
|
||||
checks.Select(c => c.CheckId).Should().Contain("check.notify.webhook.configured");
|
||||
checks.Select(c => c.CheckId).Should().Contain("check.notify.webhook.connectivity");
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void GetChecks_ContainsEmailChecks()
|
||||
{
|
||||
// Arrange
|
||||
var context = CreateContext();
|
||||
|
||||
// Act
|
||||
var checks = _plugin.GetChecks(context);
|
||||
|
||||
// Assert
|
||||
checks.Select(c => c.CheckId).Should().Contain("check.notify.email.configured");
|
||||
checks.Select(c => c.CheckId).Should().Contain("check.notify.email.connectivity");
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void GetChecks_ContainsQueueHealthCheck()
|
||||
{
|
||||
// Arrange
|
||||
var context = CreateContext();
|
||||
|
||||
// Act
|
||||
var checks = _plugin.GetChecks(context);
|
||||
|
||||
// Assert
|
||||
checks.Select(c => c.CheckId).Should().Contain("check.notify.queue.health");
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task InitializeAsync_CompletesWithoutError()
|
||||
{
|
||||
// Arrange
|
||||
var context = CreateContext();
|
||||
|
||||
// Act & Assert
|
||||
await _plugin.Invoking(p => p.InitializeAsync(context, CancellationToken.None))
|
||||
.Should().NotThrowAsync();
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void Version_IsNotNull()
|
||||
{
|
||||
// Assert
|
||||
_plugin.Version.Should().NotBeNull();
|
||||
_plugin.Version.Major.Should().BeGreaterThanOrEqualTo(1);
|
||||
}
|
||||
|
||||
private static DoctorPluginContext CreateContext()
|
||||
{
|
||||
var config = new ConfigurationBuilder()
|
||||
.AddInMemoryCollection(new Dictionary<string, string?>())
|
||||
.Build();
|
||||
|
||||
return new DoctorPluginContext
|
||||
{
|
||||
Services = new ServiceCollection().BuildServiceProvider(),
|
||||
Configuration = config,
|
||||
TimeProvider = TimeProvider.System,
|
||||
Logger = NullLogger.Instance,
|
||||
EnvironmentName = "Test",
|
||||
PluginConfig = config.GetSection("Doctor:Plugins")
|
||||
};
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,25 @@
|
||||
<Project Sdk="Microsoft.NET.Sdk">

  <PropertyGroup>
    <TargetFramework>net10.0</TargetFramework>
    <ImplicitUsings>enable</ImplicitUsings>
    <Nullable>enable</Nullable>
    <LangVersion>preview</LangVersion>
    <IsPackable>false</IsPackable>
    <IsTestProject>true</IsTestProject>
  </PropertyGroup>

  <ItemGroup>
    <PackageReference Include="Moq" />
    <PackageReference Include="FluentAssertions" />
    <PackageReference Include="coverlet.collector">
      <IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
      <PrivateAssets>all</PrivateAssets>
    </PackageReference>
  </ItemGroup>

  <ItemGroup>
    <ProjectReference Include="..\..\__Plugins\StellaOps.Doctor.Plugin.Notify\StellaOps.Doctor.Plugin.Notify.csproj" />
  </ItemGroup>

</Project>
@@ -0,0 +1,80 @@
|
||||
# StellaOps.Doctor.Plugin.Notify.Tests

## Overview

Unit tests for the Notification Doctor Plugin, which validates the Slack, Teams, Email, Webhook, and Queue notification configuration.

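For orientation, the sketch below shows how these checks are typically exercised: build a `DoctorPluginContext` over an in-memory configuration (the same shape the tests' `CreateContext` helpers use), enumerate the plugin's checks, and run the ones that report they can run. This is a minimal sketch, not the production wiring.

```csharp
// Minimal sketch: exercise the Notify doctor checks against an in-memory configuration.
// The context shape mirrors the CreateContext helpers in the tests below.
var config = new ConfigurationBuilder()
    .AddInMemoryCollection(new Dictionary<string, string?>
    {
        ["Notify:Channels:Slack:WebhookUrl"] = "https://hooks.slack.com/services/T00000000/B00000000/XXXXXXXX"
    })
    .Build();

var context = new DoctorPluginContext
{
    Services = new ServiceCollection().BuildServiceProvider(),
    Configuration = config,
    TimeProvider = TimeProvider.System,
    Logger = NullLogger.Instance,
    EnvironmentName = "Test",
    PluginConfig = config.GetSection("Doctor:Plugins")
};

var plugin = new NotifyDoctorPlugin();
foreach (var check in plugin.GetChecks(context))
{
    if (!check.CanRun(context))
    {
        continue; // the relevant configuration section is absent
    }

    var result = await check.RunAsync(context, CancellationToken.None);
    Console.WriteLine($"{check.CheckId}: {result.Severity}");
}
```
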
## Test Coverage

### Plugin Tests
- [x] PluginId validation
- [x] Category is Notify
- [x] DisplayName is Notifications
- [x] IsAvailable returns false when no health providers registered
- [x] IsAvailable returns true when health providers registered
- [x] GetChecks returns all nine checks
- [x] InitializeAsync completes without error
- [x] Version validation

### SlackConfiguredCheck Tests
- [x] CheckId validation
- [x] CanRun returns false when not configured
- [x] CanRun returns true when section exists
- [x] Fails when WebhookUrl not set
- [x] Passes when WebhookUrl is set (URL format is left to the connectivity check)
- [x] Warns when disabled
- [x] Passes when properly configured
- [x] Tags validation
- [x] DefaultSeverity is Warn

### TeamsConfiguredCheck Tests
- [x] CheckId validation
- [x] CanRun returns false when not configured
- [x] CanRun returns true when section exists
- [x] Fails when WebhookUrl not set
- [x] Warns when not webhook.office.com domain
- [x] Warns when disabled
- [x] Passes when properly configured
- [x] Tags validation
- [x] DefaultSeverity is Warn

### WebhookConfiguredCheck Tests
- [x] CheckId validation
- [x] CanRun returns false when not configured
- [x] CanRun returns true when section exists
- [x] Fails when URL not set
- [x] Fails when URL invalid
- [x] Warns when disabled
- [x] Passes when properly configured
- [x] Supports Endpoint alternative key
- [x] Tags validation
- [x] DefaultSeverity is Warn

### EmailConfiguredCheck Tests
- [x] CheckId validation
- [x] CanRun returns false when not configured
- [x] CanRun returns true when section exists
- [x] Fails when SmtpHost not set
- [x] Warns when SmtpPort invalid
- [x] Warns when FromAddress missing
- [x] Warns when disabled
- [x] Passes when properly configured
- [x] Supports alternative Host/Port/From keys
- [x] Tags validation
- [x] DefaultSeverity is Warn

### NotifyQueueHealthCheck Tests
- [x] CheckId validation
- [x] CanRun returns false when not configured
- [x] CanRun returns true when Transport configured
- [x] CanRun returns true when Kind configured
- [x] Skips when no health checks registered
- [x] Tags validation
- [x] DefaultSeverity is Fail
- [x] EstimatedDuration validation
- [x] Name and Description are not empty

## Future Work

- [ ] Integration tests with actual SMTP server (Testcontainers)
- [ ] Integration tests with actual Redis/NATS (Testcontainers)
- [ ] Mock HTTP handler tests for connectivity checks (see the sketch below)

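The mock HTTP handler item above could look roughly like the sketch below. It is a hypothetical example: `SlackConnectivityCheck` and its use of an `HttpClient` resolved from `DoctorPluginContext.Services` are assumptions, since the connectivity check classes are not part of this change.

```csharp
// Hypothetical sketch of a connectivity-check test using a stubbed HTTP handler.
// SlackConnectivityCheck and the named HttpClient are placeholders, not confirmed APIs.
private sealed class StubHandler : HttpMessageHandler
{
    private readonly HttpStatusCode _status;

    public StubHandler(HttpStatusCode status) => _status = status;

    protected override Task<HttpResponseMessage> SendAsync(
        HttpRequestMessage request, CancellationToken cancellationToken)
        => Task.FromResult(new HttpResponseMessage(_status));
}

[Fact]
public async Task RunAsync_Passes_WhenWebhookRespondsOk()
{
    // Register an HttpClient whose primary handler always returns 200 OK.
    var services = new ServiceCollection();
    services.AddHttpClient("slack")
        .ConfigurePrimaryHttpMessageHandler(() => new StubHandler(HttpStatusCode.OK));

    var config = new ConfigurationBuilder()
        .AddInMemoryCollection(new Dictionary<string, string?>
        {
            ["Notify:Channels:Slack:WebhookUrl"] = "https://hooks.slack.com/services/T00000000/B00000000/XXXXXXXX"
        })
        .Build();

    var context = new DoctorPluginContext
    {
        Services = services.BuildServiceProvider(),
        Configuration = config,
        TimeProvider = TimeProvider.System,
        Logger = NullLogger.Instance,
        EnvironmentName = "Test",
        PluginConfig = config.GetSection("Doctor:Plugins")
    };

    var check = new SlackConnectivityCheck(); // placeholder class name
    var result = await check.RunAsync(context, CancellationToken.None);

    result.Severity.Should().Be(DoctorSeverity.Pass);
}
```
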
@@ -30,7 +30,7 @@ public sealed class CombinedRuntimeAdapter : IExportAdapter
|
||||
|
||||
private static readonly JsonWriterOptions WriterOptions = new()
|
||||
{
|
||||
Encoder = JavaScriptEncoder.UnsafeRelaxedJsonEscaping,
|
||||
Encoder = JavaScriptEncoder.Default,
|
||||
Indented = false,
|
||||
SkipValidation = false
|
||||
};
|
||||
@@ -66,7 +66,9 @@ public sealed class CombinedRuntimeAdapter : IExportAdapter
|
||||
|
||||
if (!result.Success)
|
||||
{
|
||||
return ExportAdapterResult.Failed(result.ErrorMessage ?? "Combined export failed");
|
||||
return ExportAdapterResult.Failed(
|
||||
result.ErrorMessage ?? "Combined export failed",
|
||||
context.TimeProvider);
|
||||
}
|
||||
|
||||
var counts = new ExportManifestCounts
|
||||
@@ -106,12 +108,12 @@ public sealed class CombinedRuntimeAdapter : IExportAdapter
|
||||
}
|
||||
catch (OperationCanceledException)
|
||||
{
|
||||
return ExportAdapterResult.Failed("Export cancelled");
|
||||
return ExportAdapterResult.Failed("Export cancelled", context.TimeProvider);
|
||||
}
|
||||
catch (Exception ex)
|
||||
{
|
||||
_logger.LogError(ex, "Combined runtime export failed");
|
||||
return ExportAdapterResult.Failed($"Export failed: {ex.Message}");
|
||||
return ExportAdapterResult.Failed($"Export failed: {ex.Message}", context.TimeProvider);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -187,10 +189,13 @@ public sealed class CombinedRuntimeAdapter : IExportAdapter
|
||||
{
|
||||
cancellationToken.ThrowIfCancellationRequested();
|
||||
|
||||
var content = await context.DataFetcher.FetchAsync(item, cancellationToken);
|
||||
var content = await context.DataFetcher.FetchAsync(item, cancellationToken);
|
||||
if (!content.Success)
|
||||
{
|
||||
itemResults.Add(AdapterItemResult.Failed(item.ItemId, content.ErrorMessage ?? "Failed to fetch"));
|
||||
itemResults.Add(AdapterItemResult.Failed(
|
||||
item.ItemId,
|
||||
content.ErrorMessage ?? "Failed to fetch",
|
||||
context.TimeProvider));
|
||||
continue;
|
||||
}
|
||||
|
||||
@@ -220,7 +225,10 @@ public sealed class CombinedRuntimeAdapter : IExportAdapter
|
||||
var content = await context.DataFetcher.FetchAsync(item, cancellationToken);
|
||||
if (!content.Success)
|
||||
{
|
||||
itemResults.Add(AdapterItemResult.Failed(item.ItemId, content.ErrorMessage ?? "Failed to fetch"));
|
||||
itemResults.Add(AdapterItemResult.Failed(
|
||||
item.ItemId,
|
||||
content.ErrorMessage ?? "Failed to fetch",
|
||||
context.TimeProvider));
|
||||
continue;
|
||||
}
|
||||
|
||||
|
||||
@@ -58,13 +58,13 @@ public sealed record AdapterItemResult
|
||||
|
||||
public DateTimeOffset ProcessedAt { get; init; }
|
||||
|
||||
public static AdapterItemResult Failed(Guid itemId, string errorMessage)
|
||||
public static AdapterItemResult Failed(Guid itemId, string errorMessage, TimeProvider? timeProvider = null)
|
||||
=> new()
|
||||
{
|
||||
ItemId = itemId,
|
||||
Success = false,
|
||||
ErrorMessage = errorMessage,
|
||||
ProcessedAt = DateTimeOffset.UtcNow
|
||||
ProcessedAt = (timeProvider ?? TimeProvider.System).GetUtcNow()
|
||||
};
|
||||
}
|
||||
|
||||
@@ -87,8 +87,13 @@ public sealed record ExportAdapterResult
|
||||
|
||||
public DateTimeOffset CompletedAt { get; init; }
|
||||
|
||||
public static ExportAdapterResult Failed(string errorMessage)
|
||||
=> new() { Success = false, ErrorMessage = errorMessage, CompletedAt = DateTimeOffset.UtcNow };
|
||||
public static ExportAdapterResult Failed(string errorMessage, TimeProvider? timeProvider = null)
|
||||
=> new()
|
||||
{
|
||||
Success = false,
|
||||
ErrorMessage = errorMessage,
|
||||
CompletedAt = (timeProvider ?? TimeProvider.System).GetUtcNow()
|
||||
};
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
|
||||
@@ -1,3 +1,4 @@
|
||||
using StellaOps.Determinism;
|
||||
using StellaOps.ExportCenter.Core.Planner;
|
||||
|
||||
namespace StellaOps.ExportCenter.Core.Adapters;
|
||||
@@ -88,6 +89,11 @@ public sealed record ExportAdapterContext
|
||||
/// Time provider for deterministic timestamps.
|
||||
/// </summary>
|
||||
public TimeProvider TimeProvider { get; init; } = TimeProvider.System;
|
||||
|
||||
/// <summary>
|
||||
/// GUID provider for deterministic identifiers.
|
||||
/// </summary>
|
||||
public IGuidProvider GuidProvider { get; init; } = SystemGuidProvider.Instance;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
|
||||
@@ -78,7 +78,9 @@ public sealed class JsonPolicyAdapter : IExportAdapter
|
||||
}
|
||||
else
|
||||
{
|
||||
return ExportAdapterResult.Failed(ndjsonResult.ErrorMessage ?? "NDJSON export failed");
|
||||
return ExportAdapterResult.Failed(
|
||||
ndjsonResult.ErrorMessage ?? "NDJSON export failed",
|
||||
context.TimeProvider);
|
||||
}
|
||||
}
|
||||
else
|
||||
@@ -131,12 +133,12 @@ public sealed class JsonPolicyAdapter : IExportAdapter
|
||||
}
|
||||
catch (OperationCanceledException)
|
||||
{
|
||||
return ExportAdapterResult.Failed("Export cancelled");
|
||||
return ExportAdapterResult.Failed("Export cancelled", context.TimeProvider);
|
||||
}
|
||||
catch (Exception ex)
|
||||
{
|
||||
_logger.LogError(ex, "JSON policy export failed");
|
||||
return ExportAdapterResult.Failed($"Export failed: {ex.Message}");
|
||||
return ExportAdapterResult.Failed($"Export failed: {ex.Message}", context.TimeProvider);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -185,19 +187,25 @@ public sealed class JsonPolicyAdapter : IExportAdapter
|
||||
var content = await context.DataFetcher.FetchAsync(item, cancellationToken);
|
||||
if (!content.Success)
|
||||
{
|
||||
return AdapterItemResult.Failed(item.ItemId, content.ErrorMessage ?? "Failed to fetch content");
|
||||
return AdapterItemResult.Failed(
|
||||
item.ItemId,
|
||||
content.ErrorMessage ?? "Failed to fetch content",
|
||||
context.TimeProvider);
|
||||
}
|
||||
|
||||
if (string.IsNullOrEmpty(content.JsonContent))
|
||||
{
|
||||
return AdapterItemResult.Failed(item.ItemId, "Item content is empty");
|
||||
return AdapterItemResult.Failed(item.ItemId, "Item content is empty", context.TimeProvider);
|
||||
}
|
||||
|
||||
// Normalize the data content
|
||||
var normalized = _normalizer.Normalize(content.JsonContent);
|
||||
if (!normalized.Success)
|
||||
{
|
||||
return AdapterItemResult.Failed(item.ItemId, normalized.ErrorMessage ?? "Normalization failed");
|
||||
return AdapterItemResult.Failed(
|
||||
item.ItemId,
|
||||
normalized.ErrorMessage ?? "Normalization failed",
|
||||
context.TimeProvider);
|
||||
}
|
||||
|
||||
// Get policy metadata if evaluator is available
|
||||
@@ -223,12 +231,15 @@ public sealed class JsonPolicyAdapter : IExportAdapter
|
||||
if (compression != CompressionFormat.None)
|
||||
{
|
||||
var compressed = _compressor.CompressBytes(outputBytes, compression);
|
||||
if (!compressed.Success)
|
||||
{
|
||||
return AdapterItemResult.Failed(item.ItemId, compressed.ErrorMessage ?? "Compression failed");
|
||||
if (!compressed.Success)
|
||||
{
|
||||
return AdapterItemResult.Failed(
|
||||
item.ItemId,
|
||||
compressed.ErrorMessage ?? "Compression failed",
|
||||
context.TimeProvider);
|
||||
}
|
||||
outputBytes = compressed.CompressedData!;
|
||||
}
|
||||
outputBytes = compressed.CompressedData!;
|
||||
}
|
||||
|
||||
// Write to file
|
||||
var fileName = BuildFileName(item, context.Config);
|
||||
@@ -257,7 +268,7 @@ public sealed class JsonPolicyAdapter : IExportAdapter
|
||||
catch (Exception ex)
|
||||
{
|
||||
_logger.LogWarning(ex, "Failed to process item {ItemId}", item.ItemId);
|
||||
return AdapterItemResult.Failed(item.ItemId, ex.Message);
|
||||
return AdapterItemResult.Failed(item.ItemId, ex.Message, context.TimeProvider);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -307,20 +318,26 @@ public sealed class JsonPolicyAdapter : IExportAdapter
|
||||
var content = await context.DataFetcher.FetchAsync(item, cancellationToken);
|
||||
if (!content.Success)
|
||||
{
|
||||
itemResults.Add(AdapterItemResult.Failed(item.ItemId, content.ErrorMessage ?? "Failed to fetch"));
|
||||
itemResults.Add(AdapterItemResult.Failed(
|
||||
item.ItemId,
|
||||
content.ErrorMessage ?? "Failed to fetch",
|
||||
context.TimeProvider));
|
||||
continue;
|
||||
}
|
||||
|
||||
if (string.IsNullOrEmpty(content.JsonContent))
|
||||
{
|
||||
itemResults.Add(AdapterItemResult.Failed(item.ItemId, "Empty content"));
|
||||
itemResults.Add(AdapterItemResult.Failed(item.ItemId, "Empty content", context.TimeProvider));
|
||||
continue;
|
||||
}
|
||||
|
||||
var normalized = _normalizer.Normalize(content.JsonContent);
|
||||
if (!normalized.Success)
|
||||
{
|
||||
itemResults.Add(AdapterItemResult.Failed(item.ItemId, normalized.ErrorMessage ?? "Normalization failed"));
|
||||
itemResults.Add(AdapterItemResult.Failed(
|
||||
item.ItemId,
|
||||
normalized.ErrorMessage ?? "Normalization failed",
|
||||
context.TimeProvider));
|
||||
continue;
|
||||
}
|
||||
|
||||
@@ -348,7 +365,7 @@ public sealed class JsonPolicyAdapter : IExportAdapter
|
||||
}
|
||||
catch (Exception ex)
|
||||
{
|
||||
itemResults.Add(AdapterItemResult.Failed(item.ItemId, ex.Message));
|
||||
itemResults.Add(AdapterItemResult.Failed(item.ItemId, ex.Message, context.TimeProvider));
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -68,7 +68,9 @@ public sealed class JsonRawAdapter : IExportAdapter
|
||||
}
|
||||
else
|
||||
{
|
||||
return ExportAdapterResult.Failed(ndjsonResult.ErrorMessage ?? "NDJSON export failed");
|
||||
return ExportAdapterResult.Failed(
|
||||
ndjsonResult.ErrorMessage ?? "NDJSON export failed",
|
||||
context.TimeProvider);
|
||||
}
|
||||
}
|
||||
else
|
||||
@@ -124,12 +126,12 @@ public sealed class JsonRawAdapter : IExportAdapter
|
||||
}
|
||||
catch (OperationCanceledException)
|
||||
{
|
||||
return ExportAdapterResult.Failed("Export cancelled");
|
||||
return ExportAdapterResult.Failed("Export cancelled", context.TimeProvider);
|
||||
}
|
||||
catch (Exception ex)
|
||||
{
|
||||
_logger.LogError(ex, "JSON raw export failed");
|
||||
return ExportAdapterResult.Failed($"Export failed: {ex.Message}");
|
||||
return ExportAdapterResult.Failed($"Export failed: {ex.Message}", context.TimeProvider);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -178,19 +180,25 @@ public sealed class JsonRawAdapter : IExportAdapter
|
||||
var content = await context.DataFetcher.FetchAsync(item, cancellationToken);
|
||||
if (!content.Success)
|
||||
{
|
||||
return AdapterItemResult.Failed(item.ItemId, content.ErrorMessage ?? "Failed to fetch content");
|
||||
return AdapterItemResult.Failed(
|
||||
item.ItemId,
|
||||
content.ErrorMessage ?? "Failed to fetch content",
|
||||
context.TimeProvider);
|
||||
}
|
||||
|
||||
if (string.IsNullOrEmpty(content.JsonContent))
|
||||
{
|
||||
return AdapterItemResult.Failed(item.ItemId, "Item content is empty");
|
||||
return AdapterItemResult.Failed(item.ItemId, "Item content is empty", context.TimeProvider);
|
||||
}
|
||||
|
||||
// Normalize JSON
|
||||
var normalized = _normalizer.Normalize(content.JsonContent);
|
||||
if (!normalized.Success)
|
||||
{
|
||||
return AdapterItemResult.Failed(item.ItemId, normalized.ErrorMessage ?? "Normalization failed");
|
||||
return AdapterItemResult.Failed(
|
||||
item.ItemId,
|
||||
normalized.ErrorMessage ?? "Normalization failed",
|
||||
context.TimeProvider);
|
||||
}
|
||||
|
||||
// Apply pretty print if requested
|
||||
@@ -209,7 +217,10 @@ public sealed class JsonRawAdapter : IExportAdapter
|
||||
var compressed = _compressor.CompressBytes(outputBytes, compression);
|
||||
if (!compressed.Success)
|
||||
{
|
||||
return AdapterItemResult.Failed(item.ItemId, compressed.ErrorMessage ?? "Compression failed");
|
||||
return AdapterItemResult.Failed(
|
||||
item.ItemId,
|
||||
compressed.ErrorMessage ?? "Compression failed",
|
||||
context.TimeProvider);
|
||||
}
|
||||
outputBytes = compressed.CompressedData!;
|
||||
}
|
||||
@@ -241,7 +252,7 @@ public sealed class JsonRawAdapter : IExportAdapter
|
||||
catch (Exception ex)
|
||||
{
|
||||
_logger.LogWarning(ex, "Failed to process item {ItemId}", item.ItemId);
|
||||
return AdapterItemResult.Failed(item.ItemId, ex.Message);
|
||||
return AdapterItemResult.Failed(item.ItemId, ex.Message, context.TimeProvider);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -261,20 +272,26 @@ public sealed class JsonRawAdapter : IExportAdapter
|
||||
var content = await context.DataFetcher.FetchAsync(item, cancellationToken);
|
||||
if (!content.Success)
|
||||
{
|
||||
itemResults.Add(AdapterItemResult.Failed(item.ItemId, content.ErrorMessage ?? "Failed to fetch"));
|
||||
itemResults.Add(AdapterItemResult.Failed(
|
||||
item.ItemId,
|
||||
content.ErrorMessage ?? "Failed to fetch",
|
||||
context.TimeProvider));
|
||||
continue;
|
||||
}
|
||||
|
||||
if (string.IsNullOrEmpty(content.JsonContent))
|
||||
{
|
||||
itemResults.Add(AdapterItemResult.Failed(item.ItemId, "Empty content"));
|
||||
itemResults.Add(AdapterItemResult.Failed(item.ItemId, "Empty content", context.TimeProvider));
|
||||
continue;
|
||||
}
|
||||
|
||||
var normalized = _normalizer.Normalize(content.JsonContent);
|
||||
if (!normalized.Success)
|
||||
{
|
||||
itemResults.Add(AdapterItemResult.Failed(item.ItemId, normalized.ErrorMessage ?? "Normalization failed"));
|
||||
itemResults.Add(AdapterItemResult.Failed(
|
||||
item.ItemId,
|
||||
normalized.ErrorMessage ?? "Normalization failed",
|
||||
context.TimeProvider));
|
||||
continue;
|
||||
}
|
||||
|
||||
@@ -292,7 +309,7 @@ public sealed class JsonRawAdapter : IExportAdapter
|
||||
}
|
||||
catch (Exception ex)
|
||||
{
|
||||
itemResults.Add(AdapterItemResult.Failed(item.ItemId, ex.Message));
|
||||
itemResults.Add(AdapterItemResult.Failed(item.ItemId, ex.Message, context.TimeProvider));
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -50,7 +50,7 @@ public sealed class MirrorAdapter : IExportAdapter
|
||||
context.Items.Count);
|
||||
|
||||
// Create temp directory for staging files
|
||||
var tempDir = Path.Combine(Path.GetTempPath(), $"mirror-{Guid.NewGuid():N}");
|
||||
var tempDir = Path.Combine(Path.GetTempPath(), $"mirror-{context.GuidProvider.NewGuid():N}");
|
||||
Directory.CreateDirectory(tempDir);
|
||||
|
||||
try
|
||||
@@ -81,7 +81,7 @@ public sealed class MirrorAdapter : IExportAdapter
|
||||
|
||||
// Build the mirror bundle
|
||||
var request = new MirrorBundleBuildRequest(
|
||||
Guid.TryParse(context.CorrelationId, out var runId) ? runId : Guid.NewGuid(),
|
||||
Guid.TryParse(context.CorrelationId, out var runId) ? runId : context.GuidProvider.NewGuid(),
|
||||
context.TenantId,
|
||||
MirrorBundleVariant.Full,
|
||||
selectors,
|
||||
@@ -176,7 +176,7 @@ public sealed class MirrorAdapter : IExportAdapter
|
||||
catch (Exception ex)
|
||||
{
|
||||
_logger.LogError(ex, "Failed to build mirror bundle");
|
||||
return ExportAdapterResult.Failed($"Mirror bundle build failed: {ex.Message}");
|
||||
return ExportAdapterResult.Failed($"Mirror bundle build failed: {ex.Message}", context.TimeProvider);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -297,13 +297,13 @@ public sealed class MirrorAdapter : IExportAdapter
|
||||
OutputPath = tempFilePath,
|
||||
OutputSizeBytes = new FileInfo(tempFilePath).Length,
|
||||
ContentHash = content.OriginalHash,
|
||||
ProcessedAt = DateTimeOffset.UtcNow
|
||||
ProcessedAt = context.TimeProvider.GetUtcNow()
|
||||
});
|
||||
}
|
||||
catch (Exception ex)
|
||||
{
|
||||
_logger.LogWarning(ex, "Failed to process item {ItemId}", item.ItemId);
|
||||
itemResults.Add(AdapterItemResult.Failed(item.ItemId, ex.Message));
|
||||
itemResults.Add(AdapterItemResult.Failed(item.ItemId, ex.Message, context.TimeProvider));
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -60,7 +60,8 @@ public sealed class MirrorDeltaAdapter : IExportAdapter
|
||||
if (deltaOptions is null)
|
||||
{
|
||||
return ExportAdapterResult.Failed(
|
||||
"Delta options required: provide 'baseExportId' and 'baseManifestDigest' in context metadata");
|
||||
"Delta options required: provide 'baseExportId' and 'baseManifestDigest' in context metadata",
|
||||
context.TimeProvider);
|
||||
}
|
||||
|
||||
_logger.LogInformation(
|
||||
@@ -68,7 +69,7 @@ public sealed class MirrorDeltaAdapter : IExportAdapter
|
||||
deltaOptions.BaseExportId, context.Items.Count);
|
||||
|
||||
// Create temp directory for staging files
|
||||
var tempDir = Path.Combine(Path.GetTempPath(), $"mirror-delta-{Guid.NewGuid():N}");
|
||||
var tempDir = Path.Combine(Path.GetTempPath(), $"mirror-delta-{context.GuidProvider.NewGuid():N}");
|
||||
Directory.CreateDirectory(tempDir);
|
||||
|
||||
try
|
||||
@@ -100,7 +101,9 @@ public sealed class MirrorDeltaAdapter : IExportAdapter
|
||||
var deltaResult = await _deltaService.ComputeDeltaAsync(deltaRequest, cancellationToken);
|
||||
if (!deltaResult.Success)
|
||||
{
|
||||
return ExportAdapterResult.Failed(deltaResult.ErrorMessage ?? "Delta computation failed");
|
||||
return ExportAdapterResult.Failed(
|
||||
deltaResult.ErrorMessage ?? "Delta computation failed",
|
||||
context.TimeProvider);
|
||||
}
|
||||
|
||||
// If no changes, return early with empty delta
|
||||
@@ -123,7 +126,7 @@ public sealed class MirrorDeltaAdapter : IExportAdapter
|
||||
|
||||
// Create the delta bundle request
|
||||
var bundleRequest = new MirrorBundleBuildRequest(
|
||||
Guid.TryParse(context.CorrelationId, out var runId) ? runId : Guid.NewGuid(),
|
||||
Guid.TryParse(context.CorrelationId, out var runId) ? runId : context.GuidProvider.NewGuid(),
|
||||
context.TenantId,
|
||||
MirrorBundleVariant.Delta,
|
||||
selectors,
|
||||
@@ -236,7 +239,9 @@ public sealed class MirrorDeltaAdapter : IExportAdapter
|
||||
catch (Exception ex)
|
||||
{
|
||||
_logger.LogError(ex, "Failed to build mirror delta bundle");
|
||||
return ExportAdapterResult.Failed($"Mirror delta bundle build failed: {ex.Message}");
|
||||
return ExportAdapterResult.Failed(
|
||||
$"Mirror delta bundle build failed: {ex.Message}",
|
||||
context.TimeProvider);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -320,7 +325,8 @@ public sealed class MirrorDeltaAdapter : IExportAdapter
|
||||
{
|
||||
itemResults.Add(AdapterItemResult.Failed(
|
||||
item.ItemId,
|
||||
content.ErrorMessage ?? "Failed to fetch content or content is empty"));
|
||||
content.ErrorMessage ?? "Failed to fetch content or content is empty",
|
||||
context.TimeProvider));
|
||||
continue;
|
||||
}
|
||||
|
||||
@@ -330,7 +336,8 @@ public sealed class MirrorDeltaAdapter : IExportAdapter
|
||||
{
|
||||
itemResults.Add(AdapterItemResult.Failed(
|
||||
item.ItemId,
|
||||
$"Unknown item kind: {item.Kind}"));
|
||||
$"Unknown item kind: {item.Kind}",
|
||||
context.TimeProvider));
|
||||
continue;
|
||||
}
|
||||
|
||||
@@ -388,7 +395,7 @@ public sealed class MirrorDeltaAdapter : IExportAdapter
|
||||
catch (Exception ex)
|
||||
{
|
||||
_logger.LogWarning(ex, "Failed to process item {ItemId}", item.ItemId);
|
||||
itemResults.Add(AdapterItemResult.Failed(item.ItemId, ex.Message));
|
||||
itemResults.Add(AdapterItemResult.Failed(item.ItemId, ex.Message, context.TimeProvider));
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -71,11 +71,12 @@ public sealed class TrivyDbAdapter : IExportAdapter
|
||||
if (_options.SchemaVersion != SupportedSchemaVersion)
|
||||
{
|
||||
return ExportAdapterResult.Failed(
|
||||
$"Unsupported Trivy DB schema version {_options.SchemaVersion}. Only v{SupportedSchemaVersion} is supported.");
|
||||
$"Unsupported Trivy DB schema version {_options.SchemaVersion}. Only v{SupportedSchemaVersion} is supported.",
|
||||
context.TimeProvider);
|
||||
}
|
||||
|
||||
// Create temp directory for staging
|
||||
var tempDir = Path.Combine(Path.GetTempPath(), $"trivy-db-{Guid.NewGuid():N}");
|
||||
var tempDir = Path.Combine(Path.GetTempPath(), $"trivy-db-{context.GuidProvider.NewGuid():N}");
|
||||
Directory.CreateDirectory(tempDir);
|
||||
|
||||
try
|
||||
@@ -100,7 +101,8 @@ public sealed class TrivyDbAdapter : IExportAdapter
|
||||
if (totalVulnCount == 0 && !_options.AllowEmpty)
|
||||
{
|
||||
return ExportAdapterResult.Failed(
|
||||
"No vulnerabilities mapped. Set AllowEmpty=true to allow empty bundles.");
|
||||
"No vulnerabilities mapped. Set AllowEmpty=true to allow empty bundles.",
|
||||
context.TimeProvider);
|
||||
}
|
||||
|
||||
_logger.LogInformation(
|
||||
@@ -202,7 +204,9 @@ public sealed class TrivyDbAdapter : IExportAdapter
|
||||
catch (Exception ex)
|
||||
{
|
||||
_logger.LogError(ex, "Failed to build Trivy DB bundle");
|
||||
return ExportAdapterResult.Failed($"Trivy DB bundle build failed: {ex.Message}");
|
||||
return ExportAdapterResult.Failed(
|
||||
$"Trivy DB bundle build failed: {ex.Message}",
|
||||
context.TimeProvider);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -285,7 +289,8 @@ public sealed class TrivyDbAdapter : IExportAdapter
|
||||
{
|
||||
itemResults.Add(AdapterItemResult.Failed(
|
||||
item.ItemId,
|
||||
content.ErrorMessage ?? "Failed to fetch content or content is empty"));
|
||||
content.ErrorMessage ?? "Failed to fetch content or content is empty",
|
||||
context.TimeProvider));
|
||||
continue;
|
||||
}
|
||||
|
||||
@@ -298,7 +303,7 @@ public sealed class TrivyDbAdapter : IExportAdapter
|
||||
{
|
||||
ItemId = item.ItemId,
|
||||
Success = true,
|
||||
ProcessedAt = DateTimeOffset.UtcNow
|
||||
ProcessedAt = context.TimeProvider.GetUtcNow()
|
||||
});
|
||||
continue;
|
||||
}
|
||||
@@ -327,13 +332,13 @@ public sealed class TrivyDbAdapter : IExportAdapter
|
||||
ItemId = item.ItemId,
|
||||
Success = true,
|
||||
ContentHash = content.OriginalHash,
|
||||
ProcessedAt = DateTimeOffset.UtcNow
|
||||
ProcessedAt = context.TimeProvider.GetUtcNow()
|
||||
});
|
||||
}
|
||||
catch (Exception ex)
|
||||
{
|
||||
_logger.LogWarning(ex, "Failed to process item {ItemId}", item.ItemId);
|
||||
itemResults.Add(AdapterItemResult.Failed(item.ItemId, ex.Message));
|
||||
itemResults.Add(AdapterItemResult.Failed(item.ItemId, ex.Message, context.TimeProvider));
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -384,7 +389,7 @@ public sealed class TrivyDbAdapter : IExportAdapter
|
||||
int vulnerabilityCount)
|
||||
{
|
||||
var now = context.TimeProvider.GetUtcNow();
|
||||
var runId = Guid.TryParse(context.CorrelationId, out var id) ? id : Guid.NewGuid();
|
||||
var runId = Guid.TryParse(context.CorrelationId, out var id) ? id : context.GuidProvider.NewGuid();
|
||||
|
||||
return new TrivyDbMetadata
|
||||
{
|
||||
|
||||
@@ -77,7 +77,7 @@ public sealed class TrivyJavaDbAdapter : IExportAdapter
|
||||
context.Items.Count);
|
||||
|
||||
// Create temp directory for staging
|
||||
var tempDir = Path.Combine(Path.GetTempPath(), $"trivy-java-db-{Guid.NewGuid():N}");
|
||||
var tempDir = Path.Combine(Path.GetTempPath(), $"trivy-java-db-{context.GuidProvider.NewGuid():N}");
|
||||
Directory.CreateDirectory(tempDir);
|
||||
|
||||
try
|
||||
@@ -110,7 +110,8 @@ public sealed class TrivyJavaDbAdapter : IExportAdapter
|
||||
if (totalVulnCount == 0 && !_options.AllowEmpty)
|
||||
{
|
||||
return ExportAdapterResult.Failed(
|
||||
"No Java vulnerabilities mapped. Set AllowEmpty=true to allow empty bundles.");
|
||||
"No Java vulnerabilities mapped. Set AllowEmpty=true to allow empty bundles.",
|
||||
context.TimeProvider);
|
||||
}
|
||||
|
||||
_logger.LogInformation(
|
||||
@@ -209,7 +210,9 @@ public sealed class TrivyJavaDbAdapter : IExportAdapter
|
||||
catch (Exception ex)
|
||||
{
|
||||
_logger.LogError(ex, "Failed to build Trivy Java DB bundle");
|
||||
return ExportAdapterResult.Failed($"Trivy Java DB bundle build failed: {ex.Message}");
|
||||
return ExportAdapterResult.Failed(
|
||||
$"Trivy Java DB bundle build failed: {ex.Message}",
|
||||
context.TimeProvider);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -286,7 +289,8 @@ public sealed class TrivyJavaDbAdapter : IExportAdapter
|
||||
{
|
||||
itemResults.Add(AdapterItemResult.Failed(
|
||||
item.ItemId,
|
||||
content.ErrorMessage ?? "Failed to fetch content or content is empty"));
|
||||
content.ErrorMessage ?? "Failed to fetch content or content is empty",
|
||||
context.TimeProvider));
|
||||
continue;
|
||||
}
|
||||
|
||||
@@ -299,7 +303,7 @@ public sealed class TrivyJavaDbAdapter : IExportAdapter
|
||||
{
|
||||
ItemId = item.ItemId,
|
||||
Success = true,
|
||||
ProcessedAt = DateTimeOffset.UtcNow
|
||||
ProcessedAt = context.TimeProvider.GetUtcNow()
|
||||
});
|
||||
continue;
|
||||
}
|
||||
@@ -359,13 +363,13 @@ public sealed class TrivyJavaDbAdapter : IExportAdapter
|
||||
ItemId = item.ItemId,
|
||||
Success = true,
|
||||
ContentHash = content.OriginalHash,
|
||||
ProcessedAt = DateTimeOffset.UtcNow
|
||||
ProcessedAt = context.TimeProvider.GetUtcNow()
|
||||
});
|
||||
}
|
||||
catch (Exception ex)
|
||||
{
|
||||
_logger.LogWarning(ex, "Failed to process item {ItemId}", item.ItemId);
|
||||
itemResults.Add(AdapterItemResult.Failed(item.ItemId, ex.Message));
|
||||
itemResults.Add(AdapterItemResult.Failed(item.ItemId, ex.Message, context.TimeProvider));
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -454,7 +458,7 @@ public sealed class TrivyJavaDbAdapter : IExportAdapter
|
||||
int vulnerabilityCount)
|
||||
{
|
||||
var now = context.TimeProvider.GetUtcNow();
|
||||
var runId = Guid.TryParse(context.CorrelationId, out var id) ? id : Guid.NewGuid();
|
||||
var runId = Guid.TryParse(context.CorrelationId, out var id) ? id : context.GuidProvider.NewGuid();
|
||||
|
||||
return new TrivyJavaDbMetadata
|
||||
{
|
||||
|
||||
@@ -1,3 +1,6 @@
|
||||
using System.Buffers.Binary;
|
||||
using System.Security.Cryptography;
|
||||
using System.Text;
|
||||
using System.Text.RegularExpressions;
|
||||
using Microsoft.Extensions.Logging;
|
||||
|
||||
@@ -40,7 +43,7 @@ public sealed class ExportScopeResolver : IExportScopeResolver
|
||||
var items = GenerateResolvedItems(tenantId, scope);
|
||||
|
||||
// Apply sampling if configured
|
||||
var (sampledItems, samplingMetadata) = ApplySampling(items, scope.Sampling);
|
||||
var (sampledItems, samplingMetadata) = ApplySampling(items, scope.Sampling, tenantId, scope);
|
||||
|
||||
// Apply max items limit
|
||||
var maxItems = scope.MaxItems ?? DefaultMaxItems;
|
||||
@@ -223,7 +226,7 @@ public sealed class ExportScopeResolver : IExportScopeResolver
|
||||
foreach (var sourceRef in scope.SourceRefs)
|
||||
{
|
||||
var kind = scope.TargetKinds.FirstOrDefault() ?? "sbom";
|
||||
items.Add(CreateResolvedItem(sourceRef, kind, now));
|
||||
items.Add(CreateResolvedItem(tenantId, sourceRef, kind, now));
|
||||
}
|
||||
}
|
||||
else
|
||||
@@ -237,7 +240,7 @@ public sealed class ExportScopeResolver : IExportScopeResolver
|
||||
for (var i = 0; i < itemsPerKind; i++)
|
||||
{
|
||||
var sourceRef = $"{kind}-{tenantId:N}-{i:D4}";
|
||||
items.Add(CreateResolvedItem(sourceRef, kind, now.AddHours(-i)));
|
||||
items.Add(CreateResolvedItem(tenantId, sourceRef, kind, now.AddHours(-i)));
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -285,11 +288,12 @@ public sealed class ExportScopeResolver : IExportScopeResolver
|
||||
return items;
|
||||
}
|
||||
|
||||
private ResolvedExportItem CreateResolvedItem(string sourceRef, string kind, DateTimeOffset createdAt)
|
||||
private static ResolvedExportItem CreateResolvedItem(Guid tenantId, string sourceRef, string kind, DateTimeOffset createdAt)
|
||||
{
|
||||
var itemId = CreateDeterministicItemId(tenantId, sourceRef, kind);
|
||||
return new ResolvedExportItem
|
||||
{
|
||||
ItemId = Guid.NewGuid(),
|
||||
ItemId = itemId,
|
||||
Kind = kind,
|
||||
SourceRef = sourceRef,
|
||||
Name = $"{kind}-{sourceRef}",
|
||||
@@ -308,14 +312,16 @@ public sealed class ExportScopeResolver : IExportScopeResolver
|
||||
|
||||
private static (List<ResolvedExportItem> Items, SamplingMetadata? Metadata) ApplySampling(
|
||||
List<ResolvedExportItem> items,
|
||||
SamplingConfig? sampling)
|
||||
SamplingConfig? sampling,
|
||||
Guid tenantId,
|
||||
ExportScope scope)
|
||||
{
|
||||
if (sampling is null || sampling.Strategy == SamplingStrategy.None)
|
||||
{
|
||||
return (items, null);
|
||||
}
|
||||
|
||||
var seed = sampling.Seed ?? Environment.TickCount;
|
||||
var seed = sampling.Seed ?? ComputeDeterministicSeed(tenantId, scope);
|
||||
var size = Math.Min(sampling.Size, items.Count);
|
||||
|
||||
List<ResolvedExportItem> sampled;
|
||||
@@ -382,4 +388,66 @@ public sealed class ExportScopeResolver : IExportScopeResolver
|
||||
_ => item.Metadata.TryGetValue(field, out var value) ? value : "unknown"
|
||||
};
|
||||
}
|
||||
|
||||
private static Guid CreateDeterministicItemId(Guid tenantId, string sourceRef, string kind)
|
||||
{
|
||||
var seed = $"{tenantId:D}|{kind}|{sourceRef}";
|
||||
var hash = SHA256.HashData(Encoding.UTF8.GetBytes(seed));
|
||||
return new Guid(hash.AsSpan(0, 16).ToArray());
|
||||
}
|
||||
|
||||
private static int ComputeDeterministicSeed(Guid tenantId, ExportScope scope)
|
||||
{
|
||||
var builder = new StringBuilder();
|
||||
builder.Append("tenant=").Append(tenantId.ToString("D"));
|
||||
AppendList(builder, "targets", scope.TargetKinds);
|
||||
AppendList(builder, "sources", scope.SourceRefs);
|
||||
AppendList(builder, "tags", scope.Tags);
|
||||
AppendList(builder, "namespaces", scope.Namespaces);
|
||||
AppendList(builder, "exclude", scope.ExcludePatterns);
|
||||
AppendList(builder, "runIds", scope.RunIds.Select(id => id.ToString("D")).ToList());
|
||||
|
||||
if (scope.DateRange is not null)
|
||||
{
|
||||
builder.Append("|dateField=").Append(scope.DateRange.Field.ToString());
|
||||
if (scope.DateRange.From.HasValue)
|
||||
{
|
||||
builder.Append("|dateFrom=").Append(scope.DateRange.From.Value.ToString("O", CultureInfo.InvariantCulture));
|
||||
}
|
||||
if (scope.DateRange.To.HasValue)
|
||||
{
|
||||
builder.Append("|dateTo=").Append(scope.DateRange.To.Value.ToString("O", CultureInfo.InvariantCulture));
|
||||
}
|
||||
}
|
||||
|
||||
if (scope.MaxItems.HasValue)
|
||||
{
|
||||
builder.Append("|maxItems=").Append(scope.MaxItems.Value.ToString(CultureInfo.InvariantCulture));
|
||||
}
|
||||
|
||||
if (scope.Sampling is not null)
|
||||
{
|
||||
builder.Append("|sampling=").Append(scope.Sampling.Strategy.ToString());
|
||||
builder.Append("|sampleSize=").Append(scope.Sampling.Size.ToString(CultureInfo.InvariantCulture));
|
||||
if (!string.IsNullOrWhiteSpace(scope.Sampling.StratifyBy))
|
||||
{
|
||||
builder.Append("|stratifyBy=").Append(scope.Sampling.StratifyBy);
|
||||
}
|
||||
}
|
||||
|
||||
var hash = SHA256.HashData(Encoding.UTF8.GetBytes(builder.ToString()));
|
||||
return BinaryPrimitives.ReadInt32LittleEndian(hash.AsSpan(0, 4));
|
||||
}
|
||||
|
||||
private static void AppendList(StringBuilder builder, string label, IReadOnlyList<string> values)
|
||||
{
|
||||
if (values.Count == 0)
|
||||
{
|
||||
return;
|
||||
}
|
||||
|
||||
builder.Append('|').Append(label).Append('=');
|
||||
var ordered = values.OrderBy(v => v, StringComparer.Ordinal);
|
||||
builder.Append(string.Join(",", ordered));
|
||||
}
|
||||
}
|
||||
|
||||
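Aside, not part of the commit: the resolver change above swaps Guid.NewGuid() for a hash-derived ItemId, so resolving the same tenant/kind/sourceRef repeatedly keeps item identities stable. A minimal standalone sketch of that idea (BCL only, illustrative names):

using System;
using System.Security.Cryptography;
using System.Text;

internal static class DeterministicIdSketch
{
    // Same inputs always yield the same GUID, so re-running an export scope
    // resolution does not churn item identities.
    public static Guid FromParts(Guid tenantId, string kind, string sourceRef)
    {
        var seed = $"{tenantId:D}|{kind}|{sourceRef}";
        var hash = SHA256.HashData(Encoding.UTF8.GetBytes(seed));
        return new Guid(hash.AsSpan(0, 16)); // first 16 bytes of the 32-byte digest
    }
}

Calling it twice with the same arguments returns equal GUIDs, which is what makes the resolved items reproducible across runs.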
@@ -0,0 +1,40 @@
|
||||
using System.Reflection;
|
||||
using StellaOps.ExportCenter.Core.Domain;
|
||||
using StellaOps.ExportCenter.WebService.Api;
|
||||
using Xunit;
|
||||
|
||||
namespace StellaOps.ExportCenter.Tests.Api;
|
||||
|
||||
public sealed class ExportApiEndpointsTests
|
||||
{
|
||||
[Fact]
|
||||
public void MapToProfileResponse_InvalidJson_ReturnsNullConfig()
|
||||
{
|
||||
var now = new DateTimeOffset(2025, 1, 2, 14, 0, 0, TimeSpan.Zero);
|
||||
var profile = new ExportProfile
|
||||
{
|
||||
ProfileId = Guid.NewGuid(),
|
||||
TenantId = Guid.NewGuid(),
|
||||
Name = "test",
|
||||
Kind = ExportProfileKind.AdHoc,
|
||||
Status = ExportProfileStatus.Active,
|
||||
ScopeJson = "{invalid",
|
||||
FormatJson = "{invalid",
|
||||
SigningJson = "{invalid",
|
||||
CreatedAt = now,
|
||||
UpdatedAt = now
|
||||
};
|
||||
|
||||
var method = typeof(ExportApiEndpoints).GetMethod(
|
||||
"MapToProfileResponse",
|
||||
BindingFlags.NonPublic | BindingFlags.Static);
|
||||
|
||||
Assert.NotNull(method);
|
||||
|
||||
var response = (ExportProfileResponse)method!.Invoke(null, new object[] { profile })!;
|
||||
|
||||
Assert.Null(response.Scope);
|
||||
Assert.Null(response.Format);
|
||||
Assert.Null(response.Signing);
|
||||
}
|
||||
}
|
||||
@@ -1,12 +1,19 @@
|
||||
using Microsoft.Extensions.Logging.Abstractions;
|
||||
using StellaOps.ExportCenter.Core.Domain;
|
||||
using StellaOps.ExportCenter.WebService.Api;
|
||||
using StellaOps.TestKit;
|
||||
|
||||
namespace StellaOps.ExportCenter.Tests.Api;
|
||||
|
||||
public class ExportApiRepositoryTests
|
||||
{
|
||||
private readonly Guid _tenantId = Guid.NewGuid();
|
||||
private readonly Guid _tenantId = Guid.Parse("00000000-0000-0000-0000-000000000001");
|
||||
private readonly FakeTimeProvider _timeProvider;
|
||||
|
||||
public ExportApiRepositoryTests()
|
||||
{
|
||||
_timeProvider = new FakeTimeProvider(new DateTimeOffset(2025, 1, 1, 0, 0, 0, TimeSpan.Zero));
|
||||
}
|
||||
|
||||
// ========================================================================
|
||||
// Profile Repository Tests
|
||||
@@ -16,7 +23,7 @@ public class ExportApiRepositoryTests
|
||||
public async Task ProfileRepo_CreateAsync_StoresProfile()
|
||||
{
|
||||
// Arrange
|
||||
var repo = new InMemoryExportProfileRepository(NullLogger<InMemoryExportProfileRepository>.Instance);
|
||||
var repo = new InMemoryExportProfileRepository(NullLogger<InMemoryExportProfileRepository>.Instance, _timeProvider);
|
||||
var profile = CreateTestProfile();
|
||||
|
||||
// Act
|
||||
@@ -31,7 +38,7 @@ public class ExportApiRepositoryTests
|
||||
public async Task ProfileRepo_GetByIdAsync_ReturnsStoredProfile()
|
||||
{
|
||||
// Arrange
|
||||
var repo = new InMemoryExportProfileRepository(NullLogger<InMemoryExportProfileRepository>.Instance);
|
||||
var repo = new InMemoryExportProfileRepository(NullLogger<InMemoryExportProfileRepository>.Instance, _timeProvider);
|
||||
var profile = CreateTestProfile();
|
||||
await repo.CreateAsync(profile);
|
||||
|
||||
@@ -48,7 +55,7 @@ public class ExportApiRepositoryTests
|
||||
public async Task ProfileRepo_GetByIdAsync_ReturnsNull_WhenNotFound()
|
||||
{
|
||||
// Arrange
|
||||
var repo = new InMemoryExportProfileRepository(NullLogger<InMemoryExportProfileRepository>.Instance);
|
||||
var repo = new InMemoryExportProfileRepository(NullLogger<InMemoryExportProfileRepository>.Instance, _timeProvider);
|
||||
|
||||
// Act
|
||||
var retrieved = await repo.GetByIdAsync(_tenantId, Guid.NewGuid());
|
||||
@@ -61,7 +68,7 @@ public class ExportApiRepositoryTests
|
||||
public async Task ProfileRepo_GetByIdAsync_ReturnsNull_WhenWrongTenant()
|
||||
{
|
||||
// Arrange
|
||||
var repo = new InMemoryExportProfileRepository(NullLogger<InMemoryExportProfileRepository>.Instance);
|
||||
var repo = new InMemoryExportProfileRepository(NullLogger<InMemoryExportProfileRepository>.Instance, _timeProvider);
|
||||
var profile = CreateTestProfile();
|
||||
await repo.CreateAsync(profile);
|
||||
|
||||
@@ -76,7 +83,7 @@ public class ExportApiRepositoryTests
|
||||
public async Task ProfileRepo_ListAsync_ReturnsAllProfilesForTenant()
|
||||
{
|
||||
// Arrange
|
||||
var repo = new InMemoryExportProfileRepository(NullLogger<InMemoryExportProfileRepository>.Instance);
|
||||
var repo = new InMemoryExportProfileRepository(NullLogger<InMemoryExportProfileRepository>.Instance, _timeProvider);
|
||||
var profile1 = CreateTestProfile("Profile 1");
|
||||
var profile2 = CreateTestProfile("Profile 2");
|
||||
var otherTenantProfile = CreateTestProfile("Other Tenant") with { TenantId = Guid.NewGuid() };
|
||||
@@ -98,7 +105,7 @@ public class ExportApiRepositoryTests
|
||||
public async Task ProfileRepo_ListAsync_FiltersByStatus()
|
||||
{
|
||||
// Arrange
|
||||
var repo = new InMemoryExportProfileRepository(NullLogger<InMemoryExportProfileRepository>.Instance);
|
||||
var repo = new InMemoryExportProfileRepository(NullLogger<InMemoryExportProfileRepository>.Instance, _timeProvider);
|
||||
var activeProfile = CreateTestProfile("Active") with { Status = ExportProfileStatus.Active };
|
||||
var draftProfile = CreateTestProfile("Draft") with { Status = ExportProfileStatus.Draft };
|
||||
|
||||
@@ -118,7 +125,7 @@ public class ExportApiRepositoryTests
|
||||
public async Task ProfileRepo_ListAsync_FiltersByKind()
|
||||
{
|
||||
// Arrange
|
||||
var repo = new InMemoryExportProfileRepository(NullLogger<InMemoryExportProfileRepository>.Instance);
|
||||
var repo = new InMemoryExportProfileRepository(NullLogger<InMemoryExportProfileRepository>.Instance, _timeProvider);
|
||||
var adhocProfile = CreateTestProfile("AdHoc") with { Kind = ExportProfileKind.AdHoc };
|
||||
var scheduledProfile = CreateTestProfile("Scheduled") with { Kind = ExportProfileKind.Scheduled };
|
||||
|
||||
@@ -138,7 +145,7 @@ public class ExportApiRepositoryTests
|
||||
public async Task ProfileRepo_ListAsync_SearchesByName()
|
||||
{
|
||||
// Arrange
|
||||
var repo = new InMemoryExportProfileRepository(NullLogger<InMemoryExportProfileRepository>.Instance);
|
||||
var repo = new InMemoryExportProfileRepository(NullLogger<InMemoryExportProfileRepository>.Instance, _timeProvider);
|
||||
var profile1 = CreateTestProfile("Daily SBOM Export");
|
||||
var profile2 = CreateTestProfile("Weekly VEX Export");
|
||||
|
||||
@@ -158,7 +165,7 @@ public class ExportApiRepositoryTests
|
||||
public async Task ProfileRepo_UpdateAsync_ModifiesProfile()
|
||||
{
|
||||
// Arrange
|
||||
var repo = new InMemoryExportProfileRepository(NullLogger<InMemoryExportProfileRepository>.Instance);
|
||||
var repo = new InMemoryExportProfileRepository(NullLogger<InMemoryExportProfileRepository>.Instance, _timeProvider);
|
||||
var profile = CreateTestProfile();
|
||||
await repo.CreateAsync(profile);
|
||||
|
||||
@@ -179,7 +186,7 @@ public class ExportApiRepositoryTests
|
||||
public async Task ProfileRepo_ArchiveAsync_SetsArchivedStatus()
|
||||
{
|
||||
// Arrange
|
||||
var repo = new InMemoryExportProfileRepository(NullLogger<InMemoryExportProfileRepository>.Instance);
|
||||
var repo = new InMemoryExportProfileRepository(NullLogger<InMemoryExportProfileRepository>.Instance, _timeProvider);
|
||||
var profile = CreateTestProfile();
|
||||
await repo.CreateAsync(profile);
|
||||
|
||||
@@ -193,13 +200,15 @@ public class ExportApiRepositoryTests
|
||||
Assert.NotNull(retrieved);
|
||||
Assert.Equal(ExportProfileStatus.Archived, retrieved.Status);
|
||||
Assert.NotNull(retrieved.ArchivedAt);
|
||||
Assert.Equal(_timeProvider.GetUtcNow(), retrieved.ArchivedAt);
|
||||
Assert.Equal(_timeProvider.GetUtcNow(), retrieved.UpdatedAt);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task ProfileRepo_IsNameUniqueAsync_ReturnsTrueForUniqueName()
|
||||
{
|
||||
// Arrange
|
||||
var repo = new InMemoryExportProfileRepository(NullLogger<InMemoryExportProfileRepository>.Instance);
|
||||
var repo = new InMemoryExportProfileRepository(NullLogger<InMemoryExportProfileRepository>.Instance, _timeProvider);
|
||||
var profile = CreateTestProfile("Existing Profile");
|
||||
await repo.CreateAsync(profile);
|
||||
|
||||
@@ -214,7 +223,7 @@ public class ExportApiRepositoryTests
|
||||
public async Task ProfileRepo_IsNameUniqueAsync_ReturnsFalseForDuplicateName()
|
||||
{
|
||||
// Arrange
|
||||
var repo = new InMemoryExportProfileRepository(NullLogger<InMemoryExportProfileRepository>.Instance);
|
||||
var repo = new InMemoryExportProfileRepository(NullLogger<InMemoryExportProfileRepository>.Instance, _timeProvider);
|
||||
var profile = CreateTestProfile("Existing Profile");
|
||||
await repo.CreateAsync(profile);
|
||||
|
||||
@@ -229,7 +238,7 @@ public class ExportApiRepositoryTests
|
||||
public async Task ProfileRepo_IsNameUniqueAsync_ExcludesSpecifiedProfile()
|
||||
{
|
||||
// Arrange
|
||||
var repo = new InMemoryExportProfileRepository(NullLogger<InMemoryExportProfileRepository>.Instance);
|
||||
var repo = new InMemoryExportProfileRepository(NullLogger<InMemoryExportProfileRepository>.Instance, _timeProvider);
|
||||
var profile = CreateTestProfile("Existing Profile");
|
||||
await repo.CreateAsync(profile);
|
||||
|
||||
@@ -248,7 +257,7 @@ public class ExportApiRepositoryTests
|
||||
public async Task RunRepo_CreateAsync_StoresRun()
|
||||
{
|
||||
// Arrange
|
||||
var repo = new InMemoryExportRunRepository(NullLogger<InMemoryExportRunRepository>.Instance);
|
||||
var repo = new InMemoryExportRunRepository(NullLogger<InMemoryExportRunRepository>.Instance, _timeProvider);
|
||||
var run = CreateTestRun();
|
||||
|
||||
// Act
|
||||
@@ -263,7 +272,7 @@ public class ExportApiRepositoryTests
|
||||
public async Task RunRepo_GetByIdAsync_ReturnsStoredRun()
|
||||
{
|
||||
// Arrange
|
||||
var repo = new InMemoryExportRunRepository(NullLogger<InMemoryExportRunRepository>.Instance);
|
||||
var repo = new InMemoryExportRunRepository(NullLogger<InMemoryExportRunRepository>.Instance, _timeProvider);
|
||||
var run = CreateTestRun();
|
||||
await repo.CreateAsync(run);
|
||||
|
||||
@@ -279,7 +288,7 @@ public class ExportApiRepositoryTests
|
||||
public async Task RunRepo_ListAsync_FiltersByProfileId()
|
||||
{
|
||||
// Arrange
|
||||
var repo = new InMemoryExportRunRepository(NullLogger<InMemoryExportRunRepository>.Instance);
|
||||
var repo = new InMemoryExportRunRepository(NullLogger<InMemoryExportRunRepository>.Instance, _timeProvider);
|
||||
var profileId1 = Guid.NewGuid();
|
||||
var profileId2 = Guid.NewGuid();
|
||||
|
||||
@@ -302,7 +311,7 @@ public class ExportApiRepositoryTests
|
||||
public async Task RunRepo_ListAsync_FiltersByStatus()
|
||||
{
|
||||
// Arrange
|
||||
var repo = new InMemoryExportRunRepository(NullLogger<InMemoryExportRunRepository>.Instance);
|
||||
var repo = new InMemoryExportRunRepository(NullLogger<InMemoryExportRunRepository>.Instance, _timeProvider);
|
||||
var runningRun = CreateTestRun() with { Status = ExportRunStatus.Running };
|
||||
var completedRun = CreateTestRun() with { Status = ExportRunStatus.Completed };
|
||||
|
||||
@@ -322,7 +331,7 @@ public class ExportApiRepositoryTests
|
||||
public async Task RunRepo_CancelAsync_CancelsQueuedRun()
|
||||
{
|
||||
// Arrange
|
||||
var repo = new InMemoryExportRunRepository(NullLogger<InMemoryExportRunRepository>.Instance);
|
||||
var repo = new InMemoryExportRunRepository(NullLogger<InMemoryExportRunRepository>.Instance, _timeProvider);
|
||||
var run = CreateTestRun() with { Status = ExportRunStatus.Queued };
|
||||
await repo.CreateAsync(run);
|
||||
|
||||
@@ -334,13 +343,14 @@ public class ExportApiRepositoryTests
|
||||
|
||||
var retrieved = await repo.GetByIdAsync(_tenantId, run.RunId);
|
||||
Assert.Equal(ExportRunStatus.Cancelled, retrieved?.Status);
|
||||
Assert.Equal(_timeProvider.GetUtcNow(), retrieved?.CompletedAt);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task RunRepo_CancelAsync_CancelsRunningRun()
|
||||
{
|
||||
// Arrange
|
||||
var repo = new InMemoryExportRunRepository(NullLogger<InMemoryExportRunRepository>.Instance);
|
||||
var repo = new InMemoryExportRunRepository(NullLogger<InMemoryExportRunRepository>.Instance, _timeProvider);
|
||||
var run = CreateTestRun() with { Status = ExportRunStatus.Running };
|
||||
await repo.CreateAsync(run);
|
||||
|
||||
@@ -355,7 +365,7 @@ public class ExportApiRepositoryTests
|
||||
public async Task RunRepo_CancelAsync_ReturnsFalseForCompletedRun()
|
||||
{
|
||||
// Arrange
|
||||
var repo = new InMemoryExportRunRepository(NullLogger<InMemoryExportRunRepository>.Instance);
|
||||
var repo = new InMemoryExportRunRepository(NullLogger<InMemoryExportRunRepository>.Instance, _timeProvider);
|
||||
var run = CreateTestRun() with { Status = ExportRunStatus.Completed };
|
||||
await repo.CreateAsync(run);
|
||||
|
||||
@@ -370,7 +380,7 @@ public class ExportApiRepositoryTests
|
||||
public async Task RunRepo_GetActiveRunsCountAsync_CountsRunningRuns()
|
||||
{
|
||||
// Arrange
|
||||
var repo = new InMemoryExportRunRepository(NullLogger<InMemoryExportRunRepository>.Instance);
|
||||
var repo = new InMemoryExportRunRepository(NullLogger<InMemoryExportRunRepository>.Instance, _timeProvider);
|
||||
|
||||
await repo.CreateAsync(CreateTestRun() with { Status = ExportRunStatus.Running });
|
||||
await repo.CreateAsync(CreateTestRun() with { Status = ExportRunStatus.Running });
|
||||
@@ -388,7 +398,7 @@ public class ExportApiRepositoryTests
|
||||
public async Task RunRepo_GetActiveRunsCountAsync_FiltersByProfileId()
|
||||
{
|
||||
// Arrange
|
||||
var repo = new InMemoryExportRunRepository(NullLogger<InMemoryExportRunRepository>.Instance);
|
||||
var repo = new InMemoryExportRunRepository(NullLogger<InMemoryExportRunRepository>.Instance, _timeProvider);
|
||||
var profileId = Guid.NewGuid();
|
||||
|
||||
await repo.CreateAsync(CreateTestRun() with { ProfileId = profileId, Status = ExportRunStatus.Running });
|
||||
@@ -405,7 +415,7 @@ public class ExportApiRepositoryTests
|
||||
public async Task RunRepo_GetQueuedRunsCountAsync_CountsQueuedRuns()
|
||||
{
|
||||
// Arrange
|
||||
var repo = new InMemoryExportRunRepository(NullLogger<InMemoryExportRunRepository>.Instance);
|
||||
var repo = new InMemoryExportRunRepository(NullLogger<InMemoryExportRunRepository>.Instance, _timeProvider);
|
||||
|
||||
await repo.CreateAsync(CreateTestRun() with { Status = ExportRunStatus.Queued });
|
||||
await repo.CreateAsync(CreateTestRun() with { Status = ExportRunStatus.Queued });
|
||||
@@ -418,6 +428,23 @@ public class ExportApiRepositoryTests
|
||||
Assert.Equal(2, count);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task RunRepo_DequeueNextRunAsync_MarksRunAsRunning()
|
||||
{
|
||||
// Arrange
|
||||
var repo = new InMemoryExportRunRepository(NullLogger<InMemoryExportRunRepository>.Instance, _timeProvider);
|
||||
var run = CreateTestRun() with { Status = ExportRunStatus.Queued };
|
||||
await repo.CreateAsync(run);
|
||||
|
||||
// Act
|
||||
var dequeued = await repo.DequeueNextRunAsync(_tenantId);
|
||||
|
||||
// Assert
|
||||
Assert.NotNull(dequeued);
|
||||
Assert.Equal(ExportRunStatus.Running, dequeued!.Status);
|
||||
Assert.Equal(_timeProvider.GetUtcNow(), dequeued.StartedAt);
|
||||
}
|
||||
|
||||
// ========================================================================
|
||||
// Artifact Repository Tests
|
||||
// ========================================================================
|
||||
@@ -507,8 +534,8 @@ public class ExportApiRepositoryTests
|
||||
Description = "Test profile description",
|
||||
Kind = ExportProfileKind.AdHoc,
|
||||
Status = ExportProfileStatus.Active,
|
||||
CreatedAt = DateTimeOffset.UtcNow,
|
||||
UpdatedAt = DateTimeOffset.UtcNow
|
||||
CreatedAt = _timeProvider.GetUtcNow(),
|
||||
UpdatedAt = _timeProvider.GetUtcNow()
|
||||
};
|
||||
}
|
||||
|
||||
@@ -522,7 +549,7 @@ public class ExportApiRepositoryTests
|
||||
Status = ExportRunStatus.Running,
|
||||
Trigger = ExportRunTrigger.Api,
|
||||
CorrelationId = Guid.NewGuid().ToString(),
|
||||
CreatedAt = DateTimeOffset.UtcNow
|
||||
CreatedAt = _timeProvider.GetUtcNow()
|
||||
};
|
||||
}
|
||||
|
||||
@@ -539,7 +566,8 @@ public class ExportApiRepositoryTests
|
||||
SizeBytes = 1024,
|
||||
ContentType = "application/json",
|
||||
Checksum = "sha256:abc123",
|
||||
CreatedAt = DateTimeOffset.UtcNow
|
||||
CreatedAt = _timeProvider.GetUtcNow()
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
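Aside, illustrative only: the repository tests above pin time with a FakeTimeProvider so assertions such as Assert.Equal(_timeProvider.GetUtcNow(), retrieved.ArchivedAt) compare exact instants. A minimal controllable clock with the assumed shape (the real StellaOps.TestKit type may differ) looks like this:

using System;

public sealed class ManualClock : TimeProvider
{
    private DateTimeOffset _utcNow;

    public ManualClock(DateTimeOffset start) => _utcNow = start;

    // Tests read a frozen "now" instead of the wall clock.
    public override DateTimeOffset GetUtcNow() => _utcNow;

    // Deterministically move time forward, e.g. to cross a retention window.
    public void Advance(TimeSpan delta) => _utcNow += delta;
}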
@@ -0,0 +1,31 @@
|
||||
using Microsoft.Extensions.DependencyInjection;
|
||||
using StellaOps.ExportCenter.WebService.Api;
|
||||
using Xunit;
|
||||
|
||||
namespace StellaOps.ExportCenter.Tests.Api;
|
||||
|
||||
public sealed class ExportApiServiceCollectionExtensionsTests
|
||||
{
|
||||
[Fact]
|
||||
public void AddExportApiServices_Throws_WhenInMemoryNotAllowed()
|
||||
{
|
||||
var services = new ServiceCollection();
|
||||
|
||||
var exception = Assert.Throws<InvalidOperationException>(() =>
|
||||
services.AddExportApiServices(_ => { }, allowInMemoryRepositories: false));
|
||||
|
||||
Assert.Contains("In-memory export repositories are disabled", exception.Message);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void AddExportApiServices_AllowsExplicitInMemoryRegistration()
|
||||
{
|
||||
var services = new ServiceCollection();
|
||||
|
||||
services.AddExportApiServices(_ => { }, allowInMemoryRepositories: true);
|
||||
var provider = services.BuildServiceProvider();
|
||||
|
||||
var repo = provider.GetService<IExportProfileRepository>();
|
||||
Assert.NotNull(repo);
|
||||
}
|
||||
}
|
||||
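Aside, illustrative only: several constructors in this commit now take an IGuidProvider, and the tests assert identifiers ending in ...0001, ...0002. A counter-backed source with that observable behaviour could look like the sketch below (assumed shape; not the actual StellaOps.Determinism implementation):

using System;

public interface IGuidSource
{
    Guid NewGuid();
}

public sealed class CountingGuidSource : IGuidSource
{
    private int _next;

    public Guid NewGuid()
    {
        var value = ++_next;
        var bytes = new byte[16];
        // The last four bytes render last in the "N"/"D" formats, so a big-endian
        // counter there yields ...000000000001, ...000000000002, and so on.
        bytes[12] = (byte)(value >> 24);
        bytes[13] = (byte)(value >> 16);
        bytes[14] = (byte)(value >> 8);
        bytes[15] = (byte)value;
        return new Guid(bytes);
    }
}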
@@ -1,5 +1,6 @@
|
||||
using Microsoft.Extensions.Logging.Abstractions;
|
||||
using StellaOps.ExportCenter.WebService.Api;
|
||||
using StellaOps.Determinism;
|
||||
|
||||
namespace StellaOps.ExportCenter.Tests.Api;
|
||||
|
||||
@@ -12,6 +13,7 @@ public class ExportAuditServiceTests
|
||||
{
|
||||
_auditService = new ExportAuditService(
|
||||
NullLogger<ExportAuditService>.Instance,
|
||||
new SequentialGuidProvider(),
|
||||
TimeProvider.System);
|
||||
}
|
||||
|
||||
|
||||
@@ -0,0 +1,59 @@
|
||||
using Microsoft.AspNetCore.Authorization;
|
||||
using Microsoft.AspNetCore.Builder;
|
||||
using Microsoft.AspNetCore.Routing;
|
||||
using Microsoft.Extensions.Hosting;
|
||||
using StellaOps.ExportCenter.WebService;
|
||||
using Xunit;
|
||||
|
||||
namespace StellaOps.ExportCenter.Tests.Api;
|
||||
|
||||
public sealed class OpenApiDiscoveryEndpointsTests
|
||||
{
|
||||
[Fact]
|
||||
public void MapOpenApiDiscovery_AllowsAnonymousWhenConfigured()
|
||||
{
|
||||
var builder = CreateBuilder();
|
||||
builder.Configuration["OpenApi:AllowAnonymous"] = "true";
|
||||
|
||||
var app = builder.Build();
|
||||
app.MapOpenApiDiscovery();
|
||||
|
||||
var endpoint = GetEndpoint(app, "/.well-known/openapi");
|
||||
var allowAnonymous = endpoint.Metadata.GetMetadata<IAllowAnonymous>();
|
||||
|
||||
Assert.NotNull(allowAnonymous);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void MapOpenApiDiscovery_DoesNotAllowAnonymousWhenDisabled()
|
||||
{
|
||||
var builder = CreateBuilder();
|
||||
builder.Configuration["OpenApi:AllowAnonymous"] = "false";
|
||||
|
||||
var app = builder.Build();
|
||||
app.MapOpenApiDiscovery();
|
||||
|
||||
var endpoint = GetEndpoint(app, "/.well-known/openapi");
|
||||
var allowAnonymous = endpoint.Metadata.GetMetadata<IAllowAnonymous>();
|
||||
|
||||
Assert.Null(allowAnonymous);
|
||||
}
|
||||
|
||||
private static RouteEndpoint GetEndpoint(IEndpointRouteBuilder app, string pattern)
|
||||
{
|
||||
var endpoints = app.DataSources.SelectMany(source => source.Endpoints).OfType<RouteEndpoint>();
|
||||
return endpoints.Single(endpoint => string.Equals(endpoint.RoutePattern.RawText, pattern, StringComparison.Ordinal));
|
||||
}
|
||||
|
||||
private static WebApplicationBuilder CreateBuilder()
|
||||
{
|
||||
var contentRoot = Path.Combine(Path.GetTempPath(), Guid.NewGuid().ToString("N"));
|
||||
Directory.CreateDirectory(contentRoot);
|
||||
|
||||
return WebApplication.CreateBuilder(new WebApplicationOptions
|
||||
{
|
||||
EnvironmentName = Environments.Production,
|
||||
ContentRootPath = contentRoot
|
||||
});
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,40 @@
|
||||
using Microsoft.Extensions.Logging.Abstractions;
|
||||
using StellaOps.Determinism;
|
||||
using StellaOps.ExportCenter.Client.Models;
|
||||
using StellaOps.ExportCenter.Tests;
|
||||
using StellaOps.ExportCenter.WebService.AuditBundle;
|
||||
using Xunit;
|
||||
|
||||
namespace StellaOps.ExportCenter.Tests.AuditBundle;
|
||||
|
||||
public sealed class AuditBundleJobHandlerTests
|
||||
{
|
||||
[Fact]
|
||||
public async Task CreateBundleAsync_UsesGuidProvider()
|
||||
{
|
||||
var timeProvider = new FakeTimeProvider(new DateTimeOffset(2025, 1, 2, 10, 0, 0, TimeSpan.Zero));
|
||||
var guidProvider = new SequentialGuidProvider();
|
||||
var handler = new AuditBundleJobHandler(
|
||||
NullLogger<AuditBundleJobHandler>.Instance,
|
||||
guidProvider,
|
||||
timeProvider);
|
||||
|
||||
var request = new CreateAuditBundleRequest(
|
||||
new BundleSubjectRefDto(
|
||||
"container",
|
||||
"example-image",
|
||||
new Dictionary<string, string> { ["sha256"] = "abc123" }),
|
||||
TimeWindow: null,
|
||||
IncludeContent: new AuditBundleContentSelection(
|
||||
VulnReports: false,
|
||||
Sbom: false,
|
||||
VexDecisions: false,
|
||||
PolicyEvaluations: false,
|
||||
Attestations: false));
|
||||
|
||||
var result = await handler.CreateBundleAsync(request, "actor-1", "Actor One", CancellationToken.None);
|
||||
|
||||
Assert.NotNull(result.Response);
|
||||
Assert.Equal("bndl-00000000000000000000000000000001", result.Response!.BundleId);
|
||||
}
|
||||
}
|
||||
@@ -46,8 +46,8 @@ public sealed class DeprecationHeaderExtensionsTests
|
||||
{
|
||||
var context = CreateHttpContext();
|
||||
var info = new DeprecationInfo(
|
||||
DeprecatedAt: DateTimeOffset.UtcNow,
|
||||
SunsetAt: DateTimeOffset.UtcNow.AddMonths(6),
|
||||
DeprecatedAt: new DateTimeOffset(2025, 1, 1, 0, 0, 0, TimeSpan.Zero),
|
||||
SunsetAt: new DateTimeOffset(2025, 7, 1, 0, 0, 0, TimeSpan.Zero),
|
||||
SuccessorPath: "/v1/new",
|
||||
DocumentationUrl: "https://docs.example.com/migration");
|
||||
|
||||
@@ -76,8 +76,8 @@ public sealed class DeprecationHeaderExtensionsTests
|
||||
{
|
||||
var context = CreateHttpContext();
|
||||
var info = new DeprecationInfo(
|
||||
DeprecatedAt: DateTimeOffset.UtcNow,
|
||||
SunsetAt: DateTimeOffset.UtcNow.AddMonths(6),
|
||||
DeprecatedAt: new DateTimeOffset(2025, 1, 1, 0, 0, 0, TimeSpan.Zero),
|
||||
SunsetAt: new DateTimeOffset(2025, 7, 1, 0, 0, 0, TimeSpan.Zero),
|
||||
SuccessorPath: "/v1/new",
|
||||
Reason: "Custom deprecation reason");
|
||||
|
||||
@@ -123,8 +123,8 @@ public sealed class DeprecationHeaderExtensionsTests
|
||||
private static DeprecationInfo CreateSampleDeprecationInfo()
|
||||
{
|
||||
return new DeprecationInfo(
|
||||
DeprecatedAt: DateTimeOffset.UtcNow,
|
||||
SunsetAt: DateTimeOffset.UtcNow.AddMonths(6),
|
||||
DeprecatedAt: new DateTimeOffset(2025, 1, 1, 0, 0, 0, TimeSpan.Zero),
|
||||
SunsetAt: new DateTimeOffset(2025, 7, 1, 0, 0, 0, TimeSpan.Zero),
|
||||
SuccessorPath: "/v1/new-endpoint");
|
||||
}
|
||||
}
|
||||
|
||||
@@ -8,46 +8,54 @@ public sealed class DeprecationInfoTests
|
||||
[Fact]
|
||||
public void IsPastSunset_WhenSunsetInFuture_ReturnsFalse()
|
||||
{
|
||||
var now = new DateTimeOffset(2025, 1, 1, 0, 0, 0, TimeSpan.Zero);
|
||||
var timeProvider = new FixedTimeProvider(now);
|
||||
var info = new DeprecationInfo(
|
||||
DeprecatedAt: DateTimeOffset.UtcNow.AddMonths(-1),
|
||||
SunsetAt: DateTimeOffset.UtcNow.AddMonths(6),
|
||||
DeprecatedAt: now.AddMonths(-1),
|
||||
SunsetAt: now.AddMonths(6),
|
||||
SuccessorPath: "/v1/new");
|
||||
|
||||
Assert.False(info.IsPastSunset);
|
||||
Assert.False(info.IsPastSunsetAt(timeProvider));
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void IsPastSunset_WhenSunsetInPast_ReturnsTrue()
|
||||
{
|
||||
var now = new DateTimeOffset(2025, 1, 1, 0, 0, 0, TimeSpan.Zero);
|
||||
var timeProvider = new FixedTimeProvider(now);
|
||||
var info = new DeprecationInfo(
|
||||
DeprecatedAt: DateTimeOffset.UtcNow.AddMonths(-12),
|
||||
SunsetAt: DateTimeOffset.UtcNow.AddMonths(-1),
|
||||
DeprecatedAt: now.AddMonths(-12),
|
||||
SunsetAt: now.AddMonths(-1),
|
||||
SuccessorPath: "/v1/new");
|
||||
|
||||
Assert.True(info.IsPastSunset);
|
||||
Assert.True(info.IsPastSunsetAt(timeProvider));
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void DaysUntilSunset_CalculatesCorrectly()
|
||||
{
|
||||
var sunset = DateTimeOffset.UtcNow.AddDays(30);
|
||||
var now = new DateTimeOffset(2025, 1, 1, 0, 0, 0, TimeSpan.Zero);
|
||||
var timeProvider = new FixedTimeProvider(now);
|
||||
var sunset = now.AddDays(30);
|
||||
var info = new DeprecationInfo(
|
||||
DeprecatedAt: DateTimeOffset.UtcNow,
|
||||
DeprecatedAt: now,
|
||||
SunsetAt: sunset,
|
||||
SuccessorPath: "/v1/new");
|
||||
|
||||
Assert.Equal(30, info.DaysUntilSunset);
|
||||
Assert.Equal(30, info.DaysUntilSunsetAt(timeProvider));
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void DaysUntilSunset_WhenPastSunset_ReturnsZero()
|
||||
{
|
||||
var now = new DateTimeOffset(2025, 1, 1, 0, 0, 0, TimeSpan.Zero);
|
||||
var timeProvider = new FixedTimeProvider(now);
|
||||
var info = new DeprecationInfo(
|
||||
DeprecatedAt: DateTimeOffset.UtcNow.AddMonths(-12),
|
||||
SunsetAt: DateTimeOffset.UtcNow.AddMonths(-1),
|
||||
DeprecatedAt: now.AddMonths(-12),
|
||||
SunsetAt: now.AddMonths(-1),
|
||||
SuccessorPath: "/v1/new");
|
||||
|
||||
Assert.Equal(0, info.DaysUntilSunset);
|
||||
Assert.Equal(0, info.DaysUntilSunsetAt(timeProvider));
|
||||
}
|
||||
|
||||
[Fact]
|
||||
@@ -69,4 +77,16 @@ public sealed class DeprecationInfoTests
|
||||
Assert.Equal("https://docs.example.com", info.DocumentationUrl);
|
||||
Assert.Equal("Replaced by new API", info.Reason);
|
||||
}
|
||||
|
||||
private sealed class FixedTimeProvider : TimeProvider
|
||||
{
|
||||
private readonly DateTimeOffset _utcNow;
|
||||
|
||||
public FixedTimeProvider(DateTimeOffset utcNow)
|
||||
{
|
||||
_utcNow = utcNow;
|
||||
}
|
||||
|
||||
public override DateTimeOffset GetUtcNow() => _utcNow;
|
||||
}
|
||||
}
|
||||
|
||||
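Aside, illustrative only: the DeprecationInfoTests above now pass a TimeProvider into IsPastSunsetAt/DaysUntilSunsetAt instead of reading DateTimeOffset.UtcNow. A sketch of what such clock-parameterised helpers might look like (assumed and simplified; the rounding is a guess consistent with the 30-day assertion):

using System;

public sealed record DeprecationSketch(DateTimeOffset SunsetAt)
{
    public bool IsPastSunsetAt(TimeProvider clock) => clock.GetUtcNow() >= SunsetAt;

    public int DaysUntilSunsetAt(TimeProvider clock)
    {
        var remaining = SunsetAt - clock.GetUtcNow();
        return remaining > TimeSpan.Zero ? (int)remaining.TotalDays : 0; // clamped at zero once past sunset
    }
}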
@@ -1,6 +1,7 @@
|
||||
using Microsoft.Extensions.Logging.Abstractions;
|
||||
using StellaOps.ExportCenter.Core.Domain;
|
||||
using StellaOps.ExportCenter.WebService.Distribution;
|
||||
using StellaOps.Determinism;
|
||||
|
||||
namespace StellaOps.ExportCenter.Tests.Distribution;
|
||||
|
||||
@@ -9,17 +10,20 @@ public sealed class ExportDistributionLifecycleTests
|
||||
private readonly InMemoryExportDistributionRepository _repository;
|
||||
private readonly ExportDistributionLifecycle _lifecycle;
|
||||
private readonly TestTimeProvider _timeProvider;
|
||||
private readonly IGuidProvider _guidProvider;
|
||||
private readonly Guid _tenantId = Guid.NewGuid();
|
||||
private readonly Guid _runId = Guid.NewGuid();
|
||||
private readonly Guid _profileId = Guid.NewGuid();
|
||||
|
||||
public ExportDistributionLifecycleTests()
|
||||
{
|
||||
_repository = new InMemoryExportDistributionRepository();
|
||||
_timeProvider = new TestTimeProvider(new DateTimeOffset(2024, 6, 15, 12, 0, 0, TimeSpan.Zero));
|
||||
_guidProvider = new SequentialGuidProvider();
|
||||
_repository = new InMemoryExportDistributionRepository(_timeProvider);
|
||||
_lifecycle = new ExportDistributionLifecycle(
|
||||
_repository,
|
||||
NullLogger<ExportDistributionLifecycle>.Instance,
|
||||
_guidProvider,
|
||||
_timeProvider);
|
||||
}
|
||||
|
||||
|
||||
@@ -1,20 +1,30 @@
|
||||
using Microsoft.Extensions.Options;
|
||||
using StellaOps.ExportCenter.Core.Domain;
|
||||
using StellaOps.ExportCenter.Tests;
|
||||
using StellaOps.ExportCenter.WebService.Distribution;
|
||||
|
||||
namespace StellaOps.ExportCenter.Tests.Distribution;
|
||||
|
||||
public sealed class InMemoryExportDistributionRepositoryTests
|
||||
{
|
||||
private readonly InMemoryExportDistributionRepository _repository = new();
|
||||
private readonly FakeTimeProvider _timeProvider;
|
||||
private readonly InMemoryExportDistributionRepository _repository;
|
||||
private readonly Guid _tenantId = Guid.NewGuid();
|
||||
private readonly Guid _runId = Guid.NewGuid();
|
||||
|
||||
public InMemoryExportDistributionRepositoryTests()
|
||||
{
|
||||
_timeProvider = new FakeTimeProvider(new DateTimeOffset(2025, 1, 1, 0, 0, 0, TimeSpan.Zero));
|
||||
_repository = new InMemoryExportDistributionRepository(_timeProvider);
|
||||
}
|
||||
|
||||
private ExportDistribution CreateDistribution(
|
||||
Guid? distributionId = null,
|
||||
Guid? tenantId = null,
|
||||
Guid? runId = null,
|
||||
string? idempotencyKey = null,
|
||||
ExportDistributionStatus status = ExportDistributionStatus.Pending)
|
||||
ExportDistributionStatus status = ExportDistributionStatus.Pending,
|
||||
DateTimeOffset? createdAt = null)
|
||||
{
|
||||
return new ExportDistribution
|
||||
{
|
||||
@@ -28,7 +38,7 @@ public sealed class InMemoryExportDistributionRepositoryTests
|
||||
ArtifactHash = "sha256:abc123",
|
||||
SizeBytes = 1024,
|
||||
IdempotencyKey = idempotencyKey,
|
||||
CreatedAt = DateTimeOffset.UtcNow
|
||||
CreatedAt = createdAt ?? _timeProvider.GetUtcNow()
|
||||
};
|
||||
}
|
||||
|
||||
@@ -138,7 +148,7 @@ public sealed class InMemoryExportDistributionRepositoryTests
|
||||
[Fact]
|
||||
public async Task ListExpiredAsync_ReturnsOnlyExpired()
|
||||
{
|
||||
var now = DateTimeOffset.UtcNow;
|
||||
var now = _timeProvider.GetUtcNow();
|
||||
|
||||
var expired = new ExportDistribution
|
||||
{
|
||||
@@ -151,7 +161,7 @@ public sealed class InMemoryExportDistributionRepositoryTests
|
||||
ArtifactPath = "/test",
|
||||
RetentionExpiresAt = now.AddDays(-1),
|
||||
MarkedForDeletion = false,
|
||||
CreatedAt = now.AddDays(-30)
|
||||
CreatedAt = now.AddHours(-1)
|
||||
};
|
||||
|
||||
var notExpired = new ExportDistribution
|
||||
@@ -165,7 +175,7 @@ public sealed class InMemoryExportDistributionRepositoryTests
|
||||
ArtifactPath = "/test",
|
||||
RetentionExpiresAt = now.AddDays(30),
|
||||
MarkedForDeletion = false,
|
||||
CreatedAt = now.AddDays(-30)
|
||||
CreatedAt = now.AddHours(-1)
|
||||
};
|
||||
|
||||
await _repository.CreateAsync(expired);
|
||||
@@ -273,6 +283,7 @@ public sealed class InMemoryExportDistributionRepositoryTests
|
||||
var updated = await _repository.GetByIdAsync(_tenantId, distribution.DistributionId);
|
||||
Assert.True(updated?.MarkedForDeletion);
|
||||
Assert.NotNull(updated?.DeletedAt);
|
||||
Assert.Equal(_timeProvider.GetUtcNow(), updated?.DeletedAt);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
@@ -339,4 +350,50 @@ public sealed class InMemoryExportDistributionRepositoryTests
|
||||
var result = _repository.ListByRunAsync(_tenantId, _runId).GetAwaiter().GetResult();
|
||||
Assert.Empty(result);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task PruneStale_RemovesEntriesBeyondRetention()
|
||||
{
|
||||
var options = Options.Create(new InMemoryExportDistributionOptions
|
||||
{
|
||||
RetentionPeriod = TimeSpan.FromHours(1),
|
||||
MaxEntries = 0
|
||||
});
|
||||
var timeProvider = new FakeTimeProvider(new DateTimeOffset(2025, 1, 1, 12, 0, 0, TimeSpan.Zero));
|
||||
var repository = new InMemoryExportDistributionRepository(timeProvider, options);
|
||||
|
||||
var stale = CreateDistribution(createdAt: timeProvider.GetUtcNow().AddHours(-2));
|
||||
var fresh = CreateDistribution(createdAt: timeProvider.GetUtcNow().AddMinutes(-30));
|
||||
|
||||
await repository.CreateAsync(stale);
|
||||
await repository.CreateAsync(fresh);
|
||||
|
||||
var result = await repository.ListByRunAsync(_tenantId, _runId);
|
||||
|
||||
Assert.Single(result);
|
||||
Assert.Equal(fresh.DistributionId, result[0].DistributionId);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task PruneStale_RespectsMaxEntries()
|
||||
{
|
||||
var options = Options.Create(new InMemoryExportDistributionOptions
|
||||
{
|
||||
RetentionPeriod = TimeSpan.Zero,
|
||||
MaxEntries = 1
|
||||
});
|
||||
var timeProvider = new FakeTimeProvider(new DateTimeOffset(2025, 1, 1, 12, 0, 0, TimeSpan.Zero));
|
||||
var repository = new InMemoryExportDistributionRepository(timeProvider, options);
|
||||
|
||||
var older = CreateDistribution(createdAt: timeProvider.GetUtcNow().AddMinutes(-10));
|
||||
var newer = CreateDistribution(createdAt: timeProvider.GetUtcNow());
|
||||
|
||||
await repository.CreateAsync(older);
|
||||
await repository.CreateAsync(newer);
|
||||
|
||||
var result = await repository.ListByRunAsync(_tenantId, _runId);
|
||||
|
||||
Assert.Single(result);
|
||||
Assert.Equal(newer.DistributionId, result[0].DistributionId);
|
||||
}
|
||||
}
|
||||
|
||||
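Aside, illustrative only: the PruneStale tests above imply the in-memory repository trims itself using the injected clock: drop entries older than RetentionPeriod, then keep at most MaxEntries newest-first, with zero meaning "disabled". A simplified sketch of that policy (assumed, not the repository's actual code):

using System;
using System.Collections.Generic;
using System.Linq;

internal static class PruneSketch
{
    public static List<T> Prune<T>(
        IEnumerable<T> entries,
        Func<T, DateTimeOffset> createdAt,
        TimeProvider clock,
        TimeSpan retentionPeriod,
        int maxEntries)
    {
        var cutoff = clock.GetUtcNow() - retentionPeriod;

        // Zero retention means "no age limit"; otherwise drop anything older than the window.
        var kept = entries
            .Where(e => retentionPeriod <= TimeSpan.Zero || createdAt(e) >= cutoff)
            .OrderByDescending(createdAt)
            .ToList();

        // Zero max means "no size limit"; otherwise keep only the newest entries.
        return maxEntries > 0 ? kept.Take(maxEntries).ToList() : kept;
    }
}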
@@ -0,0 +1,51 @@
|
||||
using System.Net.Http;
|
||||
using Microsoft.Extensions.DependencyInjection;
|
||||
using Microsoft.Extensions.Http;
|
||||
using StellaOps.ExportCenter.WebService.Distribution.Oci;
|
||||
using Xunit;
|
||||
|
||||
namespace StellaOps.ExportCenter.Tests.Distribution.Oci;
|
||||
|
||||
public sealed class OciDistributionServiceExtensionsTests
|
||||
{
|
||||
[Fact]
|
||||
public void AddOciDistribution_AllowInsecureTls_UsesValidationCallback()
|
||||
{
|
||||
var services = new ServiceCollection();
|
||||
services.AddOciDistribution(options => options.AllowInsecureTls = true);
|
||||
|
||||
using var provider = services.BuildServiceProvider();
|
||||
var factory = provider.GetRequiredService<IHttpMessageHandlerFactory>();
|
||||
|
||||
var handler = factory.CreateHandler(OciDistributionOptions.HttpClientName);
|
||||
var primary = GetPrimaryHandler(handler);
|
||||
|
||||
Assert.NotNull(primary.ServerCertificateCustomValidationCallback);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void AddOciDistribution_DisallowInsecureTls_DoesNotSetValidationCallback()
|
||||
{
|
||||
var services = new ServiceCollection();
|
||||
services.AddOciDistribution(options => options.AllowInsecureTls = false);
|
||||
|
||||
using var provider = services.BuildServiceProvider();
|
||||
var factory = provider.GetRequiredService<IHttpMessageHandlerFactory>();
|
||||
|
||||
var handler = factory.CreateHandler(OciDistributionOptions.HttpClientName);
|
||||
var primary = GetPrimaryHandler(handler);
|
||||
|
||||
Assert.Null(primary.ServerCertificateCustomValidationCallback);
|
||||
}
|
||||
|
||||
private static HttpClientHandler GetPrimaryHandler(HttpMessageHandler handler)
|
||||
{
|
||||
var current = handler;
|
||||
while (current is DelegatingHandler delegating)
|
||||
{
|
||||
current = delegating.InnerHandler ?? throw new InvalidOperationException("Missing inner handler.");
|
||||
}
|
||||
|
||||
return Assert.IsType<HttpClientHandler>(current);
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,52 @@
|
||||
using System.Net.Http;
|
||||
using StellaOps.ExportCenter.WebService.Distribution.Oci;
|
||||
using Xunit;
|
||||
|
||||
namespace StellaOps.ExportCenter.Tests.Distribution.Oci;
|
||||
|
||||
public sealed class OciHttpClientFactoryTests
|
||||
{
|
||||
[Fact]
|
||||
public void CreateClient_ConfiguresBaseAddressAndTimeout()
|
||||
{
|
||||
using var httpClient = new HttpClient();
|
||||
var config = new OciRegistryConfig
|
||||
{
|
||||
Global = new RegistryGlobalSettings
|
||||
{
|
||||
Timeout = TimeSpan.FromSeconds(12),
|
||||
UserAgent = "StellaOps-Test"
|
||||
},
|
||||
Registries =
|
||||
{
|
||||
["registry.example.com"] = new RegistryEndpointConfig
|
||||
{
|
||||
Host = "registry.example.com",
|
||||
Port = 5000,
|
||||
Insecure = true
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
var factory = new OciHttpClientFactory(config, new FakeHttpClientFactory(httpClient));
|
||||
|
||||
var client = factory.CreateClient("registry.example.com");
|
||||
|
||||
Assert.Same(httpClient, client);
|
||||
Assert.Equal(new Uri("http://registry.example.com:5000"), client.BaseAddress);
|
||||
Assert.Equal(TimeSpan.FromSeconds(12), client.Timeout);
|
||||
Assert.Contains("StellaOps-Test", client.DefaultRequestHeaders.UserAgent.ToString());
|
||||
}
|
||||
|
||||
private sealed class FakeHttpClientFactory : IHttpClientFactory
|
||||
{
|
||||
private readonly HttpClient _client;
|
||||
|
||||
public FakeHttpClientFactory(HttpClient client)
|
||||
{
|
||||
_client = client;
|
||||
}
|
||||
|
||||
public HttpClient CreateClient(string name) => _client;
|
||||
}
|
||||
}
|
||||
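Aside, illustrative only: the assertion above expects an insecure registry entry for registry.example.com:5000 to resolve to http://registry.example.com:5000. The mapping is presumably just scheme selection plus host and port, for example:

using System;

internal static class RegistryAddressSketch
{
    // Insecure registries are addressed over plain HTTP; everything else over HTTPS.
    public static Uri ToBaseAddress(string host, int port, bool insecure) =>
        new UriBuilder(insecure ? Uri.UriSchemeHttp : Uri.UriSchemeHttps, host, port).Uri;
}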
@@ -0,0 +1,20 @@
|
||||
using Microsoft.Extensions.DependencyInjection;
|
||||
using StellaOps.ExportCenter.WebService.EvidenceLocker;
|
||||
using Xunit;
|
||||
|
||||
namespace StellaOps.ExportCenter.Tests.EvidenceLocker;
|
||||
|
||||
public sealed class EvidenceLockerServiceCollectionExtensionsTests
|
||||
{
|
||||
[Fact]
|
||||
public void AddExportEvidenceLocker_InvalidBaseUrl_Throws()
|
||||
{
|
||||
var services = new ServiceCollection();
|
||||
services.AddExportEvidenceLocker(options => options.BaseUrl = "not-a-url");
|
||||
|
||||
using var provider = services.BuildServiceProvider();
|
||||
|
||||
Assert.Throws<InvalidOperationException>(() =>
|
||||
provider.GetRequiredService<IExportEvidenceLockerClient>());
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,60 @@
|
||||
using StellaOps.Determinism;
|
||||
using StellaOps.ExportCenter.Tests;
|
||||
using StellaOps.ExportCenter.WebService.EvidenceLocker;
|
||||
using Xunit;
|
||||
|
||||
namespace StellaOps.ExportCenter.Tests.EvidenceLocker;
|
||||
|
||||
public sealed class InMemoryExportEvidenceLockerClientTests
|
||||
{
|
||||
[Fact]
|
||||
public async Task PushSnapshotAsync_SortsEntriesAndUsesDeterministicIds()
|
||||
{
|
||||
var timeProvider = new FakeTimeProvider(new DateTimeOffset(2025, 1, 2, 8, 0, 0, TimeSpan.Zero));
|
||||
var guidProvider = new SequentialGuidProvider();
|
||||
var calculator = new ExportMerkleTreeCalculator();
|
||||
var client = new InMemoryExportEvidenceLockerClient(calculator, timeProvider, guidProvider);
|
||||
|
||||
var request = new ExportEvidenceSnapshotRequest
|
||||
{
|
||||
TenantId = "tenant-1",
|
||||
ExportRunId = "run-1",
|
||||
ProfileId = "profile-1",
|
||||
Kind = ExportBundleKind.Evidence,
|
||||
Materials = new[]
|
||||
{
|
||||
new ExportMaterialInput
|
||||
{
|
||||
Section = "reports",
|
||||
Path = "z.json",
|
||||
Sha256 = "ABCDEF",
|
||||
SizeBytes = 10,
|
||||
MediaType = "application/json"
|
||||
},
|
||||
new ExportMaterialInput
|
||||
{
|
||||
Section = "reports",
|
||||
Path = "a.json",
|
||||
Sha256 = "123456",
|
||||
SizeBytes = 20,
|
||||
MediaType = "application/json"
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
var result = await client.PushSnapshotAsync(request);
|
||||
|
||||
Assert.True(result.Success);
|
||||
Assert.Equal("00000000000000000000000000000001", result.BundleId);
|
||||
|
||||
var manifest = await client.GetBundleAsync(result.BundleId!, request.TenantId);
|
||||
Assert.NotNull(manifest);
|
||||
Assert.Equal(timeProvider.GetUtcNow(), manifest!.CreatedAt);
|
||||
|
||||
var paths = manifest.Entries.Select(e => e.CanonicalPath).ToList();
|
||||
var sorted = paths.OrderBy(p => p, StringComparer.Ordinal).ToList();
|
||||
Assert.Equal(sorted, paths);
|
||||
|
||||
Assert.All(manifest.Entries, entry => Assert.Equal(entry.Sha256, entry.Sha256.ToLowerInvariant()));
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,139 @@
|
||||
using System.Collections.Immutable;
|
||||
using System.Diagnostics;
|
||||
using System.Text.Json;
|
||||
using Microsoft.Extensions.Logging.Abstractions;
|
||||
using Moq;
|
||||
using StellaOps.Determinism;
|
||||
using StellaOps.ExportCenter.Tests;
|
||||
using StellaOps.ExportCenter.WebService.ExceptionReport;
|
||||
using StellaOps.Policy.Exceptions.Models;
|
||||
using StellaOps.Policy.Exceptions.Repositories;
|
||||
using Xunit;
|
||||
|
||||
namespace StellaOps.ExportCenter.Tests.ExceptionReport;
|
||||
|
||||
public sealed class ExceptionReportGeneratorTests
|
||||
{
|
||||
[Fact]
|
||||
public async Task CreateReportAsync_UsesGuidProvider()
|
||||
{
|
||||
var tenantId = Guid.NewGuid();
|
||||
var timeProvider = new FakeTimeProvider(new DateTimeOffset(2025, 1, 2, 11, 0, 0, TimeSpan.Zero));
|
||||
var guidProvider = new SequentialGuidProvider();
|
||||
var exceptionRepo = new Mock<IExceptionRepository>();
|
||||
var applicationRepo = new Mock<IExceptionApplicationRepository>();
|
||||
|
||||
exceptionRepo
|
||||
.Setup(repo => repo.GetByFilterAsync(It.IsAny<ExceptionFilter>(), It.IsAny<CancellationToken>()))
|
||||
.ReturnsAsync(Array.Empty<ExceptionObject>());
|
||||
|
||||
var generator = new ExceptionReportGenerator(
|
||||
exceptionRepo.Object,
|
||||
applicationRepo.Object,
|
||||
NullLogger<ExceptionReportGenerator>.Instance,
|
||||
guidProvider,
|
||||
timeProvider);
|
||||
|
||||
var response = await generator.CreateReportAsync(new ExceptionReportRequest
|
||||
{
|
||||
TenantId = tenantId,
|
||||
RequesterId = "user-1",
|
||||
Format = "json"
|
||||
});
|
||||
|
||||
Assert.StartsWith("exc-rpt-", response.JobId);
|
||||
Assert.EndsWith("00000000000000000000000000000001", response.JobId);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task CreateReportAsync_SummaryKeysAreOrdered()
|
||||
{
|
||||
var tenantId = Guid.NewGuid();
|
||||
var timeProvider = new FakeTimeProvider(new DateTimeOffset(2025, 1, 2, 11, 0, 0, TimeSpan.Zero));
|
||||
var guidProvider = new SequentialGuidProvider();
|
||||
var exceptionRepo = new Mock<IExceptionRepository>();
|
||||
var applicationRepo = new Mock<IExceptionApplicationRepository>();
|
||||
|
||||
var exceptions = new[]
|
||||
{
|
||||
CreateException("exc-2", ExceptionStatus.Revoked, tenantId, timeProvider.GetUtcNow()),
|
||||
CreateException("exc-1", ExceptionStatus.Active, tenantId, timeProvider.GetUtcNow())
|
||||
};
|
||||
|
||||
exceptionRepo
|
||||
.Setup(repo => repo.GetByFilterAsync(It.IsAny<ExceptionFilter>(), It.IsAny<CancellationToken>()))
|
||||
.ReturnsAsync(exceptions);
|
||||
|
||||
var generator = new ExceptionReportGenerator(
|
||||
exceptionRepo.Object,
|
||||
applicationRepo.Object,
|
||||
NullLogger<ExceptionReportGenerator>.Instance,
|
||||
guidProvider,
|
||||
timeProvider);
|
||||
|
||||
var response = await generator.CreateReportAsync(new ExceptionReportRequest
|
||||
{
|
||||
TenantId = tenantId,
|
||||
RequesterId = "user-1",
|
||||
Format = "json"
|
||||
});
|
||||
|
||||
var content = await WaitForContentAsync(generator, response.JobId);
|
||||
using var document = JsonDocument.Parse(content.Content);
|
||||
var byStatus = document.RootElement.GetProperty("summary").GetProperty("byStatus");
|
||||
var keys = byStatus.EnumerateObject().Select(p => p.Name).ToList();
|
||||
|
||||
Assert.Equal(new[] { "Active", "Revoked" }, keys);
|
||||
}
|
||||
|
||||
private static ExceptionObject CreateException(
|
||||
string exceptionId,
|
||||
ExceptionStatus status,
|
||||
Guid tenantId,
|
||||
DateTimeOffset now)
|
||||
{
|
||||
return new ExceptionObject
|
||||
{
|
||||
ExceptionId = exceptionId,
|
||||
Version = 1,
|
||||
Status = status,
|
||||
Type = ExceptionType.Vulnerability,
|
||||
Scope = new ExceptionScope
|
||||
{
|
||||
TenantId = tenantId,
|
||||
VulnerabilityId = "CVE-2024-0001"
|
||||
},
|
||||
OwnerId = "owner-1",
|
||||
RequesterId = "requester-1",
|
||||
CreatedAt = now,
|
||||
UpdatedAt = now,
|
||||
ExpiresAt = now.AddDays(30),
|
||||
ReasonCode = ExceptionReason.AcceptedRisk,
|
||||
Rationale = new string('a', 60),
|
||||
EvidenceRefs = ImmutableArray<string>.Empty,
|
||||
CompensatingControls = ImmutableArray<string>.Empty,
|
||||
Metadata = ImmutableDictionary<string, string>.Empty
|
||||
};
|
||||
}
|
||||
|
||||
private static async Task<ExceptionReportContent> WaitForContentAsync(
|
||||
IExceptionReportGenerator generator,
|
||||
string jobId)
|
||||
{
|
||||
var timeout = TimeSpan.FromSeconds(2);
|
||||
var stopwatch = Stopwatch.StartNew();
|
||||
|
||||
while (stopwatch.Elapsed < timeout)
|
||||
{
|
||||
var content = await generator.GetReportContentAsync(jobId);
|
||||
if (content is not null)
|
||||
{
|
||||
return content;
|
||||
}
|
||||
|
||||
await Task.Delay(TimeSpan.FromMilliseconds(10));
|
||||
}
|
||||
|
||||
throw new TimeoutException("Report content not available.");
|
||||
}
|
||||
}
|
||||
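Aside, illustrative only: CreateReportAsync_SummaryKeysAreOrdered above expects the byStatus keys in ordinal order regardless of input order. One deterministic way to build such a summary (assumed approach, not the generator's actual code):

using System;
using System.Collections.Generic;
using System.Linq;

internal static class SummarySketch
{
    // Grouping into a SortedDictionary keeps the serialized key order stable
    // ("Active" before "Revoked"), independent of the order exceptions arrive in.
    public static SortedDictionary<string, int> ByStatus(IEnumerable<string> statuses) =>
        new(
            statuses.GroupBy(s => s).ToDictionary(g => g.Key, g => g.Count()),
            StringComparer.Ordinal);
}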
@@ -0,0 +1,127 @@
|
||||
using Microsoft.Extensions.Logging.Abstractions;
|
||||
using Microsoft.Extensions.Options;
|
||||
using StellaOps.Determinism;
|
||||
using StellaOps.ExportCenter.Tests;
|
||||
using StellaOps.ExportCenter.WebService.Incident;
|
||||
using StellaOps.ExportCenter.WebService.Timeline;
|
||||
using Xunit;
|
||||
|
||||
namespace StellaOps.ExportCenter.Tests.Incident;
|
||||
|
||||
public sealed class ExportIncidentManagerTests
|
||||
{
|
||||
[Fact]
|
||||
public async Task ActivateIncidentAsync_UsesGuidProvider()
|
||||
{
|
||||
var timeProvider = new FakeTimeProvider(new DateTimeOffset(2025, 1, 2, 12, 0, 0, TimeSpan.Zero));
|
||||
var guidProvider = new SequentialGuidProvider();
|
||||
var manager = new ExportIncidentManager(
|
||||
NullLogger<ExportIncidentManager>.Instance,
|
||||
new FakeTimelinePublisher(),
|
||||
new FakeNotificationEmitter(),
|
||||
guidProvider,
|
||||
timeProvider);
|
||||
|
||||
var request = new ExportIncidentActivationRequest
|
||||
{
|
||||
Type = ExportIncidentType.SecurityIncident,
|
||||
Severity = ExportIncidentSeverity.Critical,
|
||||
Summary = "Test incident",
|
||||
Description = "Test description",
|
||||
ActivatedBy = "tester"
|
||||
};
|
||||
|
||||
var result = await manager.ActivateIncidentAsync(request);
|
||||
|
||||
Assert.True(result.Success);
|
||||
Assert.NotNull(result.Incident);
|
||||
|
||||
var expectedGuid = new Guid("00000000-0000-0000-0000-000000000001");
|
||||
var expectedId = $"inc-{expectedGuid:N}"[..20];
|
||||
Assert.Equal(expectedId, result.Incident!.IncidentId);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task GetRecentIncidentsAsync_PrunesResolvedByRetention()
|
||||
{
|
||||
var timeProvider = new FakeTimeProvider(new DateTimeOffset(2025, 1, 2, 12, 0, 0, TimeSpan.Zero));
|
||||
var guidProvider = new SequentialGuidProvider();
|
||||
var options = Options.Create(new ExportIncidentManagerOptions
|
||||
{
|
||||
RetentionPeriod = TimeSpan.FromMinutes(30),
|
||||
MaxIncidentCount = 0
|
||||
});
|
||||
|
||||
var manager = new ExportIncidentManager(
|
||||
NullLogger<ExportIncidentManager>.Instance,
|
||||
new FakeTimelinePublisher(),
|
||||
new FakeNotificationEmitter(),
|
||||
guidProvider,
|
||||
timeProvider,
|
||||
options);
|
||||
|
||||
var activation = await manager.ActivateIncidentAsync(new ExportIncidentActivationRequest
|
||||
{
|
||||
Type = ExportIncidentType.SecurityIncident,
|
||||
Severity = ExportIncidentSeverity.Critical,
|
||||
Summary = "Retention test",
|
||||
ActivatedBy = "tester"
|
||||
});
|
||||
|
||||
var incidentId = activation.Incident!.IncidentId;
|
||||
|
||||
await manager.ResolveIncidentAsync(incidentId, new ExportIncidentResolutionRequest
|
||||
{
|
||||
ResolutionMessage = "Resolved",
|
||||
ResolvedBy = "tester",
|
||||
IsFalsePositive = false
|
||||
});
|
||||
|
||||
timeProvider.Advance(TimeSpan.FromHours(1));
|
||||
|
||||
var recent = await manager.GetRecentIncidentsAsync(limit: 10, includeResolved: true);
|
||||
|
||||
Assert.Empty(recent);
|
||||
}
|
||||
|
||||
private sealed class FakeTimelinePublisher : IExportTimelinePublisher
|
||||
{
|
||||
public Task<TimelinePublishResult> PublishStartedAsync(ExportStartedEvent @event, CancellationToken cancellationToken = default)
|
||||
=> Task.FromResult(TimelinePublishResult.Succeeded("started"));
|
||||
|
||||
public Task<TimelinePublishResult> PublishCompletedAsync(ExportCompletedEvent @event, CancellationToken cancellationToken = default)
|
||||
=> Task.FromResult(TimelinePublishResult.Succeeded("completed"));
|
||||
|
||||
public Task<TimelinePublishResult> PublishFailedAsync(ExportFailedEvent @event, CancellationToken cancellationToken = default)
|
||||
=> Task.FromResult(TimelinePublishResult.Succeeded("failed"));
|
||||
|
||||
public Task<TimelinePublishResult> PublishCancelledAsync(ExportCancelledEvent @event, CancellationToken cancellationToken = default)
|
||||
=> Task.FromResult(TimelinePublishResult.Succeeded("cancelled"));
|
||||
|
||||
public Task<TimelinePublishResult> PublishArtifactCreatedAsync(ExportArtifactCreatedEvent @event, CancellationToken cancellationToken = default)
|
||||
=> Task.FromResult(TimelinePublishResult.Succeeded("artifact"));
|
||||
|
||||
public Task<TimelinePublishResult> PublishEventAsync(ExportTimelineEventBase @event, CancellationToken cancellationToken = default)
|
||||
=> Task.FromResult(TimelinePublishResult.Succeeded("event"));
|
||||
|
||||
public Task<TimelinePublishResult> PublishIncidentEventAsync(
|
||||
string eventType,
|
||||
string incidentId,
|
||||
string eventJson,
|
||||
string? correlationId,
|
||||
CancellationToken cancellationToken = default)
|
||||
=> Task.FromResult(TimelinePublishResult.Succeeded("incident"));
|
||||
}
|
||||
|
||||
private sealed class FakeNotificationEmitter : IExportNotificationEmitter
|
||||
{
|
||||
public Task EmitIncidentActivatedAsync(ExportIncident incident, CancellationToken cancellationToken = default)
|
||||
=> Task.CompletedTask;
|
||||
|
||||
public Task EmitIncidentUpdatedAsync(ExportIncident incident, string updateMessage, CancellationToken cancellationToken = default)
|
||||
=> Task.CompletedTask;
|
||||
|
||||
public Task EmitIncidentResolvedAsync(ExportIncident incident, string resolutionMessage, bool isFalsePositive, CancellationToken cancellationToken = default)
|
||||
=> Task.CompletedTask;
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,95 @@
|
||||
using Microsoft.Extensions.Logging.Abstractions;
|
||||
using Microsoft.Extensions.Options;
|
||||
using StellaOps.Determinism;
|
||||
using StellaOps.ExportCenter.Tests;
|
||||
using StellaOps.ExportCenter.WebService.RiskBundle;
|
||||
using StellaOps.ExportCenter.WebService.Timeline;
|
||||
using Xunit;
|
||||
|
||||
namespace StellaOps.ExportCenter.Tests.RiskBundle;
|
||||
|
||||
public sealed class RiskBundleJobHandlerTests
|
||||
{
|
||||
[Fact]
|
||||
public async Task SubmitJobAsync_UsesGuidProvider()
|
||||
{
|
||||
var timeProvider = new FakeTimeProvider(new DateTimeOffset(2025, 1, 2, 9, 0, 0, TimeSpan.Zero));
|
||||
var guidProvider = new SequentialGuidProvider();
|
||||
var handler = new RiskBundleJobHandler(
|
||||
timeProvider,
|
||||
guidProvider,
|
||||
NullLogger<RiskBundleJobHandler>.Instance,
|
||||
new FakeTimelinePublisher());
|
||||
|
||||
var request = new RiskBundleJobSubmitRequest
|
||||
{
|
||||
TenantId = "tenant-1"
|
||||
};
|
||||
|
||||
var result = await handler.SubmitJobAsync(request, "actor");
|
||||
|
||||
Assert.True(result.Success);
|
||||
Assert.Equal("00000000000000000000000000000001", result.JobId);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task SubmitJobAsync_RespectsMaxConcurrentJobs()
|
||||
{
|
||||
var timeProvider = new FakeTimeProvider(new DateTimeOffset(2025, 1, 2, 9, 0, 0, TimeSpan.Zero));
|
||||
var guidProvider = new SequentialGuidProvider();
|
||||
var options = Options.Create(new RiskBundleJobHandlerOptions
|
||||
{
|
||||
MaxConcurrentJobs = 1,
|
||||
JobTimeout = TimeSpan.FromMinutes(5),
|
||||
JobRetentionPeriod = TimeSpan.FromHours(1)
|
||||
});
|
||||
|
||||
var handler = new RiskBundleJobHandler(
|
||||
timeProvider,
|
||||
guidProvider,
|
||||
NullLogger<RiskBundleJobHandler>.Instance,
|
||||
new FakeTimelinePublisher(),
|
||||
options);
|
||||
|
||||
var request = new RiskBundleJobSubmitRequest
|
||||
{
|
||||
TenantId = "tenant-1"
|
||||
};
|
||||
|
||||
var first = await handler.SubmitJobAsync(request, "actor");
|
||||
var second = await handler.SubmitJobAsync(request, "actor");
|
||||
|
||||
Assert.True(first.Success);
|
||||
Assert.False(second.Success);
|
||||
Assert.Equal("Maximum concurrent jobs reached", second.ErrorMessage);
|
||||
}
|
||||
|
||||
private sealed class FakeTimelinePublisher : IExportTimelinePublisher
|
||||
{
|
||||
public Task<TimelinePublishResult> PublishStartedAsync(ExportStartedEvent @event, CancellationToken cancellationToken = default)
|
||||
=> Task.FromResult(TimelinePublishResult.Succeeded("started"));
|
||||
|
||||
public Task<TimelinePublishResult> PublishCompletedAsync(ExportCompletedEvent @event, CancellationToken cancellationToken = default)
|
||||
=> Task.FromResult(TimelinePublishResult.Succeeded("completed"));
|
||||
|
||||
public Task<TimelinePublishResult> PublishFailedAsync(ExportFailedEvent @event, CancellationToken cancellationToken = default)
|
||||
=> Task.FromResult(TimelinePublishResult.Succeeded("failed"));
|
||||
|
||||
public Task<TimelinePublishResult> PublishCancelledAsync(ExportCancelledEvent @event, CancellationToken cancellationToken = default)
|
||||
=> Task.FromResult(TimelinePublishResult.Succeeded("cancelled"));
|
||||
|
||||
public Task<TimelinePublishResult> PublishArtifactCreatedAsync(ExportArtifactCreatedEvent @event, CancellationToken cancellationToken = default)
|
||||
=> Task.FromResult(TimelinePublishResult.Succeeded("artifact"));
|
||||
|
||||
public Task<TimelinePublishResult> PublishEventAsync(ExportTimelineEventBase @event, CancellationToken cancellationToken = default)
|
||||
=> Task.FromResult(TimelinePublishResult.Succeeded("event"));
|
||||
|
||||
public Task<TimelinePublishResult> PublishIncidentEventAsync(
|
||||
string eventType,
|
||||
string incidentId,
|
||||
string eventJson,
|
||||
string? correlationId,
|
||||
CancellationToken cancellationToken = default)
|
||||
=> Task.FromResult(TimelinePublishResult.Succeeded("incident"));
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,76 @@
|
||||
using Microsoft.Extensions.Logging.Abstractions;
|
||||
using Microsoft.Extensions.Options;
|
||||
using StellaOps.Determinism;
|
||||
using StellaOps.ExportCenter.Tests;
|
||||
using StellaOps.ExportCenter.WebService.SimulationExport;
|
||||
using Xunit;
|
||||
|
||||
namespace StellaOps.ExportCenter.Tests.SimulationExport;
|
||||
|
||||
public sealed class SimulationReportExporterTests
|
||||
{
|
||||
[Fact]
|
||||
public async Task ExportAsync_UsesGuidProvider()
|
||||
{
|
||||
var timeProvider = new FakeTimeProvider(new DateTimeOffset(2025, 1, 2, 13, 0, 0, TimeSpan.Zero));
|
||||
var guidProvider = new SequentialGuidProvider();
|
||||
var exporter = new SimulationReportExporter(
|
||||
timeProvider,
|
||||
guidProvider,
|
||||
NullLogger<SimulationReportExporter>.Instance);
|
||||
|
||||
var simulationId = (await exporter.GetAvailableSimulationsAsync(null)).Simulations.First().SimulationId;
|
||||
var result = await exporter.ExportAsync(new SimulationExportRequest
|
||||
{
|
||||
SimulationId = simulationId,
|
||||
Format = SimulationExportFormat.Json
|
||||
});
|
||||
|
||||
Assert.True(result.Success);
|
||||
Assert.Equal("exp-00000000000000000000000000000001", result.ExportId);
|
||||
Assert.Equal(timeProvider.GetUtcNow(), result.CreatedAt);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task ExportAsync_PrunesMaxExports()
|
||||
{
|
||||
var timeProvider = new FakeTimeProvider(new DateTimeOffset(2025, 1, 2, 13, 0, 0, TimeSpan.Zero));
|
||||
var guidProvider = new SequentialGuidProvider();
|
||||
var options = Options.Create(new SimulationReportExporterOptions
|
||||
{
|
||||
MaxExports = 1,
|
||||
MaxSimulations = 0,
|
||||
RetentionPeriod = TimeSpan.Zero
|
||||
});
|
||||
|
||||
var exporter = new SimulationReportExporter(
|
||||
timeProvider,
|
||||
guidProvider,
|
||||
NullLogger<SimulationReportExporter>.Instance,
|
||||
options);
|
||||
|
||||
var simulationId = (await exporter.GetAvailableSimulationsAsync(null)).Simulations.First().SimulationId;
|
||||
|
||||
var first = await exporter.ExportAsync(new SimulationExportRequest
|
||||
{
|
||||
SimulationId = simulationId,
|
||||
Format = SimulationExportFormat.Json
|
||||
});
|
||||
|
||||
timeProvider.Advance(TimeSpan.FromMinutes(5));
|
||||
|
||||
var second = await exporter.ExportAsync(new SimulationExportRequest
|
||||
{
|
||||
SimulationId = simulationId,
|
||||
Format = SimulationExportFormat.Json
|
||||
});
|
||||
|
||||
await exporter.GetAvailableSimulationsAsync(null);
|
||||
|
||||
var removed = await exporter.GetExportDocumentAsync(first.ExportId);
|
||||
var retained = await exporter.GetExportDocumentAsync(second.ExportId);
|
||||
|
||||
Assert.Null(removed);
|
||||
Assert.NotNull(retained);
|
||||
}
|
||||
}
|
||||
@@ -781,15 +781,9 @@ public static class ExportApiEndpoints
|
||||
Description = profile.Description,
|
||||
Kind = profile.Kind,
|
||||
Status = profile.Status,
|
||||
Scope = profile.ScopeJson is not null
|
||||
? JsonSerializer.Deserialize<ExportScope>(profile.ScopeJson)
|
||||
: null,
|
||||
Format = profile.FormatJson is not null
|
||||
? JsonSerializer.Deserialize<ExportFormatOptions>(profile.FormatJson)
|
||||
: null,
|
||||
Signing = profile.SigningJson is not null
|
||||
? JsonSerializer.Deserialize<ExportSigningOptions>(profile.SigningJson)
|
||||
: null,
|
||||
Scope = TryDeserialize<ExportScope>(profile.ScopeJson),
|
||||
Format = TryDeserialize<ExportFormatOptions>(profile.FormatJson),
|
||||
Signing = TryDeserialize<ExportSigningOptions>(profile.SigningJson),
|
||||
Schedule = profile.Schedule,
|
||||
CreatedAt = profile.CreatedAt,
|
||||
UpdatedAt = profile.UpdatedAt,
|
||||
@@ -866,6 +860,23 @@ public static class ExportApiEndpoints
};
}

private static T? TryDeserialize<T>(string? json)
{
// Missing or malformed stored JSON yields default instead of throwing, so one bad blob cannot fail the whole profile mapping.
if (string.IsNullOrWhiteSpace(json))
{
return default;
}

try
{
return JsonSerializer.Deserialize<T>(json);
}
catch
{
return default;
}
}

// ========================================================================
// Verification endpoint registration
// ========================================================================
|
||||
|
||||
@@ -1,5 +1,6 @@
|
||||
using Microsoft.Extensions.DependencyInjection;
|
||||
using Microsoft.Extensions.DependencyInjection.Extensions;
|
||||
using StellaOps.Determinism;
|
||||
|
||||
namespace StellaOps.ExportCenter.WebService.Api;
|
||||
|
||||
@@ -10,11 +11,10 @@ public static class ExportApiServiceCollectionExtensions
|
||||
{
|
||||
/// <summary>
|
||||
/// Adds export API services to the service collection.
|
||||
/// In-memory repositories are disabled by default and must be enabled explicitly.
|
||||
/// </summary>
|
||||
public static IServiceCollection AddExportApiServices(this IServiceCollection services)
|
||||
{
|
||||
return services.AddExportApiServices(_ => { });
|
||||
return services.AddExportApiServices(_ => { }, allowInMemoryRepositories: false);
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
@@ -22,7 +22,8 @@ public static class ExportApiServiceCollectionExtensions
|
||||
/// </summary>
|
||||
public static IServiceCollection AddExportApiServices(
|
||||
this IServiceCollection services,
|
||||
Action<ExportConcurrencyOptions> configureConcurrency)
|
||||
Action<ExportConcurrencyOptions> configureConcurrency,
|
||||
bool allowInMemoryRepositories)
|
||||
{
|
||||
ArgumentNullException.ThrowIfNull(services);
|
||||
ArgumentNullException.ThrowIfNull(configureConcurrency);
|
||||
@@ -32,6 +33,13 @@ public static class ExportApiServiceCollectionExtensions
|
||||
|
||||
// Register TimeProvider and IGuidProvider if not already registered
|
||||
services.TryAddSingleton(TimeProvider.System);
|
||||
services.TryAddSingleton<IGuidProvider, SystemGuidProvider>();
|
||||
|
||||
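// Guard rail: callers must opt in before the volatile in-memory repositories are registered.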
if (!allowInMemoryRepositories)
|
||||
{
|
||||
throw new InvalidOperationException(
|
||||
"In-memory export repositories are disabled. Register persistent repositories or set Export:AllowInMemoryRepositories to true.");
|
||||
}
|
||||
|
||||
// Register in-memory repositories (reached only when explicitly allowed)
|
||||
services.TryAddSingleton<IExportProfileRepository, InMemoryExportProfileRepository>();
|
||||
@@ -64,6 +72,7 @@ public static class ExportApiServiceCollectionExtensions
|
||||
|
||||
// Register TimeProvider and IGuidProvider if not already registered
|
||||
services.TryAddSingleton(TimeProvider.System);
|
||||
services.TryAddSingleton<IGuidProvider, SystemGuidProvider>();
|
||||
|
||||
// Register custom repositories
|
||||
services.TryAddSingleton<IExportProfileRepository, TProfileRepo>();
|
||||
|
||||
@@ -1,6 +1,7 @@
|
||||
using System.Diagnostics;
|
||||
using System.Text.Json;
|
||||
using Microsoft.Extensions.Logging;
|
||||
using StellaOps.Determinism;
|
||||
using StellaOps.ExportCenter.Core.Domain;
|
||||
using StellaOps.ExportCenter.WebService.Telemetry;
|
||||
|
||||
@@ -104,13 +105,16 @@ public sealed class ExportAuditService : IExportAuditService
|
||||
{
|
||||
private readonly ILogger<ExportAuditService> _logger;
|
||||
private readonly TimeProvider _timeProvider;
|
||||
private readonly IGuidProvider _guidProvider;
|
||||
|
||||
public ExportAuditService(
|
||||
ILogger<ExportAuditService> logger,
|
||||
IGuidProvider guidProvider,
|
||||
TimeProvider timeProvider)
|
||||
{
|
||||
_logger = logger;
|
||||
_timeProvider = timeProvider;
|
||||
_guidProvider = guidProvider ?? throw new ArgumentNullException(nameof(guidProvider));
|
||||
}
|
||||
|
||||
public Task LogProfileOperationAsync(
|
||||
@@ -225,7 +229,7 @@ public sealed class ExportAuditService : IExportAuditService
|
||||
|
||||
return new ExportAuditEntry
|
||||
{
|
||||
AuditId = Guid.NewGuid(),
|
||||
AuditId = _guidProvider.NewGuid(),
|
||||
Operation = operation,
|
||||
TenantId = tenantId,
|
||||
UserId = userId,
|
||||
|
||||
@@ -11,10 +11,14 @@ public sealed class InMemoryExportProfileRepository : IExportProfileRepository
|
||||
{
|
||||
private readonly ConcurrentDictionary<(Guid TenantId, Guid ProfileId), ExportProfile> _profiles = new();
|
||||
private readonly ILogger<InMemoryExportProfileRepository> _logger;
|
||||
private readonly TimeProvider _timeProvider;
|
||||
|
||||
public InMemoryExportProfileRepository(ILogger<InMemoryExportProfileRepository> logger)
|
||||
public InMemoryExportProfileRepository(
|
||||
ILogger<InMemoryExportProfileRepository> logger,
|
||||
TimeProvider? timeProvider = null)
|
||||
{
|
||||
_logger = logger;
|
||||
_timeProvider = timeProvider ?? TimeProvider.System;
|
||||
}
|
||||
|
||||
public Task<ExportProfile?> GetByIdAsync(
|
||||
@@ -112,8 +116,8 @@ public sealed class InMemoryExportProfileRepository : IExportProfileRepository
|
||||
var archived = existing with
|
||||
{
|
||||
Status = ExportProfileStatus.Archived,
|
||||
ArchivedAt = DateTimeOffset.UtcNow,
|
||||
UpdatedAt = DateTimeOffset.UtcNow
|
||||
ArchivedAt = _timeProvider.GetUtcNow(),
|
||||
UpdatedAt = _timeProvider.GetUtcNow()
|
||||
};
|
||||
|
||||
if (!_profiles.TryUpdate(key, archived, existing))
|
||||
@@ -165,10 +169,14 @@ public sealed class InMemoryExportRunRepository : IExportRunRepository
|
||||
{
|
||||
private readonly ConcurrentDictionary<(Guid TenantId, Guid RunId), ExportRun> _runs = new();
|
||||
private readonly ILogger<InMemoryExportRunRepository> _logger;
|
||||
private readonly TimeProvider _timeProvider;
|
||||
|
||||
public InMemoryExportRunRepository(ILogger<InMemoryExportRunRepository> logger)
|
||||
public InMemoryExportRunRepository(
|
||||
ILogger<InMemoryExportRunRepository> logger,
|
||||
TimeProvider? timeProvider = null)
|
||||
{
|
||||
_logger = logger;
|
||||
_timeProvider = timeProvider ?? TimeProvider.System;
|
||||
}
|
||||
|
||||
public Task<ExportRun?> GetByIdAsync(
|
||||
@@ -277,24 +285,11 @@ public sealed class InMemoryExportRunRepository : IExportRunRepository
|
||||
if (existing.Status != ExportRunStatus.Queued && existing.Status != ExportRunStatus.Running)
|
||||
return Task.FromResult(false);
|
||||
|
||||
var cancelled = new ExportRun
|
||||
var cancelled = existing with
|
||||
{
|
||||
RunId = existing.RunId,
|
||||
ProfileId = existing.ProfileId,
|
||||
TenantId = existing.TenantId,
|
||||
Status = ExportRunStatus.Cancelled,
|
||||
Trigger = existing.Trigger,
|
||||
CorrelationId = existing.CorrelationId,
|
||||
InitiatedBy = existing.InitiatedBy,
|
||||
TotalItems = existing.TotalItems,
|
||||
ProcessedItems = existing.ProcessedItems,
|
||||
FailedItems = existing.FailedItems,
|
||||
TotalSizeBytes = existing.TotalSizeBytes,
|
||||
ErrorJson = null,
|
||||
CreatedAt = existing.CreatedAt,
|
||||
StartedAt = existing.StartedAt,
|
||||
CompletedAt = DateTimeOffset.UtcNow,
|
||||
ExpiresAt = existing.ExpiresAt
|
||||
CompletedAt = _timeProvider.GetUtcNow()
|
||||
};
|
||||
|
||||
_runs[key] = cancelled;
|
||||
@@ -339,12 +334,27 @@ public sealed class InMemoryExportRunRepository : IExportRunRepository
|
||||
{
|
||||
cancellationToken.ThrowIfCancellationRequested();
|
||||
|
||||
var nextRun = _runs.Values
|
||||
var candidates = _runs.Values
|
||||
.Where(r => r.TenantId == tenantId && r.Status == ExportRunStatus.Queued)
|
||||
.OrderBy(r => r.CreatedAt)
|
||||
.FirstOrDefault();
|
||||
.ToList();
|
||||
|
||||
return Task.FromResult(nextRun);
|
||||
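// Claim the oldest queued run atomically: TryUpdate only succeeds if no other worker has already moved this run out of Queued.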
foreach (var candidate in candidates)
|
||||
{
|
||||
var key = (candidate.TenantId, candidate.RunId);
|
||||
var updated = candidate with
|
||||
{
|
||||
Status = ExportRunStatus.Running,
|
||||
StartedAt = candidate.StartedAt ?? _timeProvider.GetUtcNow()
|
||||
};
|
||||
|
||||
if (_runs.TryUpdate(key, updated, candidate))
|
||||
{
|
||||
return Task.FromResult<ExportRun?>(updated);
|
||||
}
|
||||
}
|
||||
|
||||
return Task.FromResult<ExportRun?>(null);
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -5,6 +5,7 @@ using System.Security.Cryptography;
|
||||
using System.Text;
|
||||
using System.Text.Json;
|
||||
using StellaOps.ExportCenter.Client.Models;
|
||||
using StellaOps.Determinism;
|
||||
|
||||
namespace StellaOps.ExportCenter.WebService.AuditBundle;
|
||||
|
||||
@@ -17,16 +18,21 @@ public sealed class AuditBundleJobHandler : IAuditBundleJobHandler
|
||||
private readonly ConcurrentDictionary<string, AuditBundleJob> _jobs = new();
|
||||
private readonly ILogger<AuditBundleJobHandler> _logger;
|
||||
private readonly TimeProvider _timeProvider;
|
||||
private readonly IGuidProvider _guidProvider;
|
||||
private static readonly JsonSerializerOptions JsonOptions = new()
|
||||
{
|
||||
WriteIndented = true,
|
||||
PropertyNamingPolicy = JsonNamingPolicy.CamelCase
|
||||
};
|
||||
|
||||
public AuditBundleJobHandler(ILogger<AuditBundleJobHandler> logger, TimeProvider? timeProvider = null)
|
||||
public AuditBundleJobHandler(
|
||||
ILogger<AuditBundleJobHandler> logger,
|
||||
IGuidProvider guidProvider,
|
||||
TimeProvider? timeProvider = null)
|
||||
{
|
||||
_logger = logger;
|
||||
_timeProvider = timeProvider ?? TimeProvider.System;
|
||||
_guidProvider = guidProvider ?? throw new ArgumentNullException(nameof(guidProvider));
|
||||
}
|
||||
|
||||
public Task<AuditBundleCreateResult> CreateBundleAsync(
|
||||
@@ -50,7 +56,7 @@ public sealed class AuditBundleJobHandler : IAuditBundleJobHandler
|
||||
new ErrorDetail("INVALID_REQUEST", "Subject name is required")));
|
||||
}
|
||||
|
||||
var bundleId = $"bndl-{Guid.NewGuid():N}";
|
||||
var bundleId = $"bndl-{_guidProvider.NewGuid():N}";
|
||||
var now = _timeProvider.GetUtcNow();
|
||||
|
||||
var job = new AuditBundleJob
|
||||
@@ -73,7 +79,7 @@ public sealed class AuditBundleJobHandler : IAuditBundleJobHandler
|
||||
|
||||
// In a real implementation, this would enqueue a background job
|
||||
// For now, we process it in-memory as a fire-and-forget task
|
||||
_ = Task.Run(async () => await ProcessBundleAsync(bundleId, cancellationToken), cancellationToken);
|
||||
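// ProcessBundleAsync is already asynchronous, so the Task.Run wrapper is dropped; the discarded task reports its outcome through the in-memory job state.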
_ = ProcessBundleAsync(bundleId, cancellationToken);
|
||||
|
||||
var response = new CreateAuditBundleResponse(
|
||||
bundleId,
|
||||
@@ -280,7 +286,14 @@ public sealed class AuditBundleJobHandler : IAuditBundleJobHandler
|
||||
"Completed audit bundle {BundleId} with hash {BundleHash}",
|
||||
bundleId, job.BundleHash);
|
||||
}
|
||||
catch (Exception ex) when (ex is not OperationCanceledException)
|
||||
catch (OperationCanceledException)
|
||||
{
|
||||
job.Status = "Cancelled";
|
||||
job.ErrorCode = "CANCELLED";
|
||||
job.ErrorMessage = "Bundle generation cancelled.";
|
||||
job.CompletedAt = _timeProvider.GetUtcNow();
|
||||
}
|
||||
catch (Exception ex)
|
||||
{
|
||||
_logger.LogError(ex, "Failed to process audit bundle {BundleId}", bundleId);
|
||||
job.Status = "Failed";
|
||||
|
||||
@@ -1,3 +1,7 @@
|
||||
using Microsoft.Extensions.DependencyInjection;
|
||||
using Microsoft.Extensions.DependencyInjection.Extensions;
|
||||
using StellaOps.Determinism;
|
||||
|
||||
namespace StellaOps.ExportCenter.WebService.AuditBundle;
|
||||
|
||||
/// <summary>
|
||||
@@ -10,6 +14,8 @@ public static class AuditBundleServiceCollectionExtensions
|
||||
/// </summary>
|
||||
public static IServiceCollection AddAuditBundleJobHandler(this IServiceCollection services)
|
||||
{
|
||||
services.TryAddSingleton(TimeProvider.System);
|
||||
services.TryAddSingleton<IGuidProvider, SystemGuidProvider>();
|
||||
services.AddSingleton<IAuditBundleJobHandler, AuditBundleJobHandler>();
|
||||
return services;
|
||||
}
|
||||
|
||||
@@ -85,6 +85,7 @@ public static class DeprecationHeaderExtensions
|
||||
return async (context, next) =>
|
||||
{
|
||||
var httpContext = context.HttpContext;
|
||||
var timeProvider = httpContext.RequestServices.GetService<TimeProvider>() ?? TimeProvider.System;
|
||||
|
||||
// Add deprecation headers
|
||||
httpContext.AddDeprecationHeaders(info);
|
||||
@@ -99,7 +100,7 @@ public static class DeprecationHeaderExtensions
|
||||
httpContext.Connection.RemoteIpAddress);
|
||||
|
||||
// If past sunset, optionally return 410 Gone
|
||||
if (info.IsPastSunset)
|
||||
if (info.IsPastSunsetAt(timeProvider))
|
||||
{
|
||||
logger?.LogError(
|
||||
"Sunset endpoint accessed after removal date: {Method} {Path} - Was removed: {Sunset}",
|
||||
|
||||
@@ -18,10 +18,21 @@ public sealed record DeprecationInfo(
|
||||
/// <summary>
|
||||
/// Returns true if the sunset date has passed.
|
||||
/// </summary>
|
||||
public bool IsPastSunset => DateTimeOffset.UtcNow >= SunsetAt;
|
||||
public bool IsPastSunset => IsPastSunsetAt(TimeProvider.System);
|
||||
|
||||
/// <summary>
|
||||
/// Days remaining until sunset.
|
||||
/// </summary>
|
||||
public int DaysUntilSunset => Math.Max(0, (int)(SunsetAt - DateTimeOffset.UtcNow).TotalDays);
|
||||
public int DaysUntilSunset => DaysUntilSunsetAt(TimeProvider.System);
|
||||
|
||||
/// <summary>
|
||||
/// Returns true if the sunset date has passed, using the provided time provider.
|
||||
/// </summary>
|
||||
public bool IsPastSunsetAt(TimeProvider timeProvider) => timeProvider.GetUtcNow() >= SunsetAt;
|
||||
|
||||
/// <summary>
|
||||
/// Days remaining until sunset, using the provided time provider.
|
||||
/// </summary>
|
||||
public int DaysUntilSunsetAt(TimeProvider timeProvider) =>
|
||||
Math.Max(0, (int)(SunsetAt - timeProvider.GetUtcNow()).TotalDays);
|
||||
}
|
||||
|
||||
@@ -44,10 +44,14 @@ public sealed record DeprecationClientInfo(
|
||||
public sealed class DeprecationNotificationService : IDeprecationNotificationService
|
||||
{
|
||||
private readonly ILogger<DeprecationNotificationService> _logger;
|
||||
private readonly TimeProvider _timeProvider;
|
||||
|
||||
public DeprecationNotificationService(ILogger<DeprecationNotificationService> logger)
|
||||
public DeprecationNotificationService(
|
||||
ILogger<DeprecationNotificationService> logger,
|
||||
TimeProvider? timeProvider = null)
|
||||
{
|
||||
_logger = logger;
|
||||
_timeProvider = timeProvider ?? TimeProvider.System;
|
||||
}
|
||||
|
||||
public Task RecordDeprecatedAccessAsync(
|
||||
@@ -67,7 +71,7 @@ public sealed class DeprecationNotificationService : IDeprecationNotificationSer
|
||||
path,
|
||||
info.DeprecatedAt,
|
||||
info.SunsetAt,
|
||||
info.DaysUntilSunset,
|
||||
info.DaysUntilSunsetAt(_timeProvider),
|
||||
info.SuccessorPath,
|
||||
clientInfo.ClientIp,
|
||||
clientInfo.UserAgent,
|
||||
@@ -81,7 +85,7 @@ public sealed class DeprecationNotificationService : IDeprecationNotificationSer
|
||||
new KeyValuePair<string, object?>("method", method),
|
||||
new KeyValuePair<string, object?>("path", path),
|
||||
new KeyValuePair<string, object?>("successor", info.SuccessorPath),
|
||||
new KeyValuePair<string, object?>("days_until_sunset", info.DaysUntilSunset));
|
||||
new KeyValuePair<string, object?>("days_until_sunset", info.DaysUntilSunsetAt(_timeProvider)));
|
||||
|
||||
return Task.CompletedTask;
|
||||
}
|
||||
|
||||
@@ -1,6 +1,7 @@
|
||||
using System.Globalization;
|
||||
using System.Text.Json;
|
||||
using Microsoft.Extensions.Logging;
|
||||
using StellaOps.Determinism;
|
||||
using StellaOps.ExportCenter.Core.Domain;
|
||||
|
||||
namespace StellaOps.ExportCenter.WebService.Distribution;
|
||||
@@ -13,14 +14,17 @@ public sealed class ExportDistributionLifecycle : IExportDistributionLifecycle
|
||||
private readonly IExportDistributionRepository _repository;
|
||||
private readonly ILogger<ExportDistributionLifecycle> _logger;
|
||||
private readonly TimeProvider _timeProvider;
|
||||
private readonly IGuidProvider _guidProvider;
|
||||
|
||||
public ExportDistributionLifecycle(
|
||||
IExportDistributionRepository repository,
|
||||
ILogger<ExportDistributionLifecycle> logger,
|
||||
IGuidProvider guidProvider,
|
||||
TimeProvider? timeProvider = null)
|
||||
{
|
||||
_repository = repository ?? throw new ArgumentNullException(nameof(repository));
|
||||
_logger = logger ?? throw new ArgumentNullException(nameof(logger));
|
||||
_guidProvider = guidProvider ?? throw new ArgumentNullException(nameof(guidProvider));
|
||||
_timeProvider = timeProvider ?? TimeProvider.System;
|
||||
}
|
||||
|
||||
@@ -53,7 +57,7 @@ public sealed class ExportDistributionLifecycle : IExportDistributionLifecycle
|
||||
|
||||
var distribution = new ExportDistribution
|
||||
{
|
||||
DistributionId = Guid.NewGuid(),
|
||||
DistributionId = _guidProvider.NewGuid(),
|
||||
RunId = runId,
|
||||
TenantId = tenantId,
|
||||
Kind = target.Kind,
|
||||
|
||||
@@ -1,4 +1,6 @@
|
||||
using Microsoft.Extensions.DependencyInjection;
|
||||
using Microsoft.Extensions.DependencyInjection.Extensions;
|
||||
using StellaOps.Determinism;
|
||||
using StellaOps.ExportCenter.WebService.Distribution.Oci;
|
||||
|
||||
namespace StellaOps.ExportCenter.WebService.Distribution;
|
||||
@@ -13,6 +15,8 @@ public static class ExportDistributionServiceCollectionExtensions
|
||||
/// </summary>
|
||||
public static IServiceCollection AddExportDistribution(this IServiceCollection services)
|
||||
{
|
||||
services.TryAddSingleton(TimeProvider.System);
|
||||
services.TryAddSingleton<IGuidProvider, SystemGuidProvider>();
|
||||
services.AddSingleton<IExportDistributionRepository, InMemoryExportDistributionRepository>();
|
||||
services.AddSingleton<IExportDistributionLifecycle, ExportDistributionLifecycle>();
|
||||
|
||||
@@ -25,6 +29,8 @@ public static class ExportDistributionServiceCollectionExtensions
|
||||
public static IServiceCollection AddExportDistribution<TRepository>(this IServiceCollection services)
|
||||
where TRepository : class, IExportDistributionRepository
|
||||
{
|
||||
services.TryAddSingleton(TimeProvider.System);
|
||||
services.TryAddSingleton<IGuidProvider, SystemGuidProvider>();
|
||||
services.AddSingleton<IExportDistributionRepository, TRepository>();
|
||||
services.AddSingleton<IExportDistributionLifecycle, ExportDistributionLifecycle>();
|
||||
|
||||
|
||||
@@ -1,4 +1,5 @@
|
||||
using System.Collections.Concurrent;
|
||||
using Microsoft.Extensions.Options;
|
||||
using StellaOps.ExportCenter.Core.Domain;
|
||||
|
||||
namespace StellaOps.ExportCenter.WebService.Distribution;
|
||||
@@ -10,6 +11,16 @@ public sealed class InMemoryExportDistributionRepository : IExportDistributionRe
|
||||
{
|
||||
private readonly ConcurrentDictionary<Guid, ExportDistribution> _distributions = new();
|
||||
private readonly ConcurrentDictionary<string, Guid> _idempotencyIndex = new();
|
||||
private readonly TimeProvider _timeProvider;
|
||||
private readonly InMemoryExportDistributionOptions _options;
|
||||
|
||||
public InMemoryExportDistributionRepository(
|
||||
TimeProvider timeProvider,
|
||||
IOptions<InMemoryExportDistributionOptions>? options = null)
|
||||
{
|
||||
_timeProvider = timeProvider ?? throw new ArgumentNullException(nameof(timeProvider));
|
||||
_options = options?.Value ?? InMemoryExportDistributionOptions.Default;
|
||||
}
|
||||
|
||||
/// <inheritdoc />
|
||||
public Task<ExportDistribution?> GetByIdAsync(
|
||||
@@ -17,6 +28,8 @@ public sealed class InMemoryExportDistributionRepository : IExportDistributionRe
|
||||
Guid distributionId,
|
||||
CancellationToken cancellationToken = default)
|
||||
{
|
||||
PruneStale(_timeProvider.GetUtcNow());
|
||||
|
||||
_distributions.TryGetValue(distributionId, out var distribution);
|
||||
|
||||
if (distribution is not null && distribution.TenantId != tenantId)
|
||||
@@ -47,6 +60,8 @@ public sealed class InMemoryExportDistributionRepository : IExportDistributionRe
|
||||
Guid runId,
|
||||
CancellationToken cancellationToken = default)
|
||||
{
|
||||
PruneStale(_timeProvider.GetUtcNow());
|
||||
|
||||
var distributions = _distributions.Values
|
||||
.Where(d => d.TenantId == tenantId && d.RunId == runId)
|
||||
.OrderBy(d => d.CreatedAt)
|
||||
@@ -62,6 +77,8 @@ public sealed class InMemoryExportDistributionRepository : IExportDistributionRe
|
||||
int limit = 100,
|
||||
CancellationToken cancellationToken = default)
|
||||
{
|
||||
PruneStale(_timeProvider.GetUtcNow());
|
||||
|
||||
var distributions = _distributions.Values
|
||||
.Where(d => d.TenantId == tenantId && d.Status == status)
|
||||
.OrderBy(d => d.CreatedAt)
|
||||
@@ -77,6 +94,8 @@ public sealed class InMemoryExportDistributionRepository : IExportDistributionRe
|
||||
int limit = 100,
|
||||
CancellationToken cancellationToken = default)
|
||||
{
|
||||
PruneStale(asOf);
|
||||
|
||||
var expired = _distributions.Values
|
||||
.Where(d =>
|
||||
d.RetentionExpiresAt.HasValue &&
|
||||
@@ -94,6 +113,8 @@ public sealed class InMemoryExportDistributionRepository : IExportDistributionRe
|
||||
ExportDistribution distribution,
|
||||
CancellationToken cancellationToken = default)
|
||||
{
|
||||
PruneStale(_timeProvider.GetUtcNow());
|
||||
|
||||
if (!_distributions.TryAdd(distribution.DistributionId, distribution))
|
||||
{
|
||||
throw new InvalidOperationException(
|
||||
@@ -193,7 +214,7 @@ public sealed class InMemoryExportDistributionRepository : IExportDistributionRe
|
||||
DistributedAt = distribution.DistributedAt,
|
||||
VerifiedAt = distribution.VerifiedAt,
|
||||
UpdatedAt = distribution.UpdatedAt,
|
||||
DeletedAt = DateTimeOffset.UtcNow
|
||||
DeletedAt = _timeProvider.GetUtcNow()
|
||||
};
|
||||
|
||||
_distributions[distributionId] = updated;
|
||||
@@ -232,6 +253,8 @@ public sealed class InMemoryExportDistributionRepository : IExportDistributionRe
|
||||
Guid runId,
|
||||
CancellationToken cancellationToken = default)
|
||||
{
|
||||
PruneStale(_timeProvider.GetUtcNow());
|
||||
|
||||
var distributions = _distributions.Values
|
||||
.Where(d => d.TenantId == tenantId && d.RunId == runId)
|
||||
.ToList();
|
||||
@@ -259,4 +282,61 @@ public sealed class InMemoryExportDistributionRepository : IExportDistributionRe
|
||||
_distributions.Clear();
|
||||
_idempotencyIndex.Clear();
|
||||
}
|
||||
|
||||
private void PruneStale(DateTimeOffset now)
|
||||
{
|
||||
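// Two-stage pruning: drop entries older than the retention window, then trim the oldest entries above MaxEntries.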
if (_options.RetentionPeriod > TimeSpan.Zero)
|
||||
{
|
||||
var cutoff = now - _options.RetentionPeriod;
|
||||
foreach (var (distributionId, distribution) in _distributions)
|
||||
{
|
||||
if (distribution.CreatedAt < cutoff)
|
||||
{
|
||||
RemoveDistribution(distributionId, distribution);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (_options.MaxEntries > 0 && _distributions.Count > _options.MaxEntries)
|
||||
{
|
||||
var excess = _distributions.Count - _options.MaxEntries;
|
||||
var toRemove = _distributions
|
||||
.OrderBy(kvp => kvp.Value.CreatedAt)
|
||||
.Take(excess)
|
||||
.ToList();
|
||||
|
||||
foreach (var entry in toRemove)
|
||||
{
|
||||
RemoveDistribution(entry.Key, entry.Value);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private void RemoveDistribution(Guid distributionId, ExportDistribution distribution)
|
||||
{
|
||||
_distributions.TryRemove(distributionId, out _);
|
||||
|
||||
if (!string.IsNullOrEmpty(distribution.IdempotencyKey))
|
||||
{
|
||||
_idempotencyIndex.TryRemove(distribution.IdempotencyKey, out _);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Options for in-memory distribution retention.
|
||||
/// </summary>
|
||||
public sealed record InMemoryExportDistributionOptions
|
||||
{
|
||||
/// <summary>
|
||||
/// Maximum number of distributions to keep in memory.
|
||||
/// </summary>
|
||||
public int MaxEntries { get; init; } = 500;
|
||||
|
||||
/// <summary>
|
||||
/// Retention period for in-memory distributions.
|
||||
/// </summary>
|
||||
public TimeSpan RetentionPeriod { get; init; } = TimeSpan.FromHours(24);
|
||||
|
||||
public static InMemoryExportDistributionOptions Default => new();
|
||||
}
|
||||
|
||||
@@ -29,6 +29,11 @@ public sealed class OciDistributionOptions
|
||||
/// </summary>
|
||||
public bool AllowHttpRegistries { get; set; }
|
||||
|
||||
/// <summary>
|
||||
/// Whether to allow invalid TLS certificates (testing only).
|
||||
/// </summary>
|
||||
public bool AllowInsecureTls { get; set; }
|
||||
|
||||
/// <summary>
|
||||
/// Maximum retry attempts for registry operations.
|
||||
/// </summary>
|
||||
|
||||
@@ -1,5 +1,6 @@
|
||||
using Microsoft.Extensions.Configuration;
|
||||
using Microsoft.Extensions.DependencyInjection;
|
||||
using Microsoft.Extensions.Options;
|
||||
|
||||
namespace StellaOps.ExportCenter.WebService.Distribution.Oci;
|
||||
|
||||
@@ -28,10 +29,17 @@ public static class OciDistributionServiceExtensions
|
||||
client.DefaultRequestHeaders.Add("User-Agent", "StellaOps-ExportCenter/1.0");
|
||||
client.DefaultRequestHeaders.Add("Accept", "application/json");
|
||||
})
|
||||
.ConfigurePrimaryHttpMessageHandler(() => new HttpClientHandler
|
||||
.ConfigurePrimaryHttpMessageHandler(sp =>
|
||||
{
|
||||
// Allow configurable TLS validation (for testing with self-signed certs)
|
||||
ServerCertificateCustomValidationCallback = (_, _, _, _) => true
|
||||
var options = sp.GetRequiredService<IOptions<OciDistributionOptions>>().Value;
|
||||
var handler = new HttpClientHandler();
|
||||
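// Certificate validation is bypassed only when AllowInsecureTls is explicitly enabled; otherwise the default handler keeps full TLS verification.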
if (options.AllowInsecureTls)
|
||||
{
|
||||
handler.ServerCertificateCustomValidationCallback =
|
||||
HttpClientHandler.DangerousAcceptAnyServerCertificateValidator;
|
||||
}
|
||||
|
||||
return handler;
|
||||
});
|
||||
|
||||
// Register the distribution client
|
||||
@@ -58,6 +66,18 @@ public static class OciDistributionServiceExtensions
|
||||
{
|
||||
client.DefaultRequestHeaders.Add("User-Agent", "StellaOps-ExportCenter/1.0");
|
||||
client.DefaultRequestHeaders.Add("Accept", "application/json");
|
||||
})
|
||||
.ConfigurePrimaryHttpMessageHandler(sp =>
|
||||
{
|
||||
var options = sp.GetRequiredService<IOptions<OciDistributionOptions>>().Value;
|
||||
var handler = new HttpClientHandler();
|
||||
if (options.AllowInsecureTls)
|
||||
{
|
||||
handler.ServerCertificateCustomValidationCallback =
|
||||
HttpClientHandler.DangerousAcceptAnyServerCertificateValidator;
|
||||
}
|
||||
|
||||
return handler;
|
||||
});
|
||||
|
||||
// Register the distribution client
|
||||
|
||||
@@ -423,10 +423,12 @@ public sealed class RegistryGlobalSettings
|
||||
public sealed class OciHttpClientFactory
|
||||
{
|
||||
private readonly OciRegistryConfig _config;
|
||||
private readonly IHttpClientFactory _httpClientFactory;
|
||||
|
||||
public OciHttpClientFactory(OciRegistryConfig config)
|
||||
public OciHttpClientFactory(OciRegistryConfig config, IHttpClientFactory httpClientFactory)
|
||||
{
|
||||
_config = config ?? throw new ArgumentNullException(nameof(config));
|
||||
_httpClientFactory = httpClientFactory ?? throw new ArgumentNullException(nameof(httpClientFactory));
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
@@ -435,52 +437,13 @@ public sealed class OciHttpClientFactory
|
||||
public HttpClient CreateClient(string registry)
|
||||
{
|
||||
var endpointConfig = _config.GetEndpointConfig(registry);
|
||||
var handler = CreateHandler(endpointConfig);
|
||||
|
||||
var client = new HttpClient(handler)
|
||||
{
|
||||
Timeout = _config.Global.Timeout
|
||||
};
|
||||
|
||||
var client = _httpClientFactory.CreateClient(OciDistributionOptions.HttpClientName);
|
||||
client.Timeout = _config.Global.Timeout;
|
||||
client.BaseAddress = new Uri(endpointConfig.GetRegistryUrl());
|
||||
client.DefaultRequestHeaders.UserAgent.ParseAdd(_config.Global.UserAgent);
|
||||
|
||||
return client;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Creates an HTTP message handler with TLS configuration.
|
||||
/// </summary>
|
||||
private static HttpClientHandler CreateHandler(RegistryEndpointConfig config)
|
||||
{
|
||||
var handler = new HttpClientHandler();
|
||||
|
||||
// Configure TLS
|
||||
if (config.Tls is not null)
|
||||
{
|
||||
if (config.Tls.SkipVerify)
|
||||
{
|
||||
handler.ServerCertificateCustomValidationCallback =
|
||||
HttpClientHandler.DangerousAcceptAnyServerCertificateValidator;
|
||||
}
|
||||
else
|
||||
{
|
||||
var callback = config.Tls.GetCertificateValidationCallback();
|
||||
if (callback is not null)
|
||||
{
|
||||
handler.ServerCertificateCustomValidationCallback = callback;
|
||||
}
|
||||
}
|
||||
|
||||
// Load client certificate for mTLS
|
||||
var clientCert = config.Tls.LoadClientCertificate();
|
||||
if (clientCert is not null)
|
||||
{
|
||||
handler.ClientCertificates.Add(clientCert);
|
||||
}
|
||||
}
|
||||
|
||||
return handler;
|
||||
}
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
@@ -516,10 +479,10 @@ public sealed record RegistryCapabilities
|
||||
/// <summary>
|
||||
/// When capabilities were probed.
|
||||
/// </summary>
|
||||
public DateTimeOffset ProbedAt { get; init; } = DateTimeOffset.UtcNow;
|
||||
public DateTimeOffset ProbedAt { get; init; } = TimeProvider.System.GetUtcNow();
|
||||
|
||||
/// <summary>
|
||||
/// Whether capabilities are stale and should be re-probed.
|
||||
/// </summary>
|
||||
public bool IsStale(TimeSpan maxAge) => DateTimeOffset.UtcNow - ProbedAt > maxAge;
|
||||
public bool IsStale(TimeSpan maxAge) => TimeProvider.System.GetUtcNow() - ProbedAt > maxAge;
|
||||
}
|
||||
|
||||
@@ -1,5 +1,6 @@
|
||||
using Microsoft.Extensions.DependencyInjection;
|
||||
using Microsoft.Extensions.DependencyInjection.Extensions;
|
||||
using StellaOps.Determinism;
|
||||
|
||||
namespace StellaOps.ExportCenter.WebService.EvidenceLocker;
|
||||
|
||||
@@ -35,7 +36,13 @@ public static class EvidenceLockerServiceCollectionExtensions
|
||||
var options = serviceProvider.GetService<Microsoft.Extensions.Options.IOptions<ExportEvidenceLockerOptions>>()?.Value
|
||||
?? ExportEvidenceLockerOptions.Default;
|
||||
|
||||
client.BaseAddress = new Uri(options.BaseUrl);
|
||||
if (string.IsNullOrWhiteSpace(options.BaseUrl) ||
|
||||
!Uri.TryCreate(options.BaseUrl, UriKind.Absolute, out var baseUri))
|
||||
{
|
||||
throw new InvalidOperationException("Evidence locker BaseUrl must be a valid absolute URI.");
|
||||
}
|
||||
|
||||
client.BaseAddress = baseUri;
|
||||
client.Timeout = options.Timeout;
|
||||
client.DefaultRequestHeaders.Accept.Add(
|
||||
new System.Net.Http.Headers.MediaTypeWithQualityHeaderValue("application/json"));
|
||||
@@ -54,6 +61,8 @@ public static class EvidenceLockerServiceCollectionExtensions
|
||||
{
|
||||
ArgumentNullException.ThrowIfNull(services);
|
||||
|
||||
services.TryAddSingleton(TimeProvider.System);
|
||||
services.TryAddSingleton<IGuidProvider, SystemGuidProvider>();
|
||||
services.TryAddSingleton<IExportMerkleTreeCalculator, ExportMerkleTreeCalculator>();
|
||||
services.TryAddSingleton<IExportEvidenceLockerClient, InMemoryExportEvidenceLockerClient>();
|
||||
|
||||
@@ -69,11 +78,17 @@ public sealed class InMemoryExportEvidenceLockerClient : IExportEvidenceLockerCl
|
||||
private readonly IExportMerkleTreeCalculator _merkleCalculator;
|
||||
private readonly Dictionary<string, ExportBundleManifest> _bundles = new(StringComparer.OrdinalIgnoreCase);
|
||||
private readonly object _lock = new();
|
||||
private int _bundleCounter;
|
||||
private readonly TimeProvider _timeProvider;
|
||||
private readonly IGuidProvider _guidProvider;
|
||||
|
||||
public InMemoryExportEvidenceLockerClient(IExportMerkleTreeCalculator merkleCalculator)
|
||||
public InMemoryExportEvidenceLockerClient(
|
||||
IExportMerkleTreeCalculator merkleCalculator,
|
||||
TimeProvider timeProvider,
|
||||
IGuidProvider guidProvider)
|
||||
{
|
||||
_merkleCalculator = merkleCalculator ?? throw new ArgumentNullException(nameof(merkleCalculator));
|
||||
_timeProvider = timeProvider ?? throw new ArgumentNullException(nameof(timeProvider));
|
||||
_guidProvider = guidProvider ?? throw new ArgumentNullException(nameof(guidProvider));
|
||||
}
|
||||
|
||||
public Task<ExportEvidenceSnapshotResult> PushSnapshotAsync(
|
||||
@@ -82,7 +97,7 @@ public sealed class InMemoryExportEvidenceLockerClient : IExportEvidenceLockerCl
|
||||
{
|
||||
ArgumentNullException.ThrowIfNull(request);
|
||||
|
||||
var bundleId = Guid.NewGuid().ToString();
|
||||
var bundleId = _guidProvider.NewGuid().ToString("N");
|
||||
var entries = request.Materials.Select(m => new ExportManifestEntry
|
||||
{
|
||||
Section = m.Section,
|
||||
@@ -91,7 +106,9 @@ public sealed class InMemoryExportEvidenceLockerClient : IExportEvidenceLockerCl
|
||||
SizeBytes = m.SizeBytes,
|
||||
MediaType = m.MediaType ?? "application/octet-stream",
|
||||
Attributes = m.Attributes
|
||||
}).ToList();
|
||||
})
|
||||
.OrderBy(e => e.CanonicalPath, StringComparer.Ordinal)
|
||||
.ToList();
|
||||
|
||||
var rootHash = _merkleCalculator.CalculateRootHash(entries);
|
||||
|
||||
@@ -102,7 +119,7 @@ public sealed class InMemoryExportEvidenceLockerClient : IExportEvidenceLockerCl
|
||||
ProfileId = request.ProfileId,
|
||||
ExportRunId = request.ExportRunId,
|
||||
Kind = request.Kind,
|
||||
CreatedAt = DateTimeOffset.UtcNow,
|
||||
CreatedAt = _timeProvider.GetUtcNow(),
|
||||
RootHash = rootHash,
|
||||
Metadata = request.Metadata ?? new Dictionary<string, string>(),
|
||||
Entries = entries,
|
||||
@@ -112,7 +129,6 @@ public sealed class InMemoryExportEvidenceLockerClient : IExportEvidenceLockerCl
|
||||
lock (_lock)
|
||||
{
|
||||
_bundles[bundleId] = manifest;
|
||||
_bundleCounter++;
|
||||
}
|
||||
|
||||
return Task.FromResult(ExportEvidenceSnapshotResult.Succeeded(bundleId, rootHash));
|
||||
@@ -186,7 +202,6 @@ public sealed class InMemoryExportEvidenceLockerClient : IExportEvidenceLockerCl
|
||||
lock (_lock)
|
||||
{
|
||||
_bundles.Clear();
|
||||
_bundleCounter = 0;
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -5,6 +5,8 @@ using System.Collections.Concurrent;
|
||||
using System.Security.Cryptography;
|
||||
using System.Text;
|
||||
using System.Text.Json;
|
||||
using Microsoft.Extensions.Options;
|
||||
using StellaOps.Determinism;
|
||||
using StellaOps.Policy.Exceptions.Models;
|
||||
using StellaOps.Policy.Exceptions.Repositories;
|
||||
|
||||
@@ -20,6 +22,8 @@ public sealed class ExceptionReportGenerator : IExceptionReportGenerator
|
||||
private readonly ConcurrentDictionary<string, ReportJob> _jobs = new();
|
||||
private readonly ILogger<ExceptionReportGenerator> _logger;
|
||||
private readonly TimeProvider _timeProvider;
|
||||
private readonly IGuidProvider _guidProvider;
|
||||
private readonly ExceptionReportGeneratorOptions _options;
|
||||
|
||||
private static readonly JsonSerializerOptions JsonOptions = new()
|
||||
{
|
||||
@@ -32,20 +36,25 @@ public sealed class ExceptionReportGenerator : IExceptionReportGenerator
|
||||
IExceptionRepository exceptionRepository,
|
||||
IExceptionApplicationRepository applicationRepository,
|
||||
ILogger<ExceptionReportGenerator> logger,
|
||||
TimeProvider? timeProvider = null)
|
||||
IGuidProvider guidProvider,
|
||||
TimeProvider? timeProvider = null,
|
||||
IOptions<ExceptionReportGeneratorOptions>? options = null)
|
||||
{
|
||||
_exceptionRepository = exceptionRepository;
|
||||
_applicationRepository = applicationRepository;
|
||||
_logger = logger;
|
||||
_timeProvider = timeProvider ?? TimeProvider.System;
|
||||
_guidProvider = guidProvider ?? throw new ArgumentNullException(nameof(guidProvider));
|
||||
_options = options?.Value ?? ExceptionReportGeneratorOptions.Default;
|
||||
}
|
||||
|
||||
public async Task<ExceptionReportJobResponse> CreateReportAsync(
|
||||
ExceptionReportRequest request,
|
||||
CancellationToken cancellationToken = default)
|
||||
{
|
||||
var jobId = $"exc-rpt-{Guid.NewGuid():N}";
|
||||
var now = _timeProvider.GetUtcNow();
|
||||
PruneExpiredJobs(now);
|
||||
var jobId = $"exc-rpt-{_guidProvider.NewGuid():N}";
|
||||
|
||||
var job = new ReportJob
|
||||
{
|
||||
@@ -64,7 +73,7 @@ public sealed class ExceptionReportGenerator : IExceptionReportGenerator
|
||||
jobId, request.TenantId);
|
||||
|
||||
// Start generation in background
|
||||
_ = Task.Run(() => GenerateReportAsync(job, cancellationToken), cancellationToken);
|
||||
_ = GenerateReportAsync(job, cancellationToken);
|
||||
|
||||
return new ExceptionReportJobResponse
|
||||
{
|
||||
@@ -253,12 +262,9 @@ public sealed class ExceptionReportGenerator : IExceptionReportGenerator
|
||||
Summary = new ExceptionReportSummary
|
||||
{
|
||||
TotalExceptions = entries.Count,
|
||||
ByStatus = entries.GroupBy(e => e.Exception.Status)
|
||||
.ToDictionary(g => g.Key, g => g.Count()),
|
||||
ByType = entries.GroupBy(e => e.Exception.Type)
|
||||
.ToDictionary(g => g.Key, g => g.Count()),
|
||||
ByReason = entries.GroupBy(e => e.Exception.ReasonCode)
|
||||
.ToDictionary(g => g.Key, g => g.Count())
|
||||
ByStatus = BuildSummaryMap(entries, e => e.Exception.Status),
|
||||
ByType = BuildSummaryMap(entries, e => e.Exception.Type),
|
||||
ByReason = BuildSummaryMap(entries, e => e.Exception.ReasonCode)
|
||||
},
|
||||
Exceptions = entries
|
||||
};
|
||||
@@ -353,6 +359,52 @@ public sealed class ExceptionReportGenerator : IExceptionReportGenerator
|
||||
FileSizeBytes = job.FileSizeBytes
|
||||
};
|
||||
|
||||
private void PruneExpiredJobs(DateTimeOffset now)
|
||||
{
|
||||
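// Age out jobs past the retention window (falling back to CreatedAt when never completed), then evict the oldest jobs above MaxStoredJobs.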
if (_options.RetentionPeriod > TimeSpan.Zero)
|
||||
{
|
||||
var cutoff = now - _options.RetentionPeriod;
|
||||
foreach (var (jobId, job) in _jobs)
|
||||
{
|
||||
var completedAt = job.CompletedAt ?? job.CreatedAt;
|
||||
if (completedAt < cutoff)
|
||||
{
|
||||
_jobs.TryRemove(jobId, out _);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (_options.MaxStoredJobs > 0 && _jobs.Count > _options.MaxStoredJobs)
|
||||
{
|
||||
var excess = _jobs.Count - _options.MaxStoredJobs;
|
||||
var toRemove = _jobs
|
||||
.OrderBy(kvp => kvp.Value.CreatedAt)
|
||||
.Take(excess)
|
||||
.Select(kvp => kvp.Key)
|
||||
.ToList();
|
||||
|
||||
foreach (var key in toRemove)
|
||||
{
|
||||
_jobs.TryRemove(key, out _);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private static SortedDictionary<string, int> BuildSummaryMap(
|
||||
IEnumerable<ExceptionReportEntry> entries,
|
||||
Func<ExceptionReportEntry, string> keySelector)
|
||||
{
|
||||
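// Ordinal-sorted keys keep the summary maps deterministic so serialized reports are stable across runs.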
var counts = new Dictionary<string, int>(StringComparer.Ordinal);
|
||||
foreach (var entry in entries)
|
||||
{
|
||||
var key = keySelector(entry);
|
||||
counts.TryGetValue(key, out var existing);
|
||||
counts[key] = existing + 1;
|
||||
}
|
||||
|
||||
return new SortedDictionary<string, int>(counts, StringComparer.Ordinal);
|
||||
}
|
||||
|
||||
private sealed class ReportJob
|
||||
{
|
||||
public required string JobId { get; init; }
|
||||
@@ -399,9 +451,9 @@ internal sealed record ExceptionReportFilter
|
||||
internal sealed record ExceptionReportSummary
|
||||
{
|
||||
public int TotalExceptions { get; init; }
|
||||
public Dictionary<string, int> ByStatus { get; init; } = new();
|
||||
public Dictionary<string, int> ByType { get; init; } = new();
|
||||
public Dictionary<string, int> ByReason { get; init; } = new();
|
||||
public SortedDictionary<string, int> ByStatus { get; init; } = new(StringComparer.Ordinal);
|
||||
public SortedDictionary<string, int> ByType { get; init; } = new(StringComparer.Ordinal);
|
||||
public SortedDictionary<string, int> ByReason { get; init; } = new(StringComparer.Ordinal);
|
||||
}
|
||||
|
||||
internal sealed record ExceptionReportEntry
|
||||
@@ -461,3 +513,21 @@ internal sealed record ExceptionReportApplication
|
||||
public required string EffectName { get; init; }
|
||||
public required DateTimeOffset AppliedAt { get; init; }
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Options for exception report job retention.
|
||||
/// </summary>
|
||||
public sealed record ExceptionReportGeneratorOptions
|
||||
{
|
||||
/// <summary>
|
||||
/// Maximum number of stored jobs.
|
||||
/// </summary>
|
||||
public int MaxStoredJobs { get; init; } = 200;
|
||||
|
||||
/// <summary>
|
||||
/// Retention period for stored jobs.
|
||||
/// </summary>
|
||||
public TimeSpan RetentionPeriod { get; init; } = TimeSpan.FromHours(24);
|
||||
|
||||
public static ExceptionReportGeneratorOptions Default => new();
|
||||
}
|
||||
|
||||
@@ -1,6 +1,10 @@
|
||||
// Copyright (c) StellaOps Contributors. Licensed under the AGPL-3.0-or-later.
|
||||
// SPDX-License-Identifier: AGPL-3.0-or-later
|
||||
|
||||
using Microsoft.Extensions.DependencyInjection;
|
||||
using Microsoft.Extensions.DependencyInjection.Extensions;
|
||||
using StellaOps.Determinism;
|
||||
|
||||
namespace StellaOps.ExportCenter.WebService.ExceptionReport;
|
||||
|
||||
/// <summary>
|
||||
@@ -15,6 +19,8 @@ public static class ExceptionReportServiceCollectionExtensions
|
||||
/// <returns>The service collection for chaining.</returns>
|
||||
public static IServiceCollection AddExceptionReportServices(this IServiceCollection services)
|
||||
{
|
||||
services.TryAddSingleton(TimeProvider.System);
|
||||
services.TryAddSingleton<IGuidProvider, SystemGuidProvider>();
|
||||
services.AddSingleton<IExceptionReportGenerator, ExceptionReportGenerator>();
|
||||
return services;
|
||||
}
|
||||
|
||||
@@ -2,6 +2,8 @@ using System.Collections.Concurrent;
|
||||
using System.Text.Json;
|
||||
using System.Text.Json.Serialization;
|
||||
using Microsoft.Extensions.Logging;
|
||||
using Microsoft.Extensions.Options;
|
||||
using StellaOps.Determinism;
|
||||
using StellaOps.ExportCenter.WebService.Telemetry;
|
||||
using StellaOps.ExportCenter.WebService.Timeline;
|
||||
|
||||
@@ -24,6 +26,8 @@ public sealed class ExportIncidentManager : IExportIncidentManager
|
||||
private readonly IExportTimelinePublisher _timelinePublisher;
|
||||
private readonly IExportNotificationEmitter _notificationEmitter;
|
||||
private readonly TimeProvider _timeProvider;
|
||||
private readonly IGuidProvider _guidProvider;
|
||||
private readonly ExportIncidentManagerOptions _options;
|
||||
|
||||
// In-memory store for incidents (production would use persistent storage)
|
||||
private readonly ConcurrentDictionary<string, ExportIncident> _incidents = new();
|
||||
@@ -32,12 +36,16 @@ public sealed class ExportIncidentManager : IExportIncidentManager
|
||||
ILogger<ExportIncidentManager> logger,
|
||||
IExportTimelinePublisher timelinePublisher,
|
||||
IExportNotificationEmitter notificationEmitter,
|
||||
TimeProvider? timeProvider = null)
|
||||
IGuidProvider guidProvider,
|
||||
TimeProvider? timeProvider = null,
|
||||
IOptions<ExportIncidentManagerOptions>? options = null)
|
||||
{
|
||||
_logger = logger ?? throw new ArgumentNullException(nameof(logger));
|
||||
_timelinePublisher = timelinePublisher ?? throw new ArgumentNullException(nameof(timelinePublisher));
|
||||
_notificationEmitter = notificationEmitter ?? throw new ArgumentNullException(nameof(notificationEmitter));
|
||||
_timeProvider = timeProvider ?? TimeProvider.System;
|
||||
_guidProvider = guidProvider ?? throw new ArgumentNullException(nameof(guidProvider));
|
||||
_options = options?.Value ?? ExportIncidentManagerOptions.Default;
|
||||
}
|
||||
|
||||
public async Task<ExportIncidentResult> ActivateIncidentAsync(
|
||||
@@ -45,6 +53,7 @@ public sealed class ExportIncidentManager : IExportIncidentManager
|
||||
CancellationToken cancellationToken = default)
|
||||
{
|
||||
ArgumentNullException.ThrowIfNull(request);
|
||||
PruneExpiredIncidents(_timeProvider.GetUtcNow());
|
||||
|
||||
try
|
||||
{
|
||||
@@ -355,6 +364,8 @@ public sealed class ExportIncidentManager : IExportIncidentManager
|
||||
public Task<ExportIncidentModeStatus> GetIncidentModeStatusAsync(
|
||||
CancellationToken cancellationToken = default)
|
||||
{
|
||||
PruneExpiredIncidents(_timeProvider.GetUtcNow());
|
||||
|
||||
var activeIncidents = _incidents.Values
|
||||
.Where(i => i.Status is not (ExportIncidentStatus.Resolved or ExportIncidentStatus.FalsePositive))
|
||||
.OrderByDescending(i => i.Severity)
|
||||
@@ -377,6 +388,8 @@ public sealed class ExportIncidentManager : IExportIncidentManager
|
||||
public Task<IReadOnlyList<ExportIncident>> GetActiveIncidentsAsync(
|
||||
CancellationToken cancellationToken = default)
|
||||
{
|
||||
PruneExpiredIncidents(_timeProvider.GetUtcNow());
|
||||
|
||||
var activeIncidents = _incidents.Values
|
||||
.Where(i => i.Status is not (ExportIncidentStatus.Resolved or ExportIncidentStatus.FalsePositive))
|
||||
.OrderByDescending(i => i.Severity)
|
||||
@@ -399,6 +412,8 @@ public sealed class ExportIncidentManager : IExportIncidentManager
|
||||
bool includeResolved = true,
|
||||
CancellationToken cancellationToken = default)
|
||||
{
|
||||
PruneExpiredIncidents(_timeProvider.GetUtcNow());
|
||||
|
||||
var query = _incidents.Values.AsEnumerable();
|
||||
|
||||
if (!includeResolved)
|
||||
@@ -460,14 +475,45 @@ public sealed class ExportIncidentManager : IExportIncidentManager
|
||||
}
|
||||
}
|
||||
|
||||
private static string GenerateIncidentId()
|
||||
private void PruneExpiredIncidents(DateTimeOffset now)
|
||||
{
|
||||
return $"inc-{Guid.NewGuid():N}"[..20];
|
||||
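// Only Resolved and FalsePositive incidents are eligible for retention-based eviction; active incidents are removed only by the max-count cap.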
if (_options.RetentionPeriod > TimeSpan.Zero)
|
||||
{
|
||||
var cutoff = now - _options.RetentionPeriod;
|
||||
foreach (var (incidentId, incident) in _incidents)
|
||||
{
|
||||
if (incident.Status is ExportIncidentStatus.Resolved or ExportIncidentStatus.FalsePositive &&
|
||||
incident.LastUpdatedAt < cutoff)
|
||||
{
|
||||
_incidents.TryRemove(incidentId, out _);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (_options.MaxIncidentCount > 0 && _incidents.Count > _options.MaxIncidentCount)
|
||||
{
|
||||
var excess = _incidents.Count - _options.MaxIncidentCount;
|
||||
var toRemove = _incidents
|
||||
.OrderBy(kvp => kvp.Value.LastUpdatedAt)
|
||||
.Take(excess)
|
||||
.Select(kvp => kvp.Key)
|
||||
.ToList();
|
||||
|
||||
foreach (var key in toRemove)
|
||||
{
|
||||
_incidents.TryRemove(key, out _);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private static string GenerateUpdateId()
|
||||
private string GenerateIncidentId()
|
||||
{
|
||||
return $"upd-{Guid.NewGuid():N}"[..16];
|
||||
return $"inc-{_guidProvider.NewGuid():N}"[..20];
|
||||
}
|
||||
|
||||
private string GenerateUpdateId()
|
||||
{
|
||||
return $"upd-{_guidProvider.NewGuid():N}"[..16];
|
||||
}
|
||||
}
|
||||
|
||||
@@ -533,3 +579,21 @@ public sealed class LoggingNotificationEmitter : IExportNotificationEmitter
|
||||
return Task.CompletedTask;
|
||||
}
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Options for incident retention and limits.
|
||||
/// </summary>
|
||||
public sealed record ExportIncidentManagerOptions
|
||||
{
|
||||
/// <summary>
|
||||
/// Maximum number of incidents to retain in memory.
|
||||
/// </summary>
|
||||
public int MaxIncidentCount { get; init; } = 200;
|
||||
|
||||
/// <summary>
|
||||
/// Retention period for resolved incidents.
|
||||
/// </summary>
|
||||
public TimeSpan RetentionPeriod { get; init; } = TimeSpan.FromHours(24);
|
||||
|
||||
public static ExportIncidentManagerOptions Default => new();
|
||||
}
|
||||
|
||||
@@ -1,5 +1,6 @@
|
||||
using Microsoft.Extensions.DependencyInjection;
|
||||
using Microsoft.Extensions.DependencyInjection.Extensions;
|
||||
using StellaOps.Determinism;
|
||||
|
||||
namespace StellaOps.ExportCenter.WebService.Incident;
|
||||
|
||||
@@ -19,6 +20,7 @@ public static class IncidentServiceCollectionExtensions
|
||||
|
||||
// Register TimeProvider and IGuidProvider if not already registered
|
||||
services.TryAddSingleton(TimeProvider.System);
|
||||
services.TryAddSingleton<IGuidProvider, SystemGuidProvider>();
|
||||
|
||||
// Register notification emitter
|
||||
services.TryAddSingleton<IExportNotificationEmitter, LoggingNotificationEmitter>();
|
||||
|
||||
@@ -5,6 +5,8 @@ using System.Text.Json;
|
||||
using System.Text.Json.Serialization;
|
||||
using Microsoft.AspNetCore.Http.HttpResults;
|
||||
using Microsoft.AspNetCore.Mvc;
|
||||
using Microsoft.Extensions.Configuration;
|
||||
using Microsoft.Extensions.Hosting;
|
||||
|
||||
namespace StellaOps.ExportCenter.WebService;
|
||||
|
||||
@@ -34,9 +36,16 @@ public static class OpenApiDiscoveryEndpoints
|
||||
public static IEndpointRouteBuilder MapOpenApiDiscovery(this IEndpointRouteBuilder app)
|
||||
{
|
||||
var group = app.MapGroup("")
|
||||
.AllowAnonymous()
|
||||
.WithTags("discovery");
|
||||
|
||||
var configuration = app.ServiceProvider.GetService<IConfiguration>();
|
||||
var environment = app.ServiceProvider.GetService<IHostEnvironment>();
|
||||
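// Anonymous discovery defaults to Development only and can be overridden with the OpenApi:AllowAnonymous setting.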
var allowAnonymous = configuration?.GetValue("OpenApi:AllowAnonymous", environment?.IsDevelopment() ?? false) ?? false;
|
||||
if (allowAnonymous)
|
||||
{
|
||||
group.AllowAnonymous();
|
||||
}
|
||||
|
||||
group.MapGet("/.well-known/openapi", (Delegate)GetDiscoveryMetadata)
|
||||
.WithName("GetOpenApiDiscovery")
|
||||
.WithSummary("OpenAPI discovery metadata")
|
||||
|
||||
@@ -86,13 +86,16 @@ builder.Services.AddExceptionReportServices();
|
||||
builder.Services.AddLineageExportServices();
|
||||
|
||||
// Export API services (profiles, runs, artifacts)
|
||||
var allowInMemoryRepositories = builder.Configuration.GetValue(
|
||||
"Export:AllowInMemoryRepositories",
|
||||
builder.Environment.IsDevelopment());
|
||||
builder.Services.AddExportApiServices(options =>
|
||||
{
|
||||
options.MaxConcurrentRunsPerTenant = builder.Configuration.GetValue("Export:MaxConcurrentRunsPerTenant", 4);
|
||||
options.MaxConcurrentRunsPerProfile = builder.Configuration.GetValue("Export:MaxConcurrentRunsPerProfile", 2);
|
||||
options.QueueExcessRuns = builder.Configuration.GetValue("Export:QueueExcessRuns", true);
|
||||
options.MaxQueueSizePerTenant = builder.Configuration.GetValue("Export:MaxQueueSizePerTenant", 10);
|
||||
});
|
||||
}, allowInMemoryRepositories);
|
||||
|
||||
builder.Services.AddOpenApi();
|
||||
|
||||
|
||||
@@ -1,8 +1,11 @@
|
||||
using System.Collections.Concurrent;
|
||||
using System.Security.Cryptography;
|
||||
using System.Text;
|
||||
using System.Text.Json;
|
||||
using System.Text.Json.Serialization;
|
||||
using Microsoft.Extensions.Logging;
|
||||
using Microsoft.Extensions.Options;
|
||||
using StellaOps.Determinism;
|
||||
using StellaOps.ExportCenter.WebService.Telemetry;
|
||||
using StellaOps.ExportCenter.WebService.Timeline;
|
||||
|
||||
@@ -24,6 +27,7 @@ public sealed class RiskBundleJobHandler : IRiskBundleJobHandler
|
||||
private static readonly string[] OptionalProviderIds = ["nvd", "osv", "ghsa", "epss"];
|
||||
|
||||
private readonly TimeProvider _timeProvider;
|
||||
private readonly IGuidProvider _guidProvider;
|
||||
private readonly ILogger<RiskBundleJobHandler> _logger;
|
||||
private readonly IExportTimelinePublisher _timelinePublisher;
|
||||
private readonly RiskBundleJobHandlerOptions _options;
|
||||
@@ -33,11 +37,13 @@ public sealed class RiskBundleJobHandler : IRiskBundleJobHandler
|
||||
|
||||
public RiskBundleJobHandler(
|
||||
TimeProvider timeProvider,
|
||||
IGuidProvider guidProvider,
|
||||
ILogger<RiskBundleJobHandler> logger,
|
||||
IExportTimelinePublisher timelinePublisher,
|
||||
IOptions<RiskBundleJobHandlerOptions>? options = null)
|
||||
{
|
||||
_timeProvider = timeProvider ?? throw new ArgumentNullException(nameof(timeProvider));
|
||||
_guidProvider = guidProvider ?? throw new ArgumentNullException(nameof(guidProvider));
|
||||
_logger = logger ?? throw new ArgumentNullException(nameof(logger));
|
||||
_timelinePublisher = timelinePublisher ?? throw new ArgumentNullException(nameof(timelinePublisher));
|
||||
_options = options?.Value ?? RiskBundleJobHandlerOptions.Default;
|
||||
@@ -80,7 +86,8 @@ public sealed class RiskBundleJobHandler : IRiskBundleJobHandler
|
||||
cancellationToken.ThrowIfCancellationRequested();
|
||||
|
||||
var now = _timeProvider.GetUtcNow();
|
||||
var jobId = request.JobId?.ToString("N") ?? Guid.NewGuid().ToString("N");
|
||||
PruneExpiredJobs(now);
|
||||
var jobId = request.JobId?.ToString("N") ?? _guidProvider.NewGuid().ToString("N");
|
||||
|
||||
// Validate provider selection
|
||||
var selectedProviders = ResolveSelectedProviders(request.SelectedProviders);
|
||||
@@ -102,6 +109,20 @@ public sealed class RiskBundleJobHandler : IRiskBundleJobHandler
|
||||
};
|
||||
}
|
||||
|
||||
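// Enforce the concurrency cap before creating job state; Pending and Running jobs both count toward MaxConcurrentJobs.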
var activeJobs = _jobs.Values.Count(j => j.Status is RiskBundleJobStatus.Pending or RiskBundleJobStatus.Running);
|
||||
if (_options.MaxConcurrentJobs > 0 && activeJobs >= _options.MaxConcurrentJobs)
|
||||
{
|
||||
return new RiskBundleJobSubmitResult
|
||||
{
|
||||
Success = false,
|
||||
JobId = jobId,
|
||||
Status = RiskBundleJobStatus.Failed,
|
||||
ErrorMessage = "Maximum concurrent jobs reached",
|
||||
SubmittedAt = now,
|
||||
SelectedProviders = selectedProviders
|
||||
};
|
||||
}
|
||||
|
||||
// Create job state
|
||||
var jobState = new RiskBundleJobState
|
||||
{
|
||||
@@ -169,6 +190,8 @@ public sealed class RiskBundleJobHandler : IRiskBundleJobHandler
|
||||
ArgumentException.ThrowIfNullOrWhiteSpace(jobId);
|
||||
cancellationToken.ThrowIfCancellationRequested();
|
||||
|
||||
PruneExpiredJobs(_timeProvider.GetUtcNow());
|
||||
|
||||
if (!_jobs.TryGetValue(jobId, out var state))
|
||||
{
|
||||
return Task.FromResult<RiskBundleJobStatusDetail?>(null);
|
||||
@@ -184,6 +207,8 @@ public sealed class RiskBundleJobHandler : IRiskBundleJobHandler
|
||||
{
|
||||
cancellationToken.ThrowIfCancellationRequested();
|
||||
|
||||
PruneExpiredJobs(_timeProvider.GetUtcNow());
|
||||
|
||||
var query = _jobs.Values.AsEnumerable();
|
||||
|
||||
if (!string.IsNullOrWhiteSpace(tenantId))
|
||||
@@ -216,6 +241,7 @@ public sealed class RiskBundleJobHandler : IRiskBundleJobHandler
|
||||
return false;
|
||||
}
|
||||
|
||||
var originalStatus = state.Status;
|
||||
state.Status = RiskBundleJobStatus.Cancelled;
|
||||
state.CompletedAt = _timeProvider.GetUtcNow();
|
||||
state.CancellationSource?.Cancel();
|
||||
@@ -229,7 +255,7 @@ public sealed class RiskBundleJobHandler : IRiskBundleJobHandler
|
||||
state.CorrelationId,
|
||||
new Dictionary<string, string>
|
||||
{
|
||||
["original_status"] = state.Status.ToString()
|
||||
["original_status"] = originalStatus.ToString()
|
||||
},
|
||||
cancellationToken).ConfigureAwait(false);
|
||||
|
||||
@@ -242,6 +268,10 @@ public sealed class RiskBundleJobHandler : IRiskBundleJobHandler
|
||||
{
|
||||
state.CancellationSource = CancellationTokenSource.CreateLinkedTokenSource(cancellationToken);
|
||||
var linkedToken = state.CancellationSource.Token;
|
||||
if (_options.JobTimeout > TimeSpan.Zero)
|
||||
{
|
||||
state.CancellationSource.CancelAfter(_options.JobTimeout);
|
||||
}
|
||||
|
||||
try
|
||||
{
|
||||
@@ -263,14 +293,21 @@ public sealed class RiskBundleJobHandler : IRiskBundleJobHandler
|
||||
linkedToken.ThrowIfCancellationRequested();
|
||||
|
||||
// Create simulated outcome
|
||||
var bundleId = Guid.NewGuid();
|
||||
var storagePrefix = string.IsNullOrWhiteSpace(state.Request?.StoragePrefix)
|
||||
? _options.DefaultStoragePrefix
|
||||
: state.Request.StoragePrefix!;
|
||||
var bundleFileName = string.IsNullOrWhiteSpace(state.Request?.BundleFileName)
|
||||
? "risk-bundle.tar.gz"
|
||||
: state.Request.BundleFileName!;
|
||||
var bundleId = CreateDeterministicGuid($"bundle:{state.JobId}");
|
||||
var rootHash = $"sha256:{ComputeDeterministicSha256($"root:{state.JobId}")}";
|
||||
state.Outcome = new RiskBundleOutcomeSummary
|
||||
{
|
||||
BundleId = bundleId,
|
||||
RootHash = $"sha256:{Guid.NewGuid():N}",
|
||||
BundleStorageKey = $"risk-bundles/{bundleId:N}/risk-bundle.tar.gz",
|
||||
ManifestStorageKey = $"risk-bundles/{bundleId:N}/provider-manifest.json",
|
||||
ManifestSignatureStorageKey = $"risk-bundles/{bundleId:N}/signatures/provider-manifest.dsse",
|
||||
RootHash = rootHash,
|
||||
BundleStorageKey = $"{storagePrefix}/{bundleId:N}/{bundleFileName}",
|
||||
ManifestStorageKey = $"{storagePrefix}/{bundleId:N}/provider-manifest.json",
|
||||
ManifestSignatureStorageKey = $"{storagePrefix}/{bundleId:N}/signatures/provider-manifest.dsse",
|
||||
ProviderCount = state.SelectedProviders.Count,
|
||||
TotalSizeBytes = state.SelectedProviders.Count * 1024 * 1024 // Simulated
|
||||
};
|
||||
@@ -279,7 +316,7 @@ public sealed class RiskBundleJobHandler : IRiskBundleJobHandler
|
||||
.Select(p => new RiskBundleProviderResult
|
||||
{
|
||||
ProviderId = p,
|
||||
Sha256 = $"sha256:{Guid.NewGuid():N}",
|
||||
Sha256 = $"sha256:{ComputeDeterministicSha256($"provider:{state.JobId}:{p}")}",
|
||||
SizeBytes = 1024 * 1024,
|
||||
Source = $"mirror://{p}/current",
|
||||
SnapshotDate = DateOnly.FromDateTime(_timeProvider.GetUtcNow().DateTime),
|
||||
@@ -299,11 +336,11 @@ public sealed class RiskBundleJobHandler : IRiskBundleJobHandler
|
||||
new Dictionary<string, string>
|
||||
{
|
||||
["bundle_id"] = bundleId.ToString("N"),
|
||||
["root_hash"] = state.Outcome.RootHash,
|
||||
["root_hash"] = rootHash,
|
||||
["provider_count"] = state.Outcome.ProviderCount.ToString(),
|
||||
["total_size_bytes"] = state.Outcome.TotalSizeBytes.ToString()
|
||||
},
|
||||
CancellationToken.None).ConfigureAwait(false);
|
||||
linkedToken).ConfigureAwait(false);
|
||||
|
||||
// Record metrics
|
||||
ExportTelemetry.RiskBundleJobsCompleted.Add(1,
|
||||
@@ -322,7 +359,8 @@ public sealed class RiskBundleJobHandler : IRiskBundleJobHandler
|
||||
{
|
||||
if (state.Status != RiskBundleJobStatus.Cancelled)
|
||||
{
|
||||
state.Status = RiskBundleJobStatus.Cancelled;
|
||||
state.Status = RiskBundleJobStatus.Failed;
|
||||
state.ErrorMessage = "Job timed out or was cancelled.";
|
||||
state.CompletedAt = _timeProvider.GetUtcNow();
|
||||
}
|
||||
}
|
||||
@@ -343,7 +381,7 @@ public sealed class RiskBundleJobHandler : IRiskBundleJobHandler
|
||||
["error"] = ex.Message,
|
||||
["error_type"] = ex.GetType().Name
|
||||
},
|
||||
CancellationToken.None).ConfigureAwait(false);
|
||||
linkedToken).ConfigureAwait(false);
|
||||
|
||||
ExportTelemetry.RiskBundleJobsCompleted.Add(1,
|
||||
new KeyValuePair<string, object?>("tenant_id", state.TenantId ?? "unknown"),
|
||||
@@ -448,6 +486,27 @@ public sealed class RiskBundleJobHandler : IRiskBundleJobHandler
|
||||
return null;
|
||||
}
|
||||
|
||||
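// Drops terminal jobs (Completed, Failed, or Cancelled) whose completion time falls outside
// the configured retention window, keeping the in-memory job store bounded between requests.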
private void PruneExpiredJobs(DateTimeOffset now)
{
if (_options.JobRetentionPeriod <= TimeSpan.Zero)
{
return;
}

var cutoff = now - _options.JobRetentionPeriod;
foreach (var (jobId, job) in _jobs)
{
if (job.Status is RiskBundleJobStatus.Completed or RiskBundleJobStatus.Failed or RiskBundleJobStatus.Cancelled)
{
var completedAt = job.CompletedAt ?? job.SubmittedAt;
if (completedAt < cutoff)
{
_jobs.TryRemove(jobId, out _);
}
}
}
}

private RiskBundleAvailableProvider CreateProviderInfo(string providerId, bool mandatory)
{
var (displayName, description) = providerId switch
@@ -489,6 +548,18 @@ public sealed class RiskBundleJobHandler : IRiskBundleJobHandler
};
}

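// Maps a stable input string to a GUID by hashing it with SHA-256 and taking the first
// 16 bytes, so the same job id always yields the same bundle id across runs.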
private static Guid CreateDeterministicGuid(string input)
{
var hash = SHA256.HashData(Encoding.UTF8.GetBytes(input));
return new Guid(hash.AsSpan(0, 16));
}

private static string ComputeDeterministicSha256(string input)
{
var hash = SHA256.HashData(Encoding.UTF8.GetBytes(input));
return Convert.ToHexStringLower(hash);
}

private sealed class RiskBundleJobState
{
public required string JobId { get; init; }

@@ -1,5 +1,6 @@
using Microsoft.Extensions.DependencyInjection;
using Microsoft.Extensions.DependencyInjection.Extensions;
using StellaOps.Determinism;

namespace StellaOps.ExportCenter.WebService.RiskBundle;

@@ -22,6 +23,7 @@ public static class RiskBundleServiceCollectionExtensions

// Register TimeProvider if not already registered
services.TryAddSingleton(TimeProvider.System);
services.TryAddSingleton<IGuidProvider, SystemGuidProvider>();

// Configure options if provided
if (configure is not null)

@@ -1,5 +1,6 @@
using Microsoft.Extensions.DependencyInjection;
using Microsoft.Extensions.DependencyInjection.Extensions;
using StellaOps.Determinism;

namespace StellaOps.ExportCenter.WebService.SimulationExport;

@@ -19,6 +20,7 @@ public static class SimulationExportServiceCollectionExtensions

// Register TimeProvider if not already registered
services.TryAddSingleton(TimeProvider.System);
services.TryAddSingleton<IGuidProvider, SystemGuidProvider>();

// Register the exporter
services.TryAddSingleton<ISimulationReportExporter, SimulationReportExporter>();

@@ -1,10 +1,13 @@
using System.Collections.Concurrent;
using System.Globalization;
using System.Runtime.CompilerServices;
using System.Security.Cryptography;
using System.Text;
using System.Text.Json;
using System.Text.Json.Serialization;
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Options;
using StellaOps.Determinism;
using StellaOps.ExportCenter.WebService.Telemetry;

namespace StellaOps.ExportCenter.WebService.SimulationExport;
@@ -31,7 +34,9 @@ public sealed class SimulationReportExporter : ISimulationReportExporter
};

private readonly TimeProvider _timeProvider;
private readonly IGuidProvider _guidProvider;
private readonly ILogger<SimulationReportExporter> _logger;
private readonly SimulationReportExporterOptions _options;

// In-memory stores (would be replaced with persistent storage in production)
private readonly ConcurrentDictionary<string, SimulationExportDocument> _exports = new();
@@ -39,10 +44,14 @@ public sealed class SimulationReportExporter : ISimulationReportExporter

public SimulationReportExporter(
TimeProvider timeProvider,
ILogger<SimulationReportExporter> logger)
IGuidProvider guidProvider,
ILogger<SimulationReportExporter> logger,
IOptions<SimulationReportExporterOptions>? options = null)
{
_timeProvider = timeProvider ?? throw new ArgumentNullException(nameof(timeProvider));
_guidProvider = guidProvider ?? throw new ArgumentNullException(nameof(guidProvider));
_logger = logger ?? throw new ArgumentNullException(nameof(logger));
_options = options?.Value ?? SimulationReportExporterOptions.Default;

// Initialize with sample simulations for demonstration
InitializeSampleSimulations();
@@ -54,6 +63,7 @@ public sealed class SimulationReportExporter : ISimulationReportExporter
CancellationToken cancellationToken = default)
{
cancellationToken.ThrowIfCancellationRequested();
PruneExpiredEntries(_timeProvider.GetUtcNow());

var query = _simulations.Values.AsEnumerable();

@@ -92,7 +102,8 @@ public sealed class SimulationReportExporter : ISimulationReportExporter
cancellationToken.ThrowIfCancellationRequested();

var now = _timeProvider.GetUtcNow();
var exportId = $"exp-{Guid.NewGuid():N}";
PruneExpiredEntries(now);
var exportId = $"exp-{_guidProvider.NewGuid():N}";

if (!_simulations.TryGetValue(request.SimulationId, out var simulation))
{
@@ -200,13 +211,15 @@ public sealed class SimulationReportExporter : ISimulationReportExporter
{
ArgumentNullException.ThrowIfNull(request);

PruneExpiredEntries(_timeProvider.GetUtcNow());

if (!_simulations.TryGetValue(request.SimulationId, out var simulation))
{
yield break;
}

var now = _timeProvider.GetUtcNow();
var exportId = $"exp-{Guid.NewGuid():N}";
var exportId = $"exp-{_guidProvider.NewGuid():N}";

// Emit metadata first
yield return new SimulationExportLine
@@ -447,11 +460,11 @@ public sealed class SimulationReportExporter : ISimulationReportExporter
var now = _timeProvider.GetUtcNow();

// Sample simulation 1
var sim1Id = "sim-001-" + Guid.NewGuid().ToString("N")[..8];
var sim1Id = $"sim-001-{CreateDeterministicSuffix("sim-001")}";
_simulations[sim1Id] = CreateSampleSimulation(sim1Id, "baseline-risk-v1", "1.0.0", now.AddHours(-2), 150);

// Sample simulation 2
var sim2Id = "sim-002-" + Guid.NewGuid().ToString("N")[..8];
var sim2Id = $"sim-002-{CreateDeterministicSuffix("sim-002")}";
_simulations[sim2Id] = CreateSampleSimulation(sim2Id, "strict-risk-v2", "2.1.0", now.AddHours(-1), 85);
}

@@ -462,7 +475,7 @@ public sealed class SimulationReportExporter : ISimulationReportExporter
DateTimeOffset timestamp,
int findingCount)
{
var random = new Random(simulationId.GetHashCode());
var random = new SampleRandom(ComputeStableSeed(simulationId));
var findings = new List<ExportedFindingScore>();
var severities = new[] { "critical", "high", "medium", "low", "informational" };
var actions = new[] { "upgrade", "patch", "monitor", "accept", "investigate" };
@@ -518,7 +531,7 @@ public sealed class SimulationReportExporter : ISimulationReportExporter
SimulationId = simulationId,
ProfileId = profileId,
ProfileVersion = profileVersion,
ProfileHash = $"sha256:{Guid.NewGuid():N}",
ProfileHash = $"sha256:{ComputeDeterministicHash($"profile:{simulationId}")}",
Timestamp = timestamp,
TenantId = "default",
TotalFindings = findingCount,
@@ -532,7 +545,7 @@ public sealed class SimulationReportExporter : ISimulationReportExporter
MediumCount = medium,
LowCount = low,
InformationalCount = info,
DeterminismHash = $"det-{Guid.NewGuid():N}",
DeterminismHash = $"det-{ComputeDeterministicHash($"det:{simulationId}")}",
FindingScores = findings,
TopMovers = findings
.OrderByDescending(f => f.NormalizedScore)
@@ -628,6 +641,119 @@ public sealed class SimulationReportExporter : ISimulationReportExporter
};
}

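// Applies both retention-based expiry and max-count trimming so the in-memory stores stay
// bounded: entries older than RetentionPeriod are evicted first, then the oldest entries
// beyond MaxExports/MaxSimulations are trimmed.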
private void PruneExpiredEntries(DateTimeOffset now)
{
if (_options.RetentionPeriod > TimeSpan.Zero)
{
var cutoff = now - _options.RetentionPeriod;
foreach (var (exportId, document) in _exports)
{
if (document.Metadata.ExportTimestamp < cutoff)
{
_exports.TryRemove(exportId, out _);
}
}

foreach (var (simulationId, simulation) in _simulations)
{
if (simulation.Timestamp < cutoff)
{
_simulations.TryRemove(simulationId, out _);
}
}
}

TrimToMax(_exports, _options.MaxExports, doc => doc.Metadata.ExportTimestamp);
TrimToMax(_simulations, _options.MaxSimulations, sim => sim.Timestamp);
}

private static void TrimToMax<TValue>(
ConcurrentDictionary<string, TValue> store,
int maxCount,
Func<TValue, DateTimeOffset> timestampSelector)
{
if (maxCount <= 0 || store.Count <= maxCount)
{
return;
}

var excess = store.Count - maxCount;
var toRemove = store
.OrderBy(kvp => timestampSelector(kvp.Value))
.Take(excess)
.Select(kvp => kvp.Key)
.ToList();

foreach (var key in toRemove)
{
store.TryRemove(key, out _);
}
}

private static int ComputeStableSeed(string input)
{
var hash = SHA256.HashData(Encoding.UTF8.GetBytes(input));
var seed = (hash[0] << 24) | (hash[1] << 16) | (hash[2] << 8) | hash[3];
return seed;
}

private static string CreateDeterministicSuffix(string input)
{
return ComputeDeterministicHash(input)[..8];
}

private static string ComputeDeterministicHash(string input)
{
var hash = SHA256.HashData(Encoding.UTF8.GetBytes(input));
return Convert.ToHexStringLower(hash);
}

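// Deterministic pseudo-random source for sample data: a small linear congruential generator
// (Numerical Recipes constants) seeded from a SHA-256-derived value rather than
// string.GetHashCode, which is randomized per process in .NET, so sample simulations are
// reproducible across runs.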
private sealed class SampleRandom
{
private uint _state;

public SampleRandom(int seed)
{
_state = (uint)seed;
if (_state == 0)
{
_state = 1;
}
}

public double NextDouble()
{
return (NextUInt() & 0x00FFFFFF) / (double)0x01000000;
}

public int Next(int maxExclusive)
{
if (maxExclusive <= 0)
{
return 0;
}

return (int)(NextUInt() % (uint)maxExclusive);
}

public int Next(int minInclusive, int maxExclusive)
{
if (maxExclusive <= minInclusive)
{
return minInclusive;
}

var range = (uint)(maxExclusive - minInclusive);
return (int)(NextUInt() % range) + minInclusive;
}

private uint NextUInt()
{
_state = 1664525u * _state + 1013904223u;
return _state;
}
}

private sealed class SimulatedSimulationResult
{
public required string SimulationId { get; init; }
@@ -657,3 +783,26 @@ public sealed class SimulationReportExporter : ISimulationReportExporter
public TrendSection? Trends { get; init; }
}
}

/// <summary>
/// Options for simulation report exporter retention.
/// </summary>
public sealed record SimulationReportExporterOptions
{
/// <summary>
/// Maximum number of stored exports.
/// </summary>
public int MaxExports { get; init; } = 200;

/// <summary>
/// Maximum number of stored simulations.
/// </summary>
public int MaxSimulations { get; init; } = 200;

/// <summary>
/// Retention period for in-memory entries.
/// </summary>
public TimeSpan RetentionPeriod { get; init; } = TimeSpan.FromHours(24);

public static SimulationReportExporterOptions Default => new();
}

@@ -7,4 +7,5 @@ Source of truth: `docs-archived/implplan/2025-12-29-csproj-audit/SPRINT_20251229
| --- | --- | --- |
| AUDIT-0337-M | DONE | Revalidated 2026-01-07; maintainability audit for ExportCenter.WebService. |
| AUDIT-0337-T | DONE | Revalidated 2026-01-07; test coverage audit for ExportCenter.WebService. |
| AUDIT-0337-A | TODO | Pending approval (non-test project; revalidated 2026-01-07). |
| AUDIT-0337-A | DONE | Applied 2026-01-13; determinism, DI guards, retention/TLS gating, tests added. |
| AUDIT-HOTLIST-EXPORTCENTER-WEBSERVICE-0001 | DONE | Applied 2026-01-13; hotlist remediation and tests completed. |

@@ -0,0 +1,62 @@
using System;
using System.Collections.Generic;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.Extensions.DependencyInjection;
using StellaOps.DependencyInjection;
using StellaOps.Notify.Engine;
using StellaOps.Notify.Models;

namespace StellaOps.Notify.Connectors.Webhook;

/// <summary>
/// Health provider for generic webhook notification channels.
/// </summary>
[ServiceBinding(typeof(INotifyChannelHealthProvider), ServiceLifetime.Singleton)]
public sealed class WebhookChannelHealthProvider : INotifyChannelHealthProvider
{
/// <inheritdoc />
public NotifyChannelType ChannelType => NotifyChannelType.Webhook;

/// <inheritdoc />
public Task<ChannelHealthResult> CheckAsync(ChannelHealthContext context, CancellationToken cancellationToken)
{
ArgumentNullException.ThrowIfNull(context);
cancellationToken.ThrowIfCancellationRequested();

var builder = WebhookMetadataBuilder.CreateBuilder(context)
.Add("webhook.channel.enabled", context.Channel.Enabled ? "true" : "false")
.Add("webhook.validation.endpointPresent", HasConfiguredEndpoint(context.Channel) ? "true" : "false");

var metadata = builder.Build();
var status = ResolveStatus(context.Channel);
var message = status switch
{
ChannelHealthStatus.Healthy => "Webhook channel configuration validated.",
ChannelHealthStatus.Degraded => "Webhook channel is disabled; enable it to resume deliveries.",
ChannelHealthStatus.Unhealthy => "Webhook channel is missing a target URL or endpoint configuration.",
_ => "Webhook channel diagnostics completed."
};

return Task.FromResult(new ChannelHealthResult(status, message, metadata));
}

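// Missing endpoint configuration outranks the enabled flag: a channel without a target
// URL or endpoint is reported unhealthy even when it is enabled.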
private static ChannelHealthStatus ResolveStatus(NotifyChannel channel)
{
if (!HasConfiguredEndpoint(channel))
{
return ChannelHealthStatus.Unhealthy;
}

if (!channel.Enabled)
{
return ChannelHealthStatus.Degraded;
}

return ChannelHealthStatus.Healthy;
}

private static bool HasConfiguredEndpoint(NotifyChannel channel)
=> !string.IsNullOrWhiteSpace(channel.Config.Endpoint) ||
!string.IsNullOrWhiteSpace(channel.Config.Target);
}

@@ -5,6 +5,7 @@ using System.Text.Json;
using System.Text.Json.Serialization;
using Microsoft.Extensions.Logging;
using StellaOps.Cryptography;
using StellaOps.Determinism;
using StellaOps.Policy.RiskProfile.Export;
using StellaOps.Policy.RiskProfile.Hashing;
using StellaOps.Policy.RiskProfile.Models;
@@ -22,6 +23,7 @@ public sealed class RiskProfileAirGapExportService

private readonly ICryptoHash _cryptoHash;
private readonly TimeProvider _timeProvider;
private readonly IGuidProvider _guidProvider;
private readonly ISealedModeService? _sealedModeService;
private readonly RiskProfileHasher _hasher;
private readonly ILogger<RiskProfileAirGapExportService> _logger;
@@ -35,11 +37,13 @@ public sealed class RiskProfileAirGapExportService
public RiskProfileAirGapExportService(
ICryptoHash cryptoHash,
TimeProvider timeProvider,
IGuidProvider guidProvider,
ILogger<RiskProfileAirGapExportService> logger,
ISealedModeService? sealedModeService = null)
{
_cryptoHash = cryptoHash ?? throw new ArgumentNullException(nameof(cryptoHash));
_timeProvider = timeProvider ?? throw new ArgumentNullException(nameof(timeProvider));
_guidProvider = guidProvider ?? throw new ArgumentNullException(nameof(guidProvider));
_logger = logger ?? throw new ArgumentNullException(nameof(logger));
_sealedModeService = sealedModeService;
_hasher = new RiskProfileHasher(cryptoHash);
@@ -74,7 +78,7 @@ public sealed class RiskProfileAirGapExportService
var export = new RiskProfileAirGapExport(
Key: $"profile-{profile.Id}-{profile.Version}",
Format: "json",
ExportId: Guid.NewGuid().ToString("N")[..16],
ExportId: _guidProvider.NewGuid().ToString("N")[..16],
ProfileId: profile.Id,
ProfileVersion: profile.Version,
CreatedAt: now.ToString("O", CultureInfo.InvariantCulture),
@@ -426,9 +430,9 @@ public sealed class RiskProfileAirGapExportService
SignedAt: signedAt.ToString("O", CultureInfo.InvariantCulture));
}

private static string GenerateBundleId(DateTimeOffset timestamp)
private string GenerateBundleId(DateTimeOffset timestamp)
{
return $"rpab-{timestamp:yyyyMMddHHmmss}-{Guid.NewGuid():N}"[..24];
return $"rpab-{timestamp:yyyyMMddHHmmss}-{_guidProvider.NewGuid():N}"[..24];
}

private static string GetSigningKey(string? keyId)

@@ -1,4 +1,4 @@
using System.Text.Json;
using StellaOps.Canonical.Json;
using StellaOps.Cryptography;

namespace StellaOps.Policy.Engine.Attestation;
@@ -191,10 +191,8 @@ public sealed class RvaBuilder

private string ComputeAttestationId(RiskVerdictAttestation attestation)
{
var json = JsonSerializer.Serialize(attestation with { AttestationId = "" },
RvaSerializerOptions.Canonical);

var hash = _cryptoHash.ComputeHashHex(System.Text.Encoding.UTF8.GetBytes(json), "SHA256");
var canonical = CanonJson.Canonicalize(attestation with { AttestationId = "" });
var hash = _cryptoHash.ComputeHashHex(canonical, "SHA256");
return $"rva:sha256:{hash}";
}

@@ -208,19 +206,3 @@ public sealed class RvaBuilder

}
}

/// <summary>
/// Centralized JSON serializer options for RVA.
/// </summary>
internal static class RvaSerializerOptions
{
/// <summary>
/// Canonical JSON options for deterministic serialization.
/// </summary>
public static JsonSerializerOptions Canonical { get; } = new()
{
PropertyNamingPolicy = JsonNamingPolicy.CamelCase,
WriteIndented = false,
DefaultIgnoreCondition = System.Text.Json.Serialization.JsonIgnoreCondition.WhenWritingNull,
Encoder = System.Text.Encodings.Web.JavaScriptEncoder.UnsafeRelaxedJsonEscaping
};
}

@@ -1,8 +1,8 @@
using System.Security.Cryptography;
using System.Text;
using System.Text.Json;
using Microsoft.Extensions.Logging;
using StellaOps.Attestor.Envelope;
using StellaOps.Canonical.Json;
using StellaOps.Cryptography;
using StellaOps.Policy.Snapshots;

@@ -272,16 +272,15 @@ public sealed class RvaVerifier : IRvaVerifier

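// Recomputes the attestation id from the canonical form of the attestation (with
// AttestationId blanked) and compares it to the embedded value, mirroring RvaBuilder.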
private static bool VerifyAttestationId(RiskVerdictAttestation attestation)
{
var json = JsonSerializer.Serialize(attestation with { AttestationId = "" },
RvaSerializerOptions.Canonical);
var expectedId = $"rva:sha256:{ComputeSha256(json)}";
var canonical = CanonJson.Canonicalize(attestation with { AttestationId = "" });
var expectedId = $"rva:sha256:{ComputeSha256(canonical)}";
return attestation.AttestationId == expectedId;
}

private static string ComputeSha256(string input)
private static string ComputeSha256(ReadOnlySpan<byte> input)
{
var bytes = SHA256.HashData(Encoding.UTF8.GetBytes(input));
return Convert.ToHexString(bytes).ToLowerInvariant();
var bytes = SHA256.HashData(input);
return Convert.ToHexStringLower(bytes);
}
}

@@ -326,6 +326,8 @@ public sealed record VerdictAppliedGuardrails
/// </summary>
public sealed record VerdictScoringProof
{
private const string DefaultCalculatorVersion = "1.0.0";

/// <summary>
/// Creates a new VerdictScoringProof.
/// </summary>
@@ -382,7 +384,7 @@ public sealed record VerdictScoringProof
inputs: VerdictEvidenceInputs.FromEvidenceInputValues(ewsResult.Inputs),
weights: VerdictEvidenceWeights.FromEvidenceWeights(ewsResult.Weights),
policyDigest: ewsResult.PolicyDigest,
calculatorVersion: "1.0.0", // TODO: Get from calculator metadata
calculatorVersion: DefaultCalculatorVersion,
calculatedAt: ewsResult.CalculatedAt
);
}

@@ -203,8 +203,7 @@ public sealed class VerdictPredicateBuilder
return null;
}

// TODO: Extract full reachability paths from trace or evidence
// For now, return basic reachability status
// Reachability paths are not yet supplied; emit status-only until trace evidence expands.
return new VerdictReachability(
status: reachabilityStatus,
paths: null

@@ -3,6 +3,7 @@ using System.Security.Cryptography;
using System.Text;
using System.Text.Json;
using System.Text.RegularExpressions;
using StellaOps.Determinism;
using StellaOps.Policy.Engine.Ledger;

namespace StellaOps.Policy.Engine.ConsoleExport;
@@ -20,19 +21,22 @@ internal sealed partial class ConsoleExportJobService
private readonly IConsoleExportBundleStore _bundleStore;
private readonly LedgerExportService _ledgerExport;
private readonly TimeProvider _timeProvider;
private readonly IGuidProvider _guidProvider;

public ConsoleExportJobService(
IConsoleExportJobStore jobStore,
IConsoleExportExecutionStore executionStore,
IConsoleExportBundleStore bundleStore,
LedgerExportService ledgerExport,
TimeProvider timeProvider)
TimeProvider timeProvider,
IGuidProvider guidProvider)
{
_jobStore = jobStore ?? throw new ArgumentNullException(nameof(jobStore));
_executionStore = executionStore ?? throw new ArgumentNullException(nameof(executionStore));
_bundleStore = bundleStore ?? throw new ArgumentNullException(nameof(bundleStore));
_ledgerExport = ledgerExport ?? throw new ArgumentNullException(nameof(ledgerExport));
_timeProvider = timeProvider ?? throw new ArgumentNullException(nameof(timeProvider));
_guidProvider = guidProvider ?? throw new ArgumentNullException(nameof(guidProvider));
}

public async Task<ExportBundleJob> CreateJobAsync(
@@ -216,7 +220,7 @@ internal sealed partial class ConsoleExportJobService
CompletedAt = _timeProvider.GetUtcNow().ToString("O", CultureInfo.InvariantCulture),
Error = ex.Message
};
await _executionStore.SaveAsync(failedExecution, CancellationToken.None).ConfigureAwait(false);
await _executionStore.SaveAsync(failedExecution, cancellationToken).ConfigureAwait(false);
}
}

@@ -285,9 +289,9 @@ internal sealed partial class ConsoleExportJobService
return from.AddDays(1).ToString("O", CultureInfo.InvariantCulture);
}

private static string GenerateId(string prefix)
private string GenerateId(string prefix)
{
return $"{prefix}-{Guid.NewGuid():N}"[..16];
return $"{prefix}-{_guidProvider.NewGuid():N}"[..16];
}

private static string ComputeSha256(byte[] data)

@@ -1,6 +1,7 @@
using Microsoft.Extensions.DependencyInjection;
using Microsoft.Extensions.DependencyInjection.Extensions;
using Microsoft.Extensions.Http;
using StellaOps.Determinism;
using StellaOps.Policy.Confidence.Configuration;
using StellaOps.Policy.Confidence.Services;
using StellaOps.Policy.Engine.Attestation;
@@ -35,8 +36,8 @@ public static class PolicyEngineServiceCollectionExtensions
/// </summary>
public static IServiceCollection AddPolicyEngineCore(this IServiceCollection services)
{
// Time provider
services.TryAddSingleton(TimeProvider.System);
// Determinism defaults (TimeProvider + IGuidProvider)
services.AddDeterminismDefaults();

// Core compilation and evaluation services
services.TryAddSingleton<PolicyCompilationService>();

@@ -26,7 +26,7 @@ public static class PolicyLintEndpoints
group.MapGet("/rules", GetLintRulesAsync)
.WithName("Policy.Lint.GetRules")
.WithDescription("Get available lint rules and their severities")
.AllowAnonymous();
.RequireAuthorization(policy => policy.RequireClaim("scope", "policy:read"));

return routes;
}

Some files were not shown because too many files have changed in this diff.