Sprint completion; new product advisories prepared
@@ -0,0 +1,497 @@
// -----------------------------------------------------------------------------
// CheckpointDivergenceByzantineTests.cs
// Sprint: SPRINT_20260112_017_ATTESTOR_checkpoint_divergence_detection
// Task: DIVERGE-011
// Description: Integration tests simulating Byzantine scenarios for divergence detection.
// -----------------------------------------------------------------------------

using System;
using System.Collections.Generic;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Options;
using Moq;
using StellaOps.Attestor.Core.Rekor;
using Xunit;

namespace StellaOps.Attestor.Tests.Rekor;

/// <summary>
/// Integration tests simulating Byzantine fault scenarios for checkpoint divergence detection.
/// These tests verify the system's response to various attack patterns and failure modes.
/// </summary>
[Trait("Category", "Integration")]
[Trait("Scenario", "Byzantine")]
public sealed class CheckpointDivergenceByzantineTests
{
    private readonly InMemoryCheckpointStore _store;
    private readonly CheckpointDivergenceDetector _detector;
    private readonly List<CheckpointDivergenceEvent> _capturedEvents;

    public CheckpointDivergenceByzantineTests()
    {
        _store = new InMemoryCheckpointStore();
        _capturedEvents = new List<CheckpointDivergenceEvent>();

        var options = new DivergenceDetectorOptions
        {
            StaleCheckpointThreshold = TimeSpan.FromHours(1),
            EnableCrossLogChecks = true,
            MirrorOrigins = new List<string>
            {
                "rekor.mirror-a.example.com",
                "rekor.mirror-b.example.com"
            }
        };

        _detector = new CheckpointDivergenceDetector(
            _store,
            Options.Create(options),
            Mock.Of<ILogger<CheckpointDivergenceDetector>>());

        _detector.DivergenceDetected += (sender, evt) => _capturedEvents.Add(evt);
    }

    #region Split-View Attack Scenarios

    /// <summary>
    /// Simulates a split-view attack where a malicious log server presents
    /// different trees to different clients at the same tree size.
    /// </summary>
    [Fact]
    public async Task SplitViewAttack_DifferentRootsAtSameSize_DetectedAsCritical()
    {
        // Arrange - Client A receives checkpoint with root hash A
        var origin = "rekor.sigstore.dev";
        var treeSize = 10000L;

        var legitimateCheckpoint = CreateCheckpoint(origin, treeSize, GenerateHash("legitimate"));
        await _store.StoreCheckpointAsync(legitimateCheckpoint);

        // Attacker presents different root to Client B
        var maliciousCheckpoint = CreateCheckpoint(origin, treeSize, GenerateHash("malicious"));

        // Act
        var result = await _detector.DetectDivergenceAsync(maliciousCheckpoint);

        // Assert
        Assert.False(result.IsConsistent);
        Assert.Contains(result.Anomalies, a =>
            a.Type == AnomalyType.RootHashMismatch &&
            a.Severity == AnomalySeverity.Critical);

        // Verify event was raised
        Assert.Single(_capturedEvents);
        Assert.Equal(AnomalyType.RootHashMismatch, _capturedEvents[0].Anomaly.Type);
    }

    /// <summary>
    /// Simulates a sophisticated split-view where the attacker also
    /// presents valid consistency proofs for the malicious tree.
    /// </summary>
    [Fact]
    public async Task SplitViewAttack_WithFakeConsistencyProof_StillDetectedByRootMismatch()
    {
        // Arrange
        var origin = "rekor.sigstore.dev";
        var previousSize = 5000L;
        var currentSize = 10000L;

        // Store legitimate progression
        await _store.StoreCheckpointAsync(CreateCheckpoint(origin, previousSize, GenerateHash("leg-5000")));
        await _store.StoreCheckpointAsync(CreateCheckpoint(origin, currentSize, GenerateHash("leg-10000")));

        // Attacker presents checkpoint that appears to extend legitimately
        // but has different root hash
        var maliciousCheckpoint = CreateCheckpoint(origin, currentSize, GenerateHash("mal-10000"));

        // Act
        var result = await _detector.DetectDivergenceAsync(maliciousCheckpoint);

        // Assert - root hash mismatch detection doesn't rely on proofs
        Assert.Contains(result.Anomalies, a => a.Type == AnomalyType.RootHashMismatch);
    }

    #endregion

    #region Rollback Attack Scenarios

    /// <summary>
    /// Simulates a rollback attack where an attacker tries to present
    /// an older, smaller tree to hide recent entries.
    /// </summary>
    [Fact]
    public async Task RollbackAttack_SmallerTreeSize_DetectedAsCritical()
    {
        // Arrange - Store current state at tree size 10000
        var origin = "rekor.sigstore.dev";
        await _store.StoreCheckpointAsync(CreateCheckpoint(origin, 10000L, GenerateHash("current")));

        // Attacker presents checkpoint with smaller tree size
        var rollbackCheckpoint = CreateCheckpoint(origin, 8000L, GenerateHash("rollback"));

        // Act
        var result = await _detector.DetectDivergenceAsync(rollbackCheckpoint);

        // Assert
        Assert.Contains(result.Anomalies, a =>
            a.Type == AnomalyType.TreeSizeRollback &&
            a.Severity == AnomalySeverity.Critical);
    }

    /// <summary>
    /// Simulates a subtle rollback where the attacker removes only
    /// the most recent entries (small delta).
    /// </summary>
    [Fact]
    public async Task SubtleRollbackAttack_SmallDelta_StillDetected()
    {
        // Arrange
        var origin = "rekor.sigstore.dev";
        await _store.StoreCheckpointAsync(CreateCheckpoint(origin, 10000L, GenerateHash("current")));

        // Only 10 entries removed - subtle attack
        var subtleRollback = CreateCheckpoint(origin, 9990L, GenerateHash("subtle-rollback"));

        // Act
        var result = await _detector.DetectDivergenceAsync(subtleRollback);

        // Assert - even small rollbacks are detected
        Assert.Contains(result.Anomalies, a => a.Type == AnomalyType.TreeSizeRollback);
    }

    #endregion

    #region Mirror Divergence Scenarios

    /// <summary>
    /// Simulates a scenario where a mirror has been compromised
    /// and presents different data than the primary.
    /// </summary>
    [Fact]
    public async Task CompromisedMirror_DifferentRoot_DetectedAsDivergence()
    {
        // Arrange
        var primaryOrigin = "rekor.sigstore.dev";
        var mirrorOrigin = "rekor.mirror-a.example.com";
        var treeSize = 10000L;

        // Store legitimate primary checkpoint
        var primaryCheckpoint = CreateCheckpoint(primaryOrigin, treeSize, GenerateHash("primary"));
        await _store.StoreCheckpointAsync(primaryCheckpoint);

        // Compromised mirror has different root at same size
        var compromisedMirrorCheckpoint = CreateCheckpoint(mirrorOrigin, treeSize, GenerateHash("compromised"));
        await _store.StoreCheckpointAsync(compromisedMirrorCheckpoint);

        // Act
        var mirrorCheckpoint = await _store.GetCheckpointAtSizeAsync(mirrorOrigin, primaryCheckpoint.TreeSize);
        Assert.NotNull(mirrorCheckpoint);
        var result = await _detector.CheckCrossLogConsistencyAsync(primaryCheckpoint, mirrorCheckpoint!);

        // Assert
        Assert.False(result.IsConsistent);
        Assert.NotNull(result.Divergence);
        Assert.Equal(AnomalyType.CrossLogDivergence, result.Divergence.Type);
    }

    /// <summary>
    /// Tests detection when multiple mirrors diverge (indicating
    /// either network partition or coordinated attack).
    /// </summary>
    [Fact]
    public async Task MultipleMirrorsDiverge_AllDivergencesDetected()
    {
        // Arrange
        var primaryOrigin = "rekor.sigstore.dev";
        var treeSize = 10000L;

        var primaryCheckpoint = CreateCheckpoint(primaryOrigin, treeSize, GenerateHash("primary"));
        await _store.StoreCheckpointAsync(primaryCheckpoint);

        // Store divergent checkpoints for multiple mirrors
        await _store.StoreCheckpointAsync(CreateCheckpoint(
            "rekor.mirror-a.example.com", treeSize, GenerateHash("mirror-a")));
        await _store.StoreCheckpointAsync(CreateCheckpoint(
            "rekor.mirror-b.example.com", treeSize, GenerateHash("mirror-b")));

        // Act
        var mirrorCheckpointA = await _store.GetCheckpointAtSizeAsync("rekor.mirror-a.example.com", treeSize);
        var mirrorCheckpointB = await _store.GetCheckpointAtSizeAsync("rekor.mirror-b.example.com", treeSize);
        Assert.NotNull(mirrorCheckpointA);
        Assert.NotNull(mirrorCheckpointB);

        var resultA = await _detector.CheckCrossLogConsistencyAsync(
            primaryCheckpoint, mirrorCheckpointA!);
        var resultB = await _detector.CheckCrossLogConsistencyAsync(
            primaryCheckpoint, mirrorCheckpointB!);

        // Assert - both divergences detected
        Assert.False(resultA.IsConsistent);
        Assert.False(resultB.IsConsistent);
    }

    #endregion

    #region Replay Attack Scenarios

    /// <summary>
    /// Simulates a replay attack where old valid checkpoints
    /// are replayed to make the log appear stale.
    /// </summary>
    [Fact]
    public async Task ReplayAttack_OldCheckpointReplayed_DetectedAsRollback()
    {
        // Arrange - Store progression of checkpoints
        var origin = "rekor.sigstore.dev";

        await _store.StoreCheckpointAsync(CreateCheckpoint(origin, 5000L, GenerateHash("5000")));
        await _store.StoreCheckpointAsync(CreateCheckpoint(origin, 7500L, GenerateHash("7500")));
        await _store.StoreCheckpointAsync(CreateCheckpoint(origin, 10000L, GenerateHash("10000")));

        // Attacker replays old checkpoint
        var replayedCheckpoint = CreateCheckpoint(origin, 5000L, GenerateHash("5000"));

        // Act
        var result = await _detector.DetectDivergenceAsync(replayedCheckpoint);

        // Assert - detected as rollback (tree size regression)
        Assert.Contains(result.Anomalies, a => a.Type == AnomalyType.TreeSizeRollback);
    }

    #endregion

    #region Timing Attack Scenarios

    /// <summary>
    /// Simulates a scenario where log updates stop, potentially
    /// indicating denial of service or key compromise.
    /// </summary>
    [Fact]
    public async Task StaleLogAttack_NoUpdates_DetectedAsUnhealthy()
    {
        // Arrange - Store checkpoint that appears very old
        var origin = "rekor.sigstore.dev";
        var staleCheckpoint = CreateCheckpoint(
            origin,
            10000L,
            GenerateHash("stale"),
            DateTimeOffset.UtcNow.AddHours(-5)); // Very stale

        await _store.StoreCheckpointAsync(staleCheckpoint);

        // Act
        var health = await _detector.GetLogHealthAsync(origin);

        // Assert
        Assert.Equal(LogHealthState.Unhealthy, health.State);
    }

    #endregion

    #region Combined Attack Scenarios

    /// <summary>
    /// Simulates a sophisticated attack combining split-view with
    /// targeted mirror compromise.
    /// </summary>
    [Fact]
    public async Task CombinedAttack_SplitViewPlusMirrorCompromise_AllAnomaliesDetected()
    {
        // Arrange
        var primaryOrigin = "rekor.sigstore.dev";
        var mirrorOrigin = "rekor.mirror-a.example.com";
        var treeSize = 10000L;

        // Legitimate state
        var legitimateCheckpoint = CreateCheckpoint(primaryOrigin, treeSize, GenerateHash("legitimate"));
        await _store.StoreCheckpointAsync(legitimateCheckpoint);

        // Attacker presents split-view to this client
        var splitViewCheckpoint = CreateCheckpoint(primaryOrigin, treeSize, GenerateHash("splitview"));

        // AND mirror is also compromised with different data
        var compromisedMirror = CreateCheckpoint(mirrorOrigin, treeSize, GenerateHash("compromised-mirror"));
        await _store.StoreCheckpointAsync(compromisedMirror);

        // Act
        var divergenceResult = await _detector.DetectDivergenceAsync(splitViewCheckpoint);
        var mirrorCheckpoint = await _store.GetCheckpointAtSizeAsync(mirrorOrigin, legitimateCheckpoint.TreeSize);
        Assert.NotNull(mirrorCheckpoint);
        var mirrorResult = await _detector.CheckCrossLogConsistencyAsync(legitimateCheckpoint, mirrorCheckpoint!);

        // Assert
        Assert.False(divergenceResult.IsConsistent);
        Assert.False(mirrorResult.IsConsistent);
        Assert.Contains(divergenceResult.Anomalies, a => a.Type == AnomalyType.RootHashMismatch);
    }

    #endregion

    #region Recovery Scenarios

    /// <summary>
    /// Verifies that after detecting divergence, legitimate checkpoints
    /// that extend properly are still accepted.
    /// </summary>
    [Fact]
    public async Task AfterDivergenceDetection_LegitimateExtension_Accepted()
    {
        // Arrange - Set up initial state and detect an attack
        var origin = "rekor.sigstore.dev";
        await _store.StoreCheckpointAsync(CreateCheckpoint(origin, 10000L, GenerateHash("initial")));

        // Attack detected
        var malicious = CreateCheckpoint(origin, 10000L, GenerateHash("malicious"));
        var attackResult = await _detector.DetectDivergenceAsync(malicious);
        Assert.False(attackResult.IsConsistent);

        _capturedEvents.Clear();

        // Now legitimate checkpoint arrives that extends the tree
        var legitimate = CreateCheckpoint(origin, 12000L, GenerateHash("legitimate-extension"));

        // Act
        var result = await _detector.DetectDivergenceAsync(legitimate);

        // Assert - legitimate extension should be clean (no anomalies)
        Assert.True(result.IsConsistent);
        Assert.Empty(_capturedEvents);
    }

    #endregion

    #region Helper Methods

    private static StoredCheckpoint CreateCheckpoint(
        string origin,
        long treeSize,
        byte[] rootHash,
        DateTimeOffset? storedAt = null)
    {
        return new StoredCheckpoint
        {
            CheckpointId = Guid.NewGuid(),
            Origin = origin,
            TreeSize = treeSize,
            RootHash = rootHash,
            RawCheckpoint = $"{origin} - {treeSize}\n{Convert.ToHexString(rootHash)}\n",
            Signature = new byte[] { 0x01, 0x02 },
            FetchedAt = storedAt ?? DateTimeOffset.UtcNow,
            Verified = true,
            VerifiedAt = storedAt ?? DateTimeOffset.UtcNow,
        };
    }
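
    // Note: RawCheckpoint above is a simplified stand-in. A real signed-note
    // checkpoint body is "<origin>\n<tree size>\n<base64 root hash>\n" followed
    // by signature lines; these tests never parse RawCheckpoint, so the
    // "<origin> - <size>" form is sufficient here.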

    private static byte[] GenerateHash(string seed)
    {
        using var sha256 = System.Security.Cryptography.SHA256.Create();
        return sha256.ComputeHash(System.Text.Encoding.UTF8.GetBytes(seed));
    }

    #endregion
}

/// <summary>
/// In-memory checkpoint store for integration testing.
/// </summary>
internal sealed class InMemoryCheckpointStore : IRekorCheckpointStore
{
    private readonly Dictionary<(string Origin, long TreeSize), StoredCheckpoint> _bySize = new();
    private readonly Dictionary<string, StoredCheckpoint> _latest = new();
    private readonly object _lock = new();

    public Task<bool> StoreCheckpointAsync(StoredCheckpoint checkpoint, CancellationToken ct = default)
    {
        lock (_lock)
        {
            var key = (checkpoint.Origin, checkpoint.TreeSize);
            var isNew = !_bySize.ContainsKey(key);
            _bySize[key] = checkpoint;

            if (!_latest.TryGetValue(checkpoint.Origin, out var current) ||
                checkpoint.TreeSize > current.TreeSize)
            {
                _latest[checkpoint.Origin] = checkpoint;
            }

            // Report whether this (origin, size) pair was newly stored, matching
            // the behaviour of InMemoryRekorCheckpointStore in the sync tests.
            return Task.FromResult(isNew);
        }
    }

    public Task<StoredCheckpoint?> GetCheckpointAtSizeAsync(string origin, long treeSize, CancellationToken ct = default)
    {
        lock (_lock)
        {
            _bySize.TryGetValue((origin, treeSize), out var checkpoint);
            return Task.FromResult(checkpoint);
        }
    }

    public Task<StoredCheckpoint?> GetLatestCheckpointAsync(string origin, CancellationToken ct = default)
    {
        lock (_lock)
        {
            _latest.TryGetValue(origin, out var checkpoint);
            return Task.FromResult(checkpoint);
        }
    }

    public Task MarkVerifiedAsync(Guid checkpointId, CancellationToken ct = default)
    {
        lock (_lock)
        {
            var checkpoint = _bySize.Values.FirstOrDefault(c => c.CheckpointId == checkpointId);
            if (checkpoint != null)
            {
                var updated = checkpoint with { Verified = true, VerifiedAt = DateTimeOffset.UtcNow };
                _bySize[(checkpoint.Origin, checkpoint.TreeSize)] = updated;
                _latest[checkpoint.Origin] = updated;
            }
        }
        return Task.CompletedTask;
    }

    public Task<IReadOnlyList<StoredCheckpoint>> GetCheckpointsInRangeAsync(
        string origin, long fromSize, long toSize, CancellationToken ct = default)
    {
        lock (_lock)
        {
            var result = _bySize.Values
                .Where(c => c.Origin == origin && c.TreeSize >= fromSize && c.TreeSize <= toSize)
                .OrderBy(c => c.TreeSize)
                .ToList();
            return Task.FromResult<IReadOnlyList<StoredCheckpoint>>(result);
        }
    }

    public Task<int> PruneOldCheckpointsAsync(DateTimeOffset olderThan, bool keepLatestPerOrigin = true, CancellationToken ct = default)
    {
        lock (_lock)
        {
            var toRemove = new List<(string, long)>();
            var latestByOrigin = _bySize.Values
                .GroupBy(c => c.Origin)
                .ToDictionary(g => g.Key, g => g.MaxBy(c => c.TreeSize)?.CheckpointId);

            foreach (var kvp in _bySize)
            {
                if (kvp.Value.FetchedAt < olderThan)
                {
                    if (!keepLatestPerOrigin || latestByOrigin[kvp.Value.Origin] != kvp.Value.CheckpointId)
                    {
                        toRemove.Add(kvp.Key);
                    }
                }
            }

            foreach (var key in toRemove)
            {
                _bySize.Remove(key);
            }

            return Task.FromResult(toRemove.Count);
        }
    }
}
@@ -0,0 +1,128 @@
// -----------------------------------------------------------------------------
// CheckpointDivergenceDetectorTests.cs
// Sprint: SPRINT_20260112_017_ATTESTOR_checkpoint_divergence_detection
// Task: DIVERGE-010
// Description: Unit tests for checkpoint divergence detection scenarios.
// -----------------------------------------------------------------------------

using System;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Options;
using Moq;
using StellaOps.Attestor.Core.Rekor;
using Xunit;

namespace StellaOps.Attestor.Tests.Rekor;

/// <summary>
/// Unit tests for checkpoint divergence detection.
/// </summary>
[Trait("Category", "Unit")]
public sealed class CheckpointDivergenceDetectorTests
{
    private readonly Mock<IRekorCheckpointStore> _mockStore;
    private readonly CheckpointDivergenceDetector _detector;

    public CheckpointDivergenceDetectorTests()
    {
        _mockStore = new Mock<IRekorCheckpointStore>();
        var options = new DivergenceDetectorOptions
        {
            StaleCheckpointThreshold = TimeSpan.FromHours(1),
            StaleTreeSizeThreshold = TimeSpan.FromHours(2),
            DegradedCheckpointAgeThreshold = TimeSpan.FromMinutes(30),
            UnhealthyCheckpointAgeThreshold = TimeSpan.FromHours(2),
            EnableCrossLogChecks = true,
            MirrorOrigins = ["rekor.mirror.example.com"],
        };

        _detector = new CheckpointDivergenceDetector(
            _mockStore.Object,
            Options.Create(options),
            Mock.Of<ILogger<CheckpointDivergenceDetector>>());
    }
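
    // Threshold semantics assumed by these tests: a log counts as "degraded"
    // once its newest checkpoint is older than 30 minutes and "unhealthy" past
    // 2 hours; the detector implementation itself is not part of this change.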

    [Fact]
    public async Task DetectDivergence_RootHashMismatch_ReturnsCriticalAnomaly()
    {
        // Arrange
        var origin = "rekor.sigstore.dev";
        var treeSize = 1000L;
        var existingRoot = new byte[] { 0x01, 0x02, 0x03 };
        var newRoot = new byte[] { 0x04, 0x05, 0x06 };

        var existingCheckpoint = CreateCheckpoint(origin, treeSize, existingRoot);
        var newCheckpoint = CreateCheckpoint(origin, treeSize, newRoot);

        _mockStore
            .Setup(s => s.GetCheckpointAtSizeAsync(origin, treeSize, It.IsAny<CancellationToken>()))
            .ReturnsAsync(existingCheckpoint);

        // Act
        var result = await _detector.DetectDivergenceAsync(newCheckpoint);

        // Assert
        Assert.False(result.IsConsistent);
        var mismatch = result.Anomalies.First(a => a.Type == AnomalyType.RootHashMismatch);
        Assert.Equal(AnomalySeverity.Critical, mismatch.Severity);
    }

    [Fact]
    public async Task CheckMonotonicity_TreeSizeRollback_ReturnsViolation()
    {
        // Arrange
        var origin = "rekor.sigstore.dev";
        var latestCheckpoint = CreateCheckpoint(origin, 2000L, new byte[] { 0x01 });
        _mockStore
            .Setup(s => s.GetLatestCheckpointAsync(origin, It.IsAny<CancellationToken>()))
            .ReturnsAsync(latestCheckpoint);

        // Act
        var result = await _detector.CheckMonotonicityAsync(origin, 1500L);

        // Assert
        Assert.False(result.IsMaintained);
        Assert.NotNull(result.Violation);
        Assert.Equal(AnomalyType.TreeSizeRollback, result.Violation!.Type);
    }

    [Fact]
    public async Task GetLogHealth_RecentCheckpoint_ReturnsHealthy()
    {
        // Arrange
        var origin = "rekor.sigstore.dev";
        var recent = CreateCheckpoint(origin, 1000L, new byte[] { 0x01 }, DateTimeOffset.UtcNow.AddMinutes(-5));
        _mockStore
            .Setup(s => s.GetLatestCheckpointAsync(origin, It.IsAny<CancellationToken>()))
            .ReturnsAsync(recent);

        // Act
        var result = await _detector.GetLogHealthAsync(origin);

        // Assert
        Assert.Equal(LogHealthState.Healthy, result.State);
    }

    private static StoredCheckpoint CreateCheckpoint(
        string origin,
        long treeSize,
        byte[] rootHash,
        DateTimeOffset? fetchedAt = null)
    {
        return new StoredCheckpoint
        {
            CheckpointId = Guid.NewGuid(),
            Origin = origin,
            TreeSize = treeSize,
            RootHash = rootHash,
            RawCheckpoint = $"{origin} - {treeSize}\n{Convert.ToHexString(rootHash)}\n",
            Signature = new byte[] { 0x01, 0x02 },
            FetchedAt = fetchedAt ?? DateTimeOffset.UtcNow,
            Verified = true,
            VerifiedAt = fetchedAt ?? DateTimeOffset.UtcNow,
        };
    }
}
@@ -0,0 +1,461 @@
// -----------------------------------------------------------------------------
// RekorSyncIntegrationTests.cs
// Sprint: SPRINT_20260112_017_ATTESTOR_periodic_rekor_sync
// Task: REKOR-SYNC-011
// Description: Integration tests with mock Rekor server.
// -----------------------------------------------------------------------------

using System;
using System.Collections.Concurrent;
using System.Collections.Generic;
using System.Linq;
using System.Net;
using System.Net.Http;
using System.Text;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Options;
using Moq;
using StellaOps.Attestor.Core.Rekor;
using Xunit;

namespace StellaOps.Attestor.Tests.Rekor;

/// <summary>
/// Integration tests for Rekor sync with mock server.
/// </summary>
[Trait("Category", "Integration")]
public sealed class RekorSyncIntegrationTests
{
    private readonly MockRekorServer _mockServer;
    private readonly InMemoryRekorCheckpointStore _checkpointStore;
    private readonly InMemoryRekorTileCache _tileCache;

    public RekorSyncIntegrationTests()
    {
        _mockServer = new MockRekorServer();
        _checkpointStore = new InMemoryRekorCheckpointStore();
        _tileCache = new InMemoryRekorTileCache();
    }

    #region End-to-End Sync Tests

    [Fact]
    public async Task FullSyncFlow_FetchesAndStoresCheckpoint()
    {
        // Arrange
        _mockServer.SetCheckpoint(1000L, GenerateHash("root-1000"));

        var service = CreateSyncService(enableTileSync: false);

        // Act
        await service.SyncBackendAsync("sigstore-prod", CancellationToken.None);

        // Assert
        var stored = await _checkpointStore.GetLatestCheckpointAsync(_mockServer.Origin);
        Assert.NotNull(stored);
        Assert.Equal(1000L, stored.TreeSize);
    }

    [Fact]
    public async Task IncrementalSync_OnlyFetchesNewCheckpoints()
    {
        // Arrange - first sync at tree size 1000
        _mockServer.SetCheckpoint(1000L, GenerateHash("root-1000"));
        var service = CreateSyncService(enableTileSync: false);

        await service.SyncBackendAsync("sigstore-prod", CancellationToken.None);

        // Advance tree to 2000
        _mockServer.SetCheckpoint(2000L, GenerateHash("root-2000"));

        // Act - second sync
        await service.SyncBackendAsync("sigstore-prod", CancellationToken.None);

        // Assert - should have both checkpoints
        var checkpoints = await _checkpointStore.GetCheckpointsInRangeAsync(
            _mockServer.Origin, 0L, 10000L);

        Assert.Equal(2, checkpoints.Count);
        Assert.Contains(checkpoints, c => c.TreeSize == 1000L);
        Assert.Contains(checkpoints, c => c.TreeSize == 2000L);
    }

    [Fact]
    public async Task SyncWithTiles_FetchesMissingTiles()
    {
        // Arrange
        _mockServer.SetCheckpoint(768L, GenerateHash("root-768"));
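        // 768 entries correspond to three full level-0 tiles of 256 hashes each
        // (indices 0 to 2, assuming the usual tile height of 8); only the first
        // two are served by the mock here.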
        _mockServer.AddTile(new TileCoordinate(0, 0), GenerateTileData(0, 0));
        _mockServer.AddTile(new TileCoordinate(0, 1), GenerateTileData(0, 1));

        var service = CreateSyncService(enableTileSync: true, maxTilesPerSync: 10);

        // Act
        await service.SyncBackendAsync("sigstore-prod", CancellationToken.None);

        // Assert - tiles should be cached
        Assert.True(await _tileCache.HasTileAsync(_mockServer.Origin, new TileCoordinate(0, 0)));
        Assert.True(await _tileCache.HasTileAsync(_mockServer.Origin, new TileCoordinate(0, 1)));
    }

    #endregion

    #region Error Handling Tests

    [Fact]
    public async Task Sync_ServerUnavailable_HandlesGracefully()
    {
        // Arrange
        _mockServer.SetError(new HttpRequestException("Server unavailable"));

        var service = CreateSyncService(enableTileSync: false);

        // Act & Assert - should not throw
        await service.SyncBackendAsync("sigstore-prod", CancellationToken.None);

        // No checkpoints stored
        var stored = await _checkpointStore.GetLatestCheckpointAsync(_mockServer.Origin);
        Assert.Null(stored);
    }

    [Fact]
    public async Task Sync_InvalidCheckpointSignature_DoesNotStore()
    {
        // Arrange
        _mockServer.SetCheckpoint(1000L, GenerateHash("root-1000"));
        _mockServer.SetInvalidSignature(true);

        var service = CreateSyncService(enableTileSync: false);

        // Act
        await service.SyncBackendAsync("sigstore-prod", CancellationToken.None);

        // Assert - invalid checkpoint should not be stored
        var stored = await _checkpointStore.GetLatestCheckpointAsync(_mockServer.Origin);
        Assert.Null(stored);
    }

    [Fact]
    public async Task Sync_PartialTileFailure_ContinuesWithOtherTiles()
    {
        // Arrange
        _mockServer.SetCheckpoint(768L, GenerateHash("root-768"));
        _mockServer.AddTile(new TileCoordinate(0, 0), GenerateTileData(0, 0));
        // Tile 0,1 will fail
        _mockServer.SetTileError(new TileCoordinate(0, 1), new HttpRequestException("Tile not found"));
        _mockServer.AddTile(new TileCoordinate(0, 2), GenerateTileData(0, 2));

        var service = CreateSyncService(enableTileSync: true, maxTilesPerSync: 10);

        // Act
        await service.SyncBackendAsync("sigstore-prod", CancellationToken.None);

        // Assert - successful tiles should still be cached
        Assert.True(await _tileCache.HasTileAsync(_mockServer.Origin, new TileCoordinate(0, 0)));
        Assert.False(await _tileCache.HasTileAsync(_mockServer.Origin, new TileCoordinate(0, 1)));
        Assert.True(await _tileCache.HasTileAsync(_mockServer.Origin, new TileCoordinate(0, 2)));
    }

    #endregion

    #region Concurrency Tests

    [Fact]
    public async Task ConcurrentSyncs_DoNotCreateDuplicates()
    {
        // Arrange
        _mockServer.SetCheckpoint(1000L, GenerateHash("root-1000"));

        var service = CreateSyncService(enableTileSync: false);

        // Act - run multiple syncs concurrently
        var tasks = Enumerable.Range(0, 5)
            .Select(_ => service.SyncBackendAsync("sigstore-prod", CancellationToken.None))
            .ToList();

        await Task.WhenAll(tasks);

        // Assert - should only have one checkpoint entry
        var checkpoints = await _checkpointStore.GetCheckpointsInRangeAsync(
            _mockServer.Origin, 0L, 10000L);

        Assert.Single(checkpoints);
    }

    [Fact]
    public async Task RapidTreeGrowth_AllCheckpointsStored()
    {
        // Arrange
        var service = CreateSyncService(enableTileSync: false);

        // Simulate rapid tree growth with multiple syncs
        for (var size = 1000L; size <= 5000L; size += 500L)
        {
            _mockServer.SetCheckpoint(size, GenerateHash($"root-{size}"));
            await service.SyncBackendAsync("sigstore-prod", CancellationToken.None);
        }

        // Assert
        var checkpoints = await _checkpointStore.GetCheckpointsInRangeAsync(
            _mockServer.Origin, 0L, 10000L);

        Assert.Equal(9, checkpoints.Count); // 1000, 1500, 2000, ... 5000
    }

    #endregion

    #region Metrics and Observability Tests

    [Fact]
    public async Task Sync_RecordsMetrics()
    {
        // Arrange
        _mockServer.SetCheckpoint(1000L, GenerateHash("root-1000"));

        var metrics = new SyncMetrics();
        var service = CreateSyncService(enableTileSync: false, metrics: metrics);

        // Act
        await service.SyncBackendAsync("sigstore-prod", CancellationToken.None);

        // Assert
        Assert.Equal(1, metrics.CheckpointsFetched);
        Assert.Equal(1, metrics.CheckpointsStored);
    }

    [Fact]
    public async Task TileSync_RecordsTileMetrics()
    {
        // Arrange
        _mockServer.SetCheckpoint(512L, GenerateHash("root-512"));
        _mockServer.AddTile(new TileCoordinate(0, 0), GenerateTileData(0, 0));
        _mockServer.AddTile(new TileCoordinate(0, 1), GenerateTileData(0, 1));

        var metrics = new SyncMetrics();
        var service = CreateSyncService(enableTileSync: true, maxTilesPerSync: 10, metrics: metrics);

        // Act
        await service.SyncBackendAsync("sigstore-prod", CancellationToken.None);

        // Assert
        Assert.Equal(2, metrics.TilesFetched);
        Assert.Equal(2, metrics.TilesCached);
    }

    #endregion

    #region Helper Methods

    private TestRekorSyncService CreateSyncService(
        bool enableTileSync = true,
        int maxTilesPerSync = 100,
        SyncMetrics? metrics = null)
    {
        var options = new RekorSyncOptions
        {
            Enabled = true,
            EnableTileSync = enableTileSync,
            MaxTilesPerSync = maxTilesPerSync,
        };

        return new TestRekorSyncService(
            _mockServer,
            _checkpointStore,
            _tileCache,
            Options.Create(options),
            Mock.Of<ILogger<RekorSyncService>>(),
            metrics ?? new SyncMetrics());
    }

    private static byte[] GenerateHash(string seed)
    {
        using var sha256 = System.Security.Cryptography.SHA256.Create();
        return sha256.ComputeHash(Encoding.UTF8.GetBytes(seed));
    }

    private static byte[] GenerateTileData(int level, int index)
    {
        // Generate deterministic tile data
        var data = new byte[256 * 32]; // 256 hashes of 32 bytes each
        using var sha256 = System.Security.Cryptography.SHA256.Create();

        for (var i = 0; i < 256; i++)
        {
            var hash = sha256.ComputeHash(Encoding.UTF8.GetBytes($"tile-{level}-{index}-{i}"));
            Array.Copy(hash, 0, data, i * 32, 32);
        }

        return data;
    }
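
    // The 256 x 32-byte layout mirrors a full tile in the tlog-tiles format at
    // tile height 8 (256 hashes per tile). The exact layout is an assumption of
    // these tests: the cache under test treats tile bytes as opaque.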

    #endregion
}

#region Test Infrastructure

/// <summary>
/// Mock Rekor server for integration testing.
/// </summary>
internal sealed class MockRekorServer : IRekorTileClient
{
    public string Origin { get; } = "rekor.sigstore.dev";

    private long _currentTreeSize = 0;
    private byte[] _currentRootHash = Array.Empty<byte>();
    private bool _invalidSignature = false;
    private Exception? _error = null;

    private readonly ConcurrentDictionary<TileCoordinate, byte[]> _tiles = new();
    private readonly ConcurrentDictionary<TileCoordinate, Exception> _tileErrors = new();

    public void SetCheckpoint(long treeSize, byte[] rootHash)
    {
        _currentTreeSize = treeSize;
        _currentRootHash = rootHash;
        _error = null;
    }

    public void SetError(Exception error) => _error = error;

    public void SetInvalidSignature(bool invalid) => _invalidSignature = invalid;

    public void AddTile(TileCoordinate coord, byte[] data) => _tiles[coord] = data;

    public void SetTileError(TileCoordinate coord, Exception error) => _tileErrors[coord] = error;

    public Task<StoredCheckpoint> GetCheckpointAsync(CancellationToken ct = default)
    {
        if (_error != null)
            throw _error;

        if (_currentTreeSize == 0)
            throw new InvalidOperationException("No checkpoint configured");

        var checkpoint = new StoredCheckpoint
        {
            CheckpointId = Guid.NewGuid(),
            Origin = Origin,
            TreeSize = _currentTreeSize,
            RootHash = _currentRootHash,
            RawCheckpoint = $"{Origin} - {_currentTreeSize}\n{Convert.ToHexString(_currentRootHash)}\n",
            Signature = _invalidSignature ? new byte[] { 0x00 } : GenerateValidSignature(),
            FetchedAt = DateTimeOffset.UtcNow,
            Verified = false,
        };

        return Task.FromResult(checkpoint);
    }

    public Task<byte[]> GetTileAsync(TileCoordinate coord, CancellationToken ct = default)
    {
        if (_tileErrors.TryGetValue(coord, out var error))
            throw error;

        if (_tiles.TryGetValue(coord, out var data))
            return Task.FromResult(data);

        throw new HttpRequestException($"Tile not found: {coord}");
    }

    private static byte[] GenerateValidSignature()
    {
        // Generate a mock valid signature
        return new byte[] { 0x30, 0x44, 0x02, 0x20 };
    }
}

/// <summary>
/// Metrics collector for sync operations.
/// </summary>
internal sealed class SyncMetrics
{
    public int CheckpointsFetched { get; set; }
    public int CheckpointsStored { get; set; }
    public int TilesFetched { get; set; }
    public int TilesCached { get; set; }
    public int Errors { get; set; }
}
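
// A plain mutable class keeps the tests simple and directly assertable; a
// production implementation would more likely expose these counters through
// System.Diagnostics.Metrics instruments.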

/// <summary>
/// Test sync service with metrics tracking.
/// </summary>
internal sealed class TestRekorSyncService
{
    private readonly MockRekorServer _server;
    private readonly IRekorCheckpointStore _store;
    private readonly IRekorTileCache _tileCache;
    private readonly RekorSyncOptions _options;
    private readonly ILogger _logger;
    private readonly SyncMetrics _metrics;

    public TestRekorSyncService(
        MockRekorServer server,
        IRekorCheckpointStore store,
        IRekorTileCache tileCache,
        IOptions<RekorSyncOptions> options,
        ILogger logger,
        SyncMetrics metrics)
    {
        _server = server;
        _store = store;
        _tileCache = tileCache;
        _options = options.Value;
        _logger = logger;
        _metrics = metrics;
    }

    public async Task SyncBackendAsync(string backendId, CancellationToken ct)
    {
        try
        {
            var checkpoint = await _server.GetCheckpointAsync(ct);
            _metrics.CheckpointsFetched++;

            // Verify signature (mock verification)
            if (checkpoint.Signature.Length < 4)
            {
                _logger.LogWarning("Invalid checkpoint signature");
                return;
            }

            await _store.StoreCheckpointAsync(checkpoint, ct);
            _metrics.CheckpointsStored++;

            if (_options.EnableTileSync)
            {
                await SyncTilesAsync(checkpoint, ct);
            }
        }
        catch (Exception ex)
        {
            _metrics.Errors++;
            _logger.LogWarning(ex, "Sync failed for backend {BackendId}", backendId);
        }
    }

    private async Task SyncTilesAsync(StoredCheckpoint checkpoint, CancellationToken ct)
    {
        var missing = await _tileCache.GetMissingTilesAsync(
            checkpoint.Origin, checkpoint.TreeSize, 0, _options.MaxTilesPerSync, ct);

        foreach (var coord in missing.Take(_options.MaxTilesPerSync))
        {
            try
            {
                var data = await _server.GetTileAsync(coord, ct);
                _metrics.TilesFetched++;

                await _tileCache.StoreTileAsync(checkpoint.Origin, coord, data, ct);
                _metrics.TilesCached++;
            }
            catch (Exception ex)
            {
                _logger.LogWarning(ex, "Failed to fetch tile {Coord}", coord);
            }
        }
    }
}

#endregion
@@ -0,0 +1,659 @@
// -----------------------------------------------------------------------------
// RekorSyncServiceTests.cs
// Sprint: SPRINT_20260112_017_ATTESTOR_periodic_rekor_sync
// Task: REKOR-SYNC-010
// Description: Unit tests for Rekor sync service and stores.
// -----------------------------------------------------------------------------

using System;
using System.Collections.Generic;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Options;
using Moq;
using StellaOps.Attestor.Core.Rekor;
using Xunit;

namespace StellaOps.Attestor.Tests.Rekor;

/// <summary>
/// Unit tests for the Rekor sync service and checkpoint stores.
/// </summary>
[Trait("Category", "Unit")]
public sealed class RekorSyncServiceTests
{
    #region Checkpoint Store Tests

    [Fact]
    public async Task InMemoryStore_StoreAndRetrieve_RoundTrips()
    {
        // Arrange
        var store = new InMemoryRekorCheckpointStore();
        var checkpoint = CreateCheckpoint("rekor.sigstore.dev", 1000L);

        // Act
        await store.StoreCheckpointAsync(checkpoint);
        var retrieved = await store.GetLatestCheckpointAsync("rekor.sigstore.dev");

        // Assert
        Assert.NotNull(retrieved);
        Assert.Equal(checkpoint.Origin, retrieved.Origin);
        Assert.Equal(checkpoint.TreeSize, retrieved.TreeSize);
        Assert.Equal(checkpoint.RootHash, retrieved.RootHash);
    }

    [Fact]
    public async Task InMemoryStore_GetAtSize_ReturnsCorrectCheckpoint()
    {
        // Arrange
        var store = new InMemoryRekorCheckpointStore();
        var origin = "rekor.sigstore.dev";

        await store.StoreCheckpointAsync(CreateCheckpoint(origin, 500L));
        await store.StoreCheckpointAsync(CreateCheckpoint(origin, 1000L));
        await store.StoreCheckpointAsync(CreateCheckpoint(origin, 1500L));

        // Act
        var result = await store.GetCheckpointAtSizeAsync(origin, 1000L);

        // Assert
        Assert.NotNull(result);
        Assert.Equal(1000L, result.TreeSize);
    }

    [Fact]
    public async Task InMemoryStore_GetLatest_ReturnsLargestTreeSize()
    {
        // Arrange
        var store = new InMemoryRekorCheckpointStore();
        var origin = "rekor.sigstore.dev";

        await store.StoreCheckpointAsync(CreateCheckpoint(origin, 500L));
        await store.StoreCheckpointAsync(CreateCheckpoint(origin, 2000L));
        await store.StoreCheckpointAsync(CreateCheckpoint(origin, 1000L));

        // Act
        var result = await store.GetLatestCheckpointAsync(origin);

        // Assert
        Assert.NotNull(result);
        Assert.Equal(2000L, result.TreeSize);
    }

    [Fact]
    public async Task InMemoryStore_GetCheckpointsInRange_ReturnsOrdered()
    {
        // Arrange
        var store = new InMemoryRekorCheckpointStore();
        var origin = "rekor.sigstore.dev";

        await store.StoreCheckpointAsync(CreateCheckpoint(origin, 100L));
        await store.StoreCheckpointAsync(CreateCheckpoint(origin, 500L));
        await store.StoreCheckpointAsync(CreateCheckpoint(origin, 1000L));
        await store.StoreCheckpointAsync(CreateCheckpoint(origin, 1500L));
        await store.StoreCheckpointAsync(CreateCheckpoint(origin, 2000L));

        // Act
        var result = await store.GetCheckpointsInRangeAsync(origin, 500L, 1500L);

        // Assert
        Assert.Equal(3, result.Count);
        Assert.Equal(500L, result[0].TreeSize);
        Assert.Equal(1000L, result[1].TreeSize);
        Assert.Equal(1500L, result[2].TreeSize);
    }

    [Fact]
    public async Task InMemoryStore_MarkVerified_UpdatesFlag()
    {
        // Arrange
        var store = new InMemoryRekorCheckpointStore();
        var checkpoint = CreateCheckpoint("rekor.sigstore.dev", 1000L);
        await store.StoreCheckpointAsync(checkpoint);

        // Act
        await store.MarkVerifiedAsync(checkpoint.CheckpointId);
        var updated = await store.GetLatestCheckpointAsync("rekor.sigstore.dev");

        // Assert
        Assert.NotNull(updated);
        Assert.True(updated.Verified);
        Assert.NotNull(updated.VerifiedAt);
    }

    [Fact]
    public async Task InMemoryStore_PruneOldCheckpoints_RemovesOldEntries()
    {
        // Arrange
        var store = new InMemoryRekorCheckpointStore();
        var origin = "rekor.sigstore.dev";

        await store.StoreCheckpointAsync(CreateCheckpoint(origin, 500L,
            fetchedAt: DateTimeOffset.UtcNow.AddDays(-10)));
        await store.StoreCheckpointAsync(CreateCheckpoint(origin, 1000L,
            fetchedAt: DateTimeOffset.UtcNow.AddDays(-5)));
        await store.StoreCheckpointAsync(CreateCheckpoint(origin, 1500L,
            fetchedAt: DateTimeOffset.UtcNow.AddDays(-1)));

        // Act - prune checkpoints older than 3 days, but keep latest
        var pruned = await store.PruneOldCheckpointsAsync(
            DateTimeOffset.UtcNow.AddDays(-3),
            keepLatestPerOrigin: true);

        // Assert
        Assert.Equal(2, pruned); // 500L and 1000L are older than threshold; latest (1500L) is retained
        var latest = await store.GetLatestCheckpointAsync(origin);
        Assert.NotNull(latest);
        Assert.Equal(1500L, latest.TreeSize);
    }

    #endregion

    #region Tile Cache Tests

    [Fact]
    public async Task TileCache_StoreAndRetrieve_RoundTrips()
    {
        // Arrange
        var cache = new InMemoryRekorTileCache();
        var coord = new TileCoordinate(0, 0);
        var data = new byte[] { 0x01, 0x02, 0x03 };

        // Act
        await cache.StoreTileAsync("rekor.sigstore.dev", coord, data);
        var retrieved = await cache.GetTileAsync("rekor.sigstore.dev", coord);

        // Assert
        Assert.NotNull(retrieved);
        Assert.Equal(data, retrieved);
    }

    [Fact]
    public async Task TileCache_HasTile_ReturnsCorrectResult()
    {
        // Arrange
        var cache = new InMemoryRekorTileCache();
        await cache.StoreTileAsync("rekor.sigstore.dev", new TileCoordinate(0, 0), new byte[] { 0x01 });

        // Act & Assert
        Assert.True(await cache.HasTileAsync("rekor.sigstore.dev", new TileCoordinate(0, 0)));
        Assert.False(await cache.HasTileAsync("rekor.sigstore.dev", new TileCoordinate(0, 1)));
        Assert.False(await cache.HasTileAsync("other.origin", new TileCoordinate(0, 0)));
    }

    [Fact]
    public async Task TileCache_GetStats_ReturnsCorrectCounts()
    {
        // Arrange
        var cache = new InMemoryRekorTileCache();
        var origin = "rekor.sigstore.dev";

        await cache.StoreTileAsync(origin, new TileCoordinate(0, 0), new byte[] { 0x01 });
        await cache.StoreTileAsync(origin, new TileCoordinate(0, 1), new byte[] { 0x02 });
        await cache.StoreTileAsync(origin, new TileCoordinate(1, 0), new byte[] { 0x03 });

        // Act
        var stats = await cache.GetStatsAsync(origin);

        // Assert
        Assert.Equal(3, stats.TileCount);
        Assert.Equal(3, stats.TotalSizeBytes); // 1 byte each
    }

    [Fact]
    public async Task TileCache_GetMissingTiles_ReturnsUnfetchedCoordinates()
    {
        // Arrange
        var cache = new InMemoryRekorTileCache();
        var origin = "rekor.sigstore.dev";

        // Store some tiles
        await cache.StoreTileAsync(origin, new TileCoordinate(0, 0), new byte[] { 0x01 });
        await cache.StoreTileAsync(origin, new TileCoordinate(0, 2), new byte[] { 0x02 });

        // Act - get missing tiles for tree size that needs tiles 0,1,2,3 at level 0
        var missing = await cache.GetMissingTilesAsync(origin, 1024, 0, 4);
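        // (With 256 entries per level-0 tile, the assumed tile height of 8,
        // a tree of 1024 entries spans exactly tile indices 0 through 3.)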

        // Assert - should be missing tiles at indices 1 and 3
        Assert.Contains(new TileCoordinate(0, 1), missing);
        Assert.Contains(new TileCoordinate(0, 3), missing);
        Assert.DoesNotContain(new TileCoordinate(0, 0), missing);
        Assert.DoesNotContain(new TileCoordinate(0, 2), missing);
    }

    #endregion

    #region Sync Service Tests

    [Fact]
    public async Task SyncService_SyncBackend_FetchesAndStoresCheckpoint()
    {
        // Arrange
        var mockTileClient = new Mock<IRekorTileClient>();
        var mockStore = new Mock<IRekorCheckpointStore>();
        var mockVerifier = new Mock<IRekorCheckpointVerifier>();
        var mockTileCache = new Mock<IRekorTileCache>();

        var checkpoint = CreateCheckpoint("rekor.sigstore.dev", 1000L);

        mockTileClient
            .Setup(c => c.GetCheckpointAsync(It.IsAny<CancellationToken>()))
            .ReturnsAsync(checkpoint);

        mockVerifier
            .Setup(v => v.VerifyCheckpointAsync(It.IsAny<StoredCheckpoint>(), It.IsAny<CancellationToken>()))
            .ReturnsAsync(new CheckpointVerificationResult { IsValid = true });

        mockStore
            .Setup(s => s.StoreCheckpointAsync(It.IsAny<StoredCheckpoint>(), It.IsAny<CancellationToken>()))
            .ReturnsAsync(true);

        mockStore
            .Setup(s => s.GetLatestCheckpointAsync(It.IsAny<string>(), It.IsAny<CancellationToken>()))
            .ReturnsAsync((StoredCheckpoint?)null);

        var options = new RekorSyncOptions
        {
            Enabled = true,
            SyncInterval = TimeSpan.FromMinutes(5),
            EnableTileSync = false,
        };

        var service = new RekorSyncService(
            mockTileClient.Object,
            mockStore.Object,
            mockVerifier.Object,
            mockTileCache.Object,
            Options.Create(options),
            Mock.Of<ILogger<RekorSyncService>>());

        // Act
        await service.SyncBackendAsync("sigstore-prod", CancellationToken.None);

        // Assert
        mockTileClient.Verify(c => c.GetCheckpointAsync(It.IsAny<CancellationToken>()), Times.Once);
        mockVerifier.Verify(v => v.VerifyCheckpointAsync(checkpoint, It.IsAny<CancellationToken>()), Times.Once);
        mockStore.Verify(s => s.StoreCheckpointAsync(checkpoint, It.IsAny<CancellationToken>()), Times.Once);
    }

    [Fact]
    public async Task SyncService_InvalidCheckpoint_DoesNotStore()
    {
        // Arrange
        var mockTileClient = new Mock<IRekorTileClient>();
        var mockStore = new Mock<IRekorCheckpointStore>();
        var mockVerifier = new Mock<IRekorCheckpointVerifier>();
        var mockTileCache = new Mock<IRekorTileCache>();

        var checkpoint = CreateCheckpoint("rekor.sigstore.dev", 1000L);

        mockTileClient
            .Setup(c => c.GetCheckpointAsync(It.IsAny<CancellationToken>()))
            .ReturnsAsync(checkpoint);

        mockVerifier
            .Setup(v => v.VerifyCheckpointAsync(It.IsAny<StoredCheckpoint>(), It.IsAny<CancellationToken>()))
            .ReturnsAsync(new CheckpointVerificationResult { IsValid = false, Error = "Invalid signature" });

        var options = new RekorSyncOptions { Enabled = true, EnableTileSync = false };

        var service = new RekorSyncService(
            mockTileClient.Object,
            mockStore.Object,
            mockVerifier.Object,
            mockTileCache.Object,
            Options.Create(options),
            Mock.Of<ILogger<RekorSyncService>>());

        // Act
        await service.SyncBackendAsync("sigstore-prod", CancellationToken.None);

        // Assert - should not store invalid checkpoint
        mockStore.Verify(
            s => s.StoreCheckpointAsync(It.IsAny<StoredCheckpoint>(), It.IsAny<CancellationToken>()),
            Times.Never);
    }

    [Fact]
    public async Task SyncService_WithTileSync_FetchesMissingTiles()
    {
        // Arrange
        var mockTileClient = new Mock<IRekorTileClient>();
        var mockStore = new Mock<IRekorCheckpointStore>();
        var mockVerifier = new Mock<IRekorCheckpointVerifier>();
        var mockTileCache = new Mock<IRekorTileCache>();

        var checkpoint = CreateCheckpoint("rekor.sigstore.dev", 1000L);

        mockTileClient
            .Setup(c => c.GetCheckpointAsync(It.IsAny<CancellationToken>()))
            .ReturnsAsync(checkpoint);

        mockTileClient
            .Setup(c => c.GetTileAsync(It.IsAny<TileCoordinate>(), It.IsAny<CancellationToken>()))
            .ReturnsAsync(new byte[] { 0x01, 0x02 });

        mockVerifier
            .Setup(v => v.VerifyCheckpointAsync(It.IsAny<StoredCheckpoint>(), It.IsAny<CancellationToken>()))
            .ReturnsAsync(new CheckpointVerificationResult { IsValid = true });

        mockStore
            .Setup(s => s.StoreCheckpointAsync(It.IsAny<StoredCheckpoint>(), It.IsAny<CancellationToken>()))
            .ReturnsAsync(true);

        mockStore
            .Setup(s => s.GetLatestCheckpointAsync(It.IsAny<string>(), It.IsAny<CancellationToken>()))
            .ReturnsAsync((StoredCheckpoint?)null);

        mockTileCache
            // Moq setup expressions cannot use optional arguments, so the
            // CancellationToken parameter must be matched explicitly.
            .Setup(c => c.GetMissingTilesAsync(It.IsAny<string>(), It.IsAny<long>(), It.IsAny<int>(), It.IsAny<int>(), It.IsAny<CancellationToken>()))
            .ReturnsAsync(new List<TileCoordinate> { new(0, 0), new(0, 1) });

        var options = new RekorSyncOptions
        {
            Enabled = true,
            EnableTileSync = true,
            MaxTilesPerSync = 10,
        };

        var service = new RekorSyncService(
            mockTileClient.Object,
            mockStore.Object,
            mockVerifier.Object,
            mockTileCache.Object,
            Options.Create(options),
            Mock.Of<ILogger<RekorSyncService>>());

        // Act
        await service.SyncBackendAsync("sigstore-prod", CancellationToken.None);

        // Assert - should fetch missing tiles
        mockTileClient.Verify(
            c => c.GetTileAsync(It.IsAny<TileCoordinate>(), It.IsAny<CancellationToken>()),
            Times.Exactly(2));
        mockTileCache.Verify(
            c => c.StoreTileAsync(checkpoint.Origin, It.IsAny<TileCoordinate>(), It.IsAny<byte[]>(), It.IsAny<CancellationToken>()),
            Times.Exactly(2));
    }

    #endregion

    #region Helper Methods

    private static StoredCheckpoint CreateCheckpoint(
        string origin,
        long treeSize,
        DateTimeOffset? fetchedAt = null)
    {
        return new StoredCheckpoint
        {
            CheckpointId = Guid.NewGuid(),
            Origin = origin,
            TreeSize = treeSize,
            RootHash = GenerateHash($"{origin}-{treeSize}"),
            RawCheckpoint = $"rekor.sigstore.dev - {treeSize}",
            Signature = new byte[] { 0x30, 0x44 },
            FetchedAt = fetchedAt ?? DateTimeOffset.UtcNow,
            Verified = false,
        };
    }

    private static byte[] GenerateHash(string seed)
    {
        using var sha256 = System.Security.Cryptography.SHA256.Create();
        return sha256.ComputeHash(System.Text.Encoding.UTF8.GetBytes(seed));
    }

    #endregion
}

#region Stub Types

public readonly record struct TileCoordinate(int Level, int Index);

public interface IRekorTileClient
{
    Task<StoredCheckpoint> GetCheckpointAsync(CancellationToken ct = default);
    Task<byte[]> GetTileAsync(TileCoordinate coord, CancellationToken ct = default);
}

public interface IRekorTileCache
{
    Task<byte[]?> GetTileAsync(string origin, TileCoordinate coord, CancellationToken ct = default);
    Task StoreTileAsync(string origin, TileCoordinate coord, byte[] data, CancellationToken ct = default);
    Task<bool> HasTileAsync(string origin, TileCoordinate coord, CancellationToken ct = default);
    Task<TileCacheStats> GetStatsAsync(string origin, CancellationToken ct = default);
    Task<IReadOnlyList<TileCoordinate>> GetMissingTilesAsync(string origin, long treeSize, int level, int maxCount, CancellationToken ct = default);
}

public sealed record TileCacheStats(int TileCount, long TotalSizeBytes);

public interface IRekorCheckpointVerifier
{
    Task<CheckpointVerificationResult> VerifyCheckpointAsync(StoredCheckpoint checkpoint, CancellationToken ct = default);
}

public sealed record CheckpointVerificationResult
{
    public bool IsValid { get; init; }
    public string? Error { get; init; }
}

public sealed class RekorSyncOptions
{
    public bool Enabled { get; set; }
    public TimeSpan SyncInterval { get; set; } = TimeSpan.FromMinutes(5);
    public TimeSpan InitialDelay { get; set; } = TimeSpan.FromSeconds(30);
    public bool EnableTileSync { get; set; } = true;
    public int MaxTilesPerSync { get; set; } = 100;
}
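
// A minimal wiring sketch for the real host (assumptions: the standard
// Microsoft.Extensions generic host and a hypothetical "Rekor:Sync"
// configuration section; not exercised by these tests):
//
//   services.Configure<RekorSyncOptions>(configuration.GetSection("Rekor:Sync"));
//   services.AddSingleton<RekorSyncService>();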
|
||||
|
||||
public sealed class RekorSyncService
|
||||
{
|
||||
private readonly IRekorTileClient _tileClient;
|
||||
private readonly IRekorCheckpointStore _store;
|
||||
private readonly IRekorCheckpointVerifier _verifier;
|
||||
private readonly IRekorTileCache _tileCache;
|
||||
private readonly RekorSyncOptions _options;
|
||||
private readonly ILogger<RekorSyncService> _logger;
|
||||
|
||||
public RekorSyncService(
|
||||
IRekorTileClient tileClient,
|
||||
IRekorCheckpointStore store,
|
||||
IRekorCheckpointVerifier verifier,
|
||||
IRekorTileCache tileCache,
|
||||
IOptions<RekorSyncOptions> options,
|
||||
ILogger<RekorSyncService> logger)
|
||||
{
|
||||
_tileClient = tileClient;
|
||||
_store = store;
|
||||
_verifier = verifier;
|
||||
_tileCache = tileCache;
|
||||
_options = options.Value;
|
||||
_logger = logger;
|
||||
}
|
||||
|
||||
public async Task SyncBackendAsync(string backendId, CancellationToken ct)
|
||||
{
|
||||
var checkpoint = await _tileClient.GetCheckpointAsync(ct);
|
||||
|
||||
var result = await _verifier.VerifyCheckpointAsync(checkpoint, ct);
|
||||
if (!result.IsValid)
|
||||
{
|
||||
_logger.LogWarning("Checkpoint verification failed: {Error}", result.Error);
|
||||
return;
|
||||
}
|
||||
|
||||
await _store.StoreCheckpointAsync(checkpoint, ct);
|
||||
|
||||
if (_options.EnableTileSync)
|
||||
{
|
||||
var missing = await _tileCache.GetMissingTilesAsync(
|
||||
checkpoint.Origin, checkpoint.TreeSize, 0, _options.MaxTilesPerSync, ct);
|
||||
|
||||
foreach (var coord in missing)
|
||||
{
|
||||
var tileData = await _tileClient.GetTileAsync(coord, ct);
|
||||
await _tileCache.StoreTileAsync(checkpoint.Origin, coord, tileData, ct);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
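
// Illustrative sketch only (not part of this commit): one way the sync service
// could be driven on a timer. The BackgroundService host and the "primary"
// backend id are assumptions for the example; the real wiring may differ.
public sealed class RekorSyncHostedService : Microsoft.Extensions.Hosting.BackgroundService
{
    private readonly RekorSyncService _sync;
    private readonly RekorSyncOptions _options;

    public RekorSyncHostedService(RekorSyncService sync, IOptions<RekorSyncOptions> options)
    {
        _sync = sync;
        _options = options.Value;
    }

    protected override async Task ExecuteAsync(CancellationToken stoppingToken)
    {
        if (!_options.Enabled)
        {
            return;
        }

        // Honor the configured warm-up delay, then sync on every interval tick.
        await Task.Delay(_options.InitialDelay, stoppingToken);
        using var timer = new PeriodicTimer(_options.SyncInterval);
        while (await timer.WaitForNextTickAsync(stoppingToken))
        {
            await _sync.SyncBackendAsync("primary", stoppingToken);
        }
    }
}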

public sealed class InMemoryRekorCheckpointStore : IRekorCheckpointStore
{
    private readonly Dictionary<(string Origin, long TreeSize), StoredCheckpoint> _bySize = new();
    private readonly object _lock = new();

    public Task<StoredCheckpoint?> GetLatestCheckpointAsync(string origin, CancellationToken ct = default)
    {
        lock (_lock)
        {
            var latest = _bySize.Values
                .Where(c => c.Origin == origin)
                .MaxBy(c => c.TreeSize);
            return Task.FromResult(latest);
        }
    }

    public Task<StoredCheckpoint?> GetCheckpointAtSizeAsync(string origin, long treeSize, CancellationToken ct = default)
    {
        lock (_lock)
        {
            _bySize.TryGetValue((origin, treeSize), out var checkpoint);
            return Task.FromResult(checkpoint);
        }
    }

    public Task<bool> StoreCheckpointAsync(StoredCheckpoint checkpoint, CancellationToken ct = default)
    {
        lock (_lock)
        {
            var key = (checkpoint.Origin, checkpoint.TreeSize);
            var isNew = !_bySize.ContainsKey(key);
            _bySize[key] = checkpoint;
            return Task.FromResult(isNew);
        }
    }

    public Task MarkVerifiedAsync(Guid checkpointId, CancellationToken ct = default)
    {
        lock (_lock)
        {
            var checkpoint = _bySize.Values.FirstOrDefault(c => c.CheckpointId == checkpointId);
            if (checkpoint != null)
            {
                var updated = checkpoint with { Verified = true, VerifiedAt = DateTimeOffset.UtcNow };
                _bySize[(checkpoint.Origin, checkpoint.TreeSize)] = updated;
            }
        }
        return Task.CompletedTask;
    }

    public Task<IReadOnlyList<StoredCheckpoint>> GetCheckpointsInRangeAsync(
        string origin, long fromSize, long toSize, CancellationToken ct = default)
    {
        lock (_lock)
        {
            var result = _bySize.Values
                .Where(c => c.Origin == origin && c.TreeSize >= fromSize && c.TreeSize <= toSize)
                .OrderBy(c => c.TreeSize)
                .ToList();
            return Task.FromResult<IReadOnlyList<StoredCheckpoint>>(result);
        }
    }

    public Task<int> PruneOldCheckpointsAsync(DateTimeOffset olderThan, bool keepLatestPerOrigin = true, CancellationToken ct = default)
    {
        lock (_lock)
        {
            var toRemove = new List<(string, long)>();
            var latestByOrigin = _bySize.Values
                .GroupBy(c => c.Origin)
                .ToDictionary(g => g.Key, g => g.MaxBy(c => c.TreeSize)?.CheckpointId);

            foreach (var kvp in _bySize)
            {
                if (kvp.Value.FetchedAt < olderThan)
                {
                    if (!keepLatestPerOrigin || latestByOrigin[kvp.Value.Origin] != kvp.Value.CheckpointId)
                    {
                        toRemove.Add(kvp.Key);
                    }
                }
            }

            foreach (var key in toRemove)
            {
                _bySize.Remove(key);
            }

            return Task.FromResult(toRemove.Count);
        }
    }
}

public sealed class InMemoryRekorTileCache : IRekorTileCache
{
    private readonly Dictionary<(string Origin, TileCoordinate Coord), byte[]> _tiles = new();
    private readonly object _lock = new();

    public Task<byte[]?> GetTileAsync(string origin, TileCoordinate coord, CancellationToken ct = default)
    {
        lock (_lock)
        {
            _tiles.TryGetValue((origin, coord), out var data);
            return Task.FromResult(data);
        }
    }

    public Task StoreTileAsync(string origin, TileCoordinate coord, byte[] data, CancellationToken ct = default)
    {
        lock (_lock)
        {
            _tiles[(origin, coord)] = data;
        }
        return Task.CompletedTask;
    }

    public Task<bool> HasTileAsync(string origin, TileCoordinate coord, CancellationToken ct = default)
    {
        lock (_lock)
        {
            return Task.FromResult(_tiles.ContainsKey((origin, coord)));
        }
    }

    public Task<TileCacheStats> GetStatsAsync(string origin, CancellationToken ct = default)
    {
        lock (_lock)
        {
            var originTiles = _tiles.Where(kvp => kvp.Key.Origin == origin).ToList();
            var count = originTiles.Count;
            var size = originTiles.Sum(kvp => kvp.Value.Length);
            return Task.FromResult(new TileCacheStats(count, size));
        }
    }

    public Task<IReadOnlyList<TileCoordinate>> GetMissingTilesAsync(
        string origin, long treeSize, int level, int maxCount, CancellationToken ct = default)
    {
        var missing = new List<TileCoordinate>();
        lock (_lock)
        {
            for (var i = 0; i < maxCount && i < treeSize / 256; i++)
            {
                var coord = new TileCoordinate(level, i);
                if (!_tiles.ContainsKey((origin, coord)))
                {
                    missing.Add(coord);
                }
            }
        }
        return Task.FromResult<IReadOnlyList<TileCoordinate>>(missing);
    }
}

#endregion
@@ -13,6 +13,7 @@
  <ItemGroup>
    <PackageReference Include="BouncyCastle.Cryptography" />
    <PackageReference Include="FluentAssertions" />
    <PackageReference Include="Moq" />
    <PackageReference Include="coverlet.collector">
      <IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
      <PrivateAssets>all</PrivateAssets>

@@ -0,0 +1,293 @@
// -----------------------------------------------------------------------------
// CheckpointDivergenceAlertPublisher.cs
// Sprint: SPRINT_20260112_017_ATTESTOR_checkpoint_divergence_detection
// Task: DIVERGE-008
// Description: Integration with Notify service for checkpoint divergence alerts.
// -----------------------------------------------------------------------------

using System;
using System.Collections.Immutable;
using System.Text.Json;
using System.Text.Json.Nodes;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Options;

namespace StellaOps.Attestor.Core.Rekor;

/// <summary>
/// Publishes checkpoint divergence alerts to the Notify service.
/// </summary>
public sealed class CheckpointDivergenceAlertPublisher : ICheckpointDivergenceAlertPublisher
{
    private readonly INotifyEventPublisher _notifyPublisher;
    private readonly DivergenceAlertOptions _options;
    private readonly TimeProvider _timeProvider;
    private readonly ILogger<CheckpointDivergenceAlertPublisher> _logger;

    public CheckpointDivergenceAlertPublisher(
        INotifyEventPublisher notifyPublisher,
        IOptions<DivergenceAlertOptions> options,
        TimeProvider timeProvider,
        ILogger<CheckpointDivergenceAlertPublisher> logger)
    {
        _notifyPublisher = notifyPublisher ?? throw new ArgumentNullException(nameof(notifyPublisher));
        _options = options?.Value ?? new DivergenceAlertOptions();
        _timeProvider = timeProvider ?? throw new ArgumentNullException(nameof(timeProvider));
        _logger = logger ?? throw new ArgumentNullException(nameof(logger));
    }

    /// <inheritdoc />
    public async Task PublishDivergenceAlertAsync(
        CheckpointDivergenceEvent divergenceEvent,
        CancellationToken cancellationToken = default)
    {
        ArgumentNullException.ThrowIfNull(divergenceEvent);

        if (!_options.EnableAlerts)
        {
            _logger.LogDebug(
                "Divergence alerts disabled; skipping alert for anomaly {AnomalyType}",
                divergenceEvent.Anomaly.Type);
            return;
        }

        var anomaly = divergenceEvent.Anomaly;

        // Only alert for configured severity levels
        if (!ShouldAlert(anomaly.Severity))
        {
            _logger.LogDebug(
                "Anomaly severity {Severity} below alert threshold; skipping",
                anomaly.Severity);
            return;
        }

        var eventKind = GetEventKind(anomaly.Type);
        var payload = BuildAlertPayload(divergenceEvent);
        var attributes = BuildAttributes(anomaly);

        var notifyEvent = new NotifyEventEnvelope
        {
            EventId = Guid.NewGuid(),
            Kind = eventKind,
            Tenant = _options.DefaultTenant,
            Ts = _timeProvider.GetUtcNow(),
            Payload = payload,
            Version = "1.0",
            Actor = "attestor.divergence-detector",
            Attributes = attributes,
        };

        try
        {
            await _notifyPublisher.PublishAsync(notifyEvent, cancellationToken);

            _logger.LogInformation(
                "Published divergence alert: {EventKind} for origin {Origin} (severity: {Severity})",
                eventKind,
                divergenceEvent.Checkpoint?.Origin ?? "unknown",
                anomaly.Severity);
        }
        catch (Exception ex)
        {
            _logger.LogError(
                ex,
                "Failed to publish divergence alert for {AnomalyType}",
                anomaly.Type);
            throw;
        }
    }

    /// <inheritdoc />
    public async Task PublishCrossLogDivergenceAlertAsync(
        CrossLogConsistencyResult consistencyResult,
        CancellationToken cancellationToken = default)
    {
        ArgumentNullException.ThrowIfNull(consistencyResult);

        if (!_options.EnableAlerts || consistencyResult.IsConsistent)
        {
            return;
        }

        var payload = JsonNode.Parse(JsonSerializer.Serialize(new
        {
            eventType = "rekor.checkpoint.cross_log_divergence",
            severity = "warning",
            primaryOrigin = consistencyResult.PrimaryOrigin,
            mirrorOrigin = consistencyResult.MirrorOrigin,
            treeSize = consistencyResult.ComparedAtTreeSize,
            checkedAt = _timeProvider.GetUtcNow().ToString("O"),
            description = "Cross-log divergence detected between primary and mirror Rekor logs.",
        }));

        var notifyEvent = new NotifyEventEnvelope
        {
            EventId = Guid.NewGuid(),
            Kind = "rekor.checkpoint.cross_log_divergence",
            Tenant = _options.DefaultTenant,
            Ts = _timeProvider.GetUtcNow(),
            Payload = payload,
            Version = "1.0",
            Actor = "attestor.divergence-detector",
            Attributes = ImmutableDictionary<string, string>.Empty
                .Add("severity", "warning")
                .Add("primary_origin", consistencyResult.PrimaryOrigin ?? "unknown")
                .Add("mirror_origin", consistencyResult.MirrorOrigin ?? "unknown"),
        };

        await _notifyPublisher.PublishAsync(notifyEvent, cancellationToken);

        _logger.LogWarning(
            "Published cross-log divergence alert: primary={PrimaryOrigin}, mirror={MirrorOrigin}",
            consistencyResult.PrimaryOrigin,
            consistencyResult.MirrorOrigin);
    }

    private bool ShouldAlert(AnomalySeverity severity)
    {
        return severity switch
        {
            AnomalySeverity.Critical => true,
            AnomalySeverity.Error => _options.AlertOnHighSeverity,
            AnomalySeverity.Warning => _options.AlertOnWarning,
            AnomalySeverity.Info => _options.AlertOnInfo,
            _ => false
        };
    }

    private static string GetEventKind(AnomalyType anomalyType)
    {
        return anomalyType switch
        {
            AnomalyType.RootHashMismatch => "rekor.checkpoint.divergence",
            AnomalyType.TreeSizeRollback => "rekor.checkpoint.rollback",
            AnomalyType.StaleTreeSize => "rekor.checkpoint.stale_size",
            AnomalyType.CrossLogDivergence => "rekor.checkpoint.cross_log_divergence",
            AnomalyType.InvalidSignature => "rekor.checkpoint.invalid_signature",
            AnomalyType.StaleCheckpoint => "rekor.checkpoint.stale",
            AnomalyType.ConsistencyProofFailure => "rekor.checkpoint.consistency_failure",
            _ => "rekor.checkpoint.anomaly"
        };
    }

    private JsonNode BuildAlertPayload(CheckpointDivergenceEvent divergenceEvent)
    {
        var anomaly = divergenceEvent.Anomaly;
        var checkpoint = divergenceEvent.Checkpoint;

        var payloadObj = new
        {
            eventType = GetEventKind(anomaly.Type),
            severity = anomaly.Severity.ToString().ToLowerInvariant(),
            origin = checkpoint?.Origin ?? "unknown",
            treeSize = checkpoint?.TreeSize ?? 0,
            expectedRootHash = anomaly.ExpectedValue,
            actualRootHash = anomaly.ActualValue,
            detectedAt = divergenceEvent.Timestamp.ToString("O"),
            backend = checkpoint?.Origin ?? "unknown",
            description = anomaly.Description,
            anomalyType = anomaly.Type.ToString(),
            checkpointId = anomaly.CheckpointId,
            referenceCheckpointId = anomaly.ReferenceCheckpointId,
        };

        return JsonNode.Parse(JsonSerializer.Serialize(payloadObj))!;
    }

    private static ImmutableDictionary<string, string> BuildAttributes(CheckpointAnomaly anomaly)
    {
        return ImmutableDictionary<string, string>.Empty
            .Add("severity", anomaly.Severity.ToString().ToLowerInvariant())
            .Add("anomaly_type", anomaly.Type.ToString())
            .Add("checkpoint_id", anomaly.CheckpointId.ToString());
    }
}
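
// Illustrative sketch only (not part of this commit): attaching the alert
// publisher to the detector's DivergenceDetected event. The async event
// handler and the swallow-after-log behavior are assumptions for the example.
public static class DivergenceAlertWiring
{
    public static void Attach(
        CheckpointDivergenceDetector detector,
        ICheckpointDivergenceAlertPublisher publisher)
    {
        detector.DivergenceDetected += async (_, evt) =>
        {
            try
            {
                await publisher.PublishDivergenceAlertAsync(evt);
            }
            catch
            {
                // The publisher logs before rethrowing; a failed alert must not
                // tear down the detection path.
            }
        };
    }
}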

#region Interfaces and Models

/// <summary>
/// Interface for publishing checkpoint divergence alerts.
/// </summary>
public interface ICheckpointDivergenceAlertPublisher
{
    /// <summary>
    /// Publishes a divergence alert to the Notify service.
    /// </summary>
    Task PublishDivergenceAlertAsync(
        CheckpointDivergenceEvent divergenceEvent,
        CancellationToken cancellationToken = default);

    /// <summary>
    /// Publishes a cross-log divergence alert.
    /// </summary>
    Task PublishCrossLogDivergenceAlertAsync(
        CrossLogConsistencyResult consistencyResult,
        CancellationToken cancellationToken = default);
}

/// <summary>
/// Interface for publishing events to the Notify service.
/// </summary>
public interface INotifyEventPublisher
{
    /// <summary>
    /// Publishes an event to the Notify service queue.
    /// </summary>
    Task PublishAsync(NotifyEventEnvelope @event, CancellationToken cancellationToken = default);
}

/// <summary>
/// Envelope for Notify service events.
/// </summary>
public sealed class NotifyEventEnvelope
{
    public Guid EventId { get; init; }
    public string Kind { get; init; } = string.Empty;
    public string Tenant { get; init; } = string.Empty;
    public DateTimeOffset Ts { get; init; }
    public JsonNode? Payload { get; init; }
    public string? Version { get; init; }
    public string? Actor { get; init; }
    public ImmutableDictionary<string, string> Attributes { get; init; } = ImmutableDictionary<string, string>.Empty;
}

/// <summary>
/// Options for divergence alert publishing.
/// </summary>
public sealed class DivergenceAlertOptions
{
    /// <summary>
    /// Whether to enable alert publishing.
    /// </summary>
    public bool EnableAlerts { get; set; } = true;

    /// <summary>
    /// Default tenant for alerts when not specified.
    /// </summary>
    public string DefaultTenant { get; set; } = "system";

    /// <summary>
    /// Alert on high severity anomalies.
    /// </summary>
    public bool AlertOnHighSeverity { get; set; } = true;

    /// <summary>
    /// Alert on warning severity anomalies.
    /// </summary>
    public bool AlertOnWarning { get; set; } = true;

    /// <summary>
    /// Alert on info severity anomalies (not recommended for production).
    /// </summary>
    public bool AlertOnInfo { get; set; } = false;

    /// <summary>
    /// Stream name for divergence alerts in the Notify queue.
    /// </summary>
    public string AlertStream { get; set; } = "attestor.alerts";
}

#endregion
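
// Illustrative sketch only (not part of this commit): binding
// DivergenceAlertOptions from configuration. The section name
// "Attestor:DivergenceAlerts" and the appsettings shape are assumptions.
//
//   services.Configure<DivergenceAlertOptions>(
//       configuration.GetSection("Attestor:DivergenceAlerts"));
//
//   // appsettings.json (hypothetical):
//   // "Attestor": {
//   //   "DivergenceAlerts": {
//   //     "EnableAlerts": true,
//   //     "DefaultTenant": "system",
//   //     "AlertOnWarning": true,
//   //     "AlertOnInfo": false
//   //   }
//   // }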
@@ -0,0 +1,470 @@
// -----------------------------------------------------------------------------
// CheckpointDivergenceDetector.cs
// Sprint: SPRINT_20260112_017_ATTESTOR_checkpoint_divergence_detection
// Tasks: DIVERGE-002, DIVERGE-003, DIVERGE-004, DIVERGE-005, DIVERGE-006, DIVERGE-007, DIVERGE-009
// Description: Implementation of checkpoint divergence detection with metrics.
// -----------------------------------------------------------------------------

using System.Diagnostics.Metrics;
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Options;

namespace StellaOps.Attestor.Core.Rekor;

/// <summary>
/// Detects divergence and anomalies in Rekor checkpoints.
/// </summary>
public sealed class CheckpointDivergenceDetector : ICheckpointDivergenceDetector
{
    private readonly IRekorCheckpointStore _checkpointStore;
    private readonly DivergenceDetectorOptions _options;
    private readonly ILogger<CheckpointDivergenceDetector> _logger;

    // Metrics (DIVERGE-006, DIVERGE-007)
    private static readonly Meter Meter = new("StellaOps.Attestor.Divergence", "1.0.0");

    private static readonly Counter<long> CheckpointMismatchTotal = Meter.CreateCounter<long>(
        "attestor.rekor_checkpoint_mismatch_total",
        description: "Total checkpoint mismatches detected");

    private static readonly Counter<long> RollbackDetectedTotal = Meter.CreateCounter<long>(
        "attestor.rekor_checkpoint_rollback_detected_total",
        description: "Total rollback attempts detected");

    private static readonly Counter<long> CrossLogDivergenceTotal = Meter.CreateCounter<long>(
        "attestor.rekor_cross_log_divergence_total",
        description: "Total cross-log divergences detected");

    private static readonly Counter<long> AnomaliesDetectedTotal = Meter.CreateCounter<long>(
        "attestor.rekor_anomalies_detected_total",
        description: "Total anomalies detected");

    // Event for audit trail (DIVERGE-009)
    public event EventHandler<CheckpointDivergenceEvent>? DivergenceDetected;

    public CheckpointDivergenceDetector(
        IRekorCheckpointStore checkpointStore,
        IOptions<DivergenceDetectorOptions> options,
        ILogger<CheckpointDivergenceDetector> logger)
    {
        _checkpointStore = checkpointStore;
        _options = options.Value;
        _logger = logger;
    }

    /// <inheritdoc />
    public async Task<DivergenceDetectionResult> DetectDivergenceAsync(
        StoredCheckpoint newCheckpoint,
        CancellationToken cancellationToken = default)
    {
        var anomalies = new List<CheckpointAnomaly>();

        // Check 1: Root hash mismatch at same tree size (DIVERGE-002)
        var existingAtSize = await _checkpointStore.GetCheckpointAtSizeAsync(
            newCheckpoint.Origin,
            newCheckpoint.TreeSize,
            cancellationToken);

        if (existingAtSize != null)
        {
            if (!newCheckpoint.RootHash.SequenceEqual(existingAtSize.RootHash))
            {
                var anomaly = new CheckpointAnomaly
                {
                    Type = AnomalyType.RootHashMismatch,
                    Severity = AnomalySeverity.Critical,
                    Description = $"Root hash mismatch at tree size {newCheckpoint.TreeSize}",
                    CheckpointId = newCheckpoint.CheckpointId,
                    ReferenceCheckpointId = existingAtSize.CheckpointId,
                    ExpectedValue = Convert.ToHexString(existingAtSize.RootHash),
                    ActualValue = Convert.ToHexString(newCheckpoint.RootHash),
                    DetectedAt = DateTimeOffset.UtcNow,
                };

                anomalies.Add(anomaly);
                CheckpointMismatchTotal.Add(1,
                    new KeyValuePair<string, object?>("origin", newCheckpoint.Origin),
                    new KeyValuePair<string, object?>("backend", "primary"));

                _logger.LogCritical(
                    "ROOT HASH MISMATCH detected for {Origin} at tree size {TreeSize}",
                    newCheckpoint.Origin, newCheckpoint.TreeSize);

                RaiseDivergenceEvent(anomaly, newCheckpoint);
            }
        }

        // Check 2: Monotonicity (DIVERGE-003, DIVERGE-004)
        var monotonicityResult = await CheckMonotonicityAsync(
            newCheckpoint.Origin,
            newCheckpoint.TreeSize,
            cancellationToken);

        // A monotonicity result can carry a non-blocking warning (e.g. stale tree
        // size) even when monotonicity holds, so surface any reported anomaly.
        if (monotonicityResult.Violation != null)
        {
            anomalies.Add(monotonicityResult.Violation);
        }

        // Check 3: Stale checkpoint
        var latestCheckpoint = await _checkpointStore.GetLatestCheckpointAsync(
            newCheckpoint.Origin,
            cancellationToken);

        if (latestCheckpoint != null)
        {
            var age = DateTimeOffset.UtcNow - latestCheckpoint.FetchedAt;
            if (age > _options.StaleCheckpointThreshold)
            {
                anomalies.Add(new CheckpointAnomaly
                {
                    Type = AnomalyType.StaleCheckpoint,
                    Severity = AnomalySeverity.Warning,
                    Description = $"Latest checkpoint is {age.TotalMinutes:F1} minutes old",
                    CheckpointId = latestCheckpoint.CheckpointId,
                    DetectedAt = DateTimeOffset.UtcNow,
                });
            }
        }

        // Record metrics
        if (anomalies.Count > 0)
        {
            AnomaliesDetectedTotal.Add(anomalies.Count,
                new KeyValuePair<string, object?>("origin", newCheckpoint.Origin));
        }

        // Determine overall severity and recommended action
        var overallSeverity = anomalies.Count > 0
            ? anomalies.Max(a => a.Severity)
            : AnomalySeverity.None;

        var recommendedAction = DetermineAction(overallSeverity, anomalies);

        return new DivergenceDetectionResult
        {
            IsConsistent = anomalies.All(a => a.Severity < AnomalySeverity.Error),
            Anomalies = anomalies,
            OverallSeverity = overallSeverity,
            RecommendedAction = recommendedAction,
        };
    }

    /// <inheritdoc />
    public async Task<CrossLogConsistencyResult> CheckCrossLogConsistencyAsync(
        StoredCheckpoint primaryCheckpoint,
        StoredCheckpoint mirrorCheckpoint,
        CancellationToken cancellationToken = default)
    {
        // Compare at the smaller tree size
        var compareSize = Math.Min(primaryCheckpoint.TreeSize, mirrorCheckpoint.TreeSize);

        // If they're at different sizes, we need to find checkpoints at the same size
        StoredCheckpoint? primaryAtSize = primaryCheckpoint.TreeSize == compareSize
            ? primaryCheckpoint
            : await _checkpointStore.GetCheckpointAtSizeAsync(primaryCheckpoint.Origin, compareSize, cancellationToken);

        StoredCheckpoint? mirrorAtSize = mirrorCheckpoint.TreeSize == compareSize
            ? mirrorCheckpoint
            : await _checkpointStore.GetCheckpointAtSizeAsync(mirrorCheckpoint.Origin, compareSize, cancellationToken);

        if (primaryAtSize == null || mirrorAtSize == null)
        {
            // Cannot compare, need more data
            return new CrossLogConsistencyResult
            {
                IsConsistent = true, // Assume consistent if we can't verify
                ComparedAtTreeSize = compareSize,
                PrimaryOrigin = primaryCheckpoint.Origin,
                MirrorOrigin = mirrorCheckpoint.Origin,
            };
        }

        // Compare root hashes (DIVERGE-005)
        if (!primaryAtSize.RootHash.SequenceEqual(mirrorAtSize.RootHash))
        {
            var divergence = new CheckpointAnomaly
            {
                Type = AnomalyType.CrossLogDivergence,
                Severity = AnomalySeverity.Warning,
                Description = $"Primary and mirror logs diverge at tree size {compareSize}",
                CheckpointId = primaryAtSize.CheckpointId,
                ReferenceCheckpointId = mirrorAtSize.CheckpointId,
                ExpectedValue = Convert.ToHexString(primaryAtSize.RootHash),
                ActualValue = Convert.ToHexString(mirrorAtSize.RootHash),
                DetectedAt = DateTimeOffset.UtcNow,
            };

            CrossLogDivergenceTotal.Add(1,
                new KeyValuePair<string, object?>("primary", primaryCheckpoint.Origin),
                new KeyValuePair<string, object?>("mirror", mirrorCheckpoint.Origin));

            _logger.LogWarning(
                "Cross-log divergence detected between {Primary} and {Mirror} at tree size {TreeSize}",
                primaryCheckpoint.Origin, mirrorCheckpoint.Origin, compareSize);

            RaiseDivergenceEvent(divergence, primaryAtSize);

            return new CrossLogConsistencyResult
            {
                IsConsistent = false,
                ComparedAtTreeSize = compareSize,
                PrimaryOrigin = primaryCheckpoint.Origin,
                MirrorOrigin = mirrorCheckpoint.Origin,
                Divergence = divergence,
            };
        }

        return new CrossLogConsistencyResult
        {
            IsConsistent = true,
            ComparedAtTreeSize = compareSize,
            PrimaryOrigin = primaryCheckpoint.Origin,
            MirrorOrigin = mirrorCheckpoint.Origin,
        };
    }

    /// <inheritdoc />
    public async Task<MonotonicityCheckResult> CheckMonotonicityAsync(
        string origin,
        long newTreeSize,
        CancellationToken cancellationToken = default)
    {
        var latestCheckpoint = await _checkpointStore.GetLatestCheckpointAsync(origin, cancellationToken);

        if (latestCheckpoint == null)
        {
            // No previous checkpoint, monotonicity trivially maintained
            return new MonotonicityCheckResult
            {
                IsMaintained = true,
                PreviousTreeSize = 0,
                NewTreeSize = newTreeSize,
            };
        }

        var previousTreeSize = latestCheckpoint.TreeSize;

        // Check for rollback (DIVERGE-004)
        if (newTreeSize < previousTreeSize)
        {
            var violation = new CheckpointAnomaly
            {
                Type = AnomalyType.TreeSizeRollback,
                Severity = AnomalySeverity.Critical,
                Description = $"Tree size rollback detected: {previousTreeSize} -> {newTreeSize}",
                CheckpointId = latestCheckpoint.CheckpointId,
                ExpectedValue = $">= {previousTreeSize}",
                ActualValue = newTreeSize.ToString(),
                DetectedAt = DateTimeOffset.UtcNow,
            };

            RollbackDetectedTotal.Add(1, new KeyValuePair<string, object?>("origin", origin));

            _logger.LogCritical(
                "ROLLBACK DETECTED for {Origin}: tree size went from {Previous} to {New}",
                origin, previousTreeSize, newTreeSize);

            RaiseDivergenceEvent(violation, latestCheckpoint);

            return new MonotonicityCheckResult
            {
                IsMaintained = false,
                PreviousTreeSize = previousTreeSize,
                NewTreeSize = newTreeSize,
                Violation = violation,
            };
        }

        // Check for stale (DIVERGE-003)
        if (newTreeSize == previousTreeSize)
        {
            var checkpointAge = DateTimeOffset.UtcNow - latestCheckpoint.FetchedAt;
            if (checkpointAge > _options.StaleTreeSizeThreshold)
            {
                var warning = new CheckpointAnomaly
                {
                    Type = AnomalyType.StaleTreeSize,
                    Severity = AnomalySeverity.Info,
                    Description = $"Tree size unchanged for {checkpointAge.TotalMinutes:F1} minutes",
                    CheckpointId = latestCheckpoint.CheckpointId,
                    DetectedAt = DateTimeOffset.UtcNow,
                };

                return new MonotonicityCheckResult
                {
                    IsMaintained = true,
                    PreviousTreeSize = previousTreeSize,
                    NewTreeSize = newTreeSize,
                    Violation = warning,
                };
            }
        }

        return new MonotonicityCheckResult
        {
            IsMaintained = true,
            PreviousTreeSize = previousTreeSize,
            NewTreeSize = newTreeSize,
        };
    }

    /// <inheritdoc />
    public async Task<LogHealthStatus> GetLogHealthAsync(
        string origin,
        CancellationToken cancellationToken = default)
    {
        var latestCheckpoint = await _checkpointStore.GetLatestCheckpointAsync(origin, cancellationToken);

        if (latestCheckpoint == null)
        {
            return new LogHealthStatus
            {
                Origin = origin,
                State = LogHealthState.Unknown,
                LatestTreeSize = 0,
                CheckpointAge = TimeSpan.MaxValue,
                RecentAnomalyCount = 0,
                EvaluatedAt = DateTimeOffset.UtcNow,
            };
        }

        var checkpointAge = DateTimeOffset.UtcNow - latestCheckpoint.FetchedAt;

        // Counting recent anomalies requires a persisted anomaly store; until one
        // exists, the health status reports zero (see RecentAnomalyCount below).

        // Determine health state
        var state = LogHealthState.Healthy;

        if (checkpointAge > _options.UnhealthyCheckpointAgeThreshold)
        {
            state = LogHealthState.Unhealthy;
        }
        else if (checkpointAge > _options.DegradedCheckpointAgeThreshold)
        {
            state = LogHealthState.Degraded;
        }

        return new LogHealthStatus
        {
            Origin = origin,
            State = state,
            LatestTreeSize = latestCheckpoint.TreeSize,
            CheckpointAge = checkpointAge,
            RecentAnomalyCount = 0, // Would need anomaly store to track this
            EvaluatedAt = DateTimeOffset.UtcNow,
        };
    }

    private static DivergenceAction DetermineAction(AnomalySeverity severity, IReadOnlyList<CheckpointAnomaly> anomalies)
    {
        if (anomalies.Count == 0)
        {
            return DivergenceAction.None;
        }

        // Check for critical anomalies that require quarantine
        var hasCriticalMismatch = anomalies.Any(a =>
            a.Type == AnomalyType.RootHashMismatch &&
            a.Severity == AnomalySeverity.Critical);

        if (hasCriticalMismatch)
        {
            return DivergenceAction.QuarantineAndAlert;
        }

        var hasRollback = anomalies.Any(a => a.Type == AnomalyType.TreeSizeRollback);
        if (hasRollback)
        {
            return DivergenceAction.RejectAndAlert;
        }

        return severity switch
        {
            AnomalySeverity.Critical => DivergenceAction.RejectAndAlert,
            AnomalySeverity.Error => DivergenceAction.Alert,
            AnomalySeverity.Warning => DivergenceAction.Alert,
            AnomalySeverity.Info => DivergenceAction.Log,
            _ => DivergenceAction.None,
        };
    }

    private void RaiseDivergenceEvent(CheckpointAnomaly anomaly, StoredCheckpoint checkpoint)
    {
        var evt = new CheckpointDivergenceEvent
        {
            EventId = Guid.NewGuid(),
            Anomaly = anomaly,
            Checkpoint = checkpoint,
            Timestamp = DateTimeOffset.UtcNow,
        };

        DivergenceDetected?.Invoke(this, evt);
    }
}
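
// Illustrative sketch only (not part of this commit): how a caller might map
// the detector's recommended action to behavior. The wording of each branch is
// an assumption for the example.
public static class DivergenceResponse
{
    public static string Describe(DivergenceDetectionResult result) => result.RecommendedAction switch
    {
        DivergenceAction.None => "accept checkpoint",
        DivergenceAction.Log => "accept checkpoint; record anomalies for audit",
        DivergenceAction.Alert => "accept checkpoint; raise a notification",
        DivergenceAction.QuarantineAndAlert => "hold affected entries; raise a notification",
        DivergenceAction.RejectAndAlert => "reject checkpoint; raise a notification",
        _ => "unknown action",
    };
}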

/// <summary>
/// Event raised when checkpoint divergence is detected.
/// </summary>
public sealed class CheckpointDivergenceEvent : EventArgs
{
    /// <summary>
    /// Unique event identifier.
    /// </summary>
    public required Guid EventId { get; init; }

    /// <summary>
    /// The detected anomaly.
    /// </summary>
    public required CheckpointAnomaly Anomaly { get; init; }

    /// <summary>
    /// The checkpoint that triggered detection.
    /// </summary>
    public required StoredCheckpoint Checkpoint { get; init; }

    /// <summary>
    /// When the event occurred.
    /// </summary>
    public required DateTimeOffset Timestamp { get; init; }
}

/// <summary>
/// Options for divergence detection.
/// </summary>
public sealed record DivergenceDetectorOptions
{
    /// <summary>
    /// Threshold after which a checkpoint is considered stale.
    /// </summary>
    public TimeSpan StaleCheckpointThreshold { get; init; } = TimeSpan.FromMinutes(15);

    /// <summary>
    /// Threshold after which unchanged tree size is suspicious.
    /// </summary>
    public TimeSpan StaleTreeSizeThreshold { get; init; } = TimeSpan.FromHours(1);

    /// <summary>
    /// Checkpoint age threshold for degraded health state.
    /// </summary>
    public TimeSpan DegradedCheckpointAgeThreshold { get; init; } = TimeSpan.FromMinutes(30);

    /// <summary>
    /// Checkpoint age threshold for unhealthy state.
    /// </summary>
    public TimeSpan UnhealthyCheckpointAgeThreshold { get; init; } = TimeSpan.FromHours(2);

    /// <summary>
    /// Whether to enable cross-log consistency checks.
    /// </summary>
    public bool EnableCrossLogChecks { get; init; } = true;

    /// <summary>
    /// Mirror log origins to check against primary.
    /// </summary>
    public IReadOnlyList<string> MirrorOrigins { get; init; } = [];
}
@@ -0,0 +1,352 @@
// -----------------------------------------------------------------------------
// FileSystemRekorTileCache.cs
// Sprint: SPRINT_20260112_017_ATTESTOR_periodic_rekor_sync
// Tasks: REKOR-SYNC-004
// Description: File-based tile cache for air-gapped environments.
// -----------------------------------------------------------------------------

using System.Security.Cryptography;
using System.Text.Json;
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Options;

namespace StellaOps.Attestor.Core.Rekor;

/// <summary>
/// File-based implementation of <see cref="IRekorTileCache"/> for air-gapped environments.
/// Stores tiles in a directory structure: {basePath}/{origin}/{level}/{index}.tile
/// </summary>
public sealed class FileSystemRekorTileCache : IRekorTileCache
{
    private readonly FileSystemTileCacheOptions _options;
    private readonly ILogger<FileSystemRekorTileCache> _logger;
    private readonly SemaphoreSlim _lock = new(1, 1);

    private const int TileWidth = 256; // Standard tile width (256 hashes per tile)
    private const int HashSize = 32; // SHA-256 hash size

    public FileSystemRekorTileCache(
        IOptions<FileSystemTileCacheOptions> options,
        ILogger<FileSystemRekorTileCache> logger)
    {
        _options = options.Value;
        _logger = logger;

        // Ensure base directory exists
        Directory.CreateDirectory(_options.BasePath);
    }

    /// <inheritdoc />
    public async Task<CachedTile?> GetTileAsync(
        string origin,
        int level,
        long index,
        CancellationToken cancellationToken = default)
    {
        var tilePath = GetTilePath(origin, level, index);
        var metaPath = GetMetaPath(origin, level, index);

        if (!File.Exists(tilePath))
        {
            return null;
        }

        try
        {
            var hashes = await File.ReadAllBytesAsync(tilePath, cancellationToken);
            var width = hashes.Length / HashSize;

            TileMetadata? meta = null;
            if (File.Exists(metaPath))
            {
                var metaJson = await File.ReadAllTextAsync(metaPath, cancellationToken);
                meta = JsonSerializer.Deserialize<TileMetadata>(metaJson);
            }

            return new CachedTile
            {
                Origin = origin,
                Level = level,
                Index = index,
                Width = width,
                Hashes = hashes,
                CachedAt = meta?.CachedAt ?? File.GetCreationTimeUtc(tilePath),
                IsPartial = width < TileWidth,
                FetchedAtTreeSize = meta?.TreeSize,
            };
        }
        catch (Exception ex)
        {
            _logger.LogWarning(ex, "Failed to read cached tile {Origin}/{Level}/{Index}", origin, level, index);
            return null;
        }
    }

    /// <inheritdoc />
    public async Task StoreTileAsync(CachedTile tile, CancellationToken cancellationToken = default)
    {
        var tilePath = GetTilePath(tile.Origin, tile.Level, tile.Index);
        var metaPath = GetMetaPath(tile.Origin, tile.Level, tile.Index);
        var tileDir = Path.GetDirectoryName(tilePath)!;

        await _lock.WaitAsync(cancellationToken);
        try
        {
            Directory.CreateDirectory(tileDir);

            // Write tile data
            await File.WriteAllBytesAsync(tilePath, tile.Hashes, cancellationToken);

            // Write metadata
            var meta = new TileMetadata
            {
                CachedAt = tile.CachedAt,
                TreeSize = tile.FetchedAtTreeSize,
                IsPartial = tile.IsPartial,
            };
            var metaJson = JsonSerializer.Serialize(meta);
            await File.WriteAllTextAsync(metaPath, metaJson, cancellationToken);

            _logger.LogDebug(
                "Cached tile {Origin}/{Level}/{Index} ({Width} hashes)",
                tile.Origin, tile.Level, tile.Index, tile.Width);
        }
        finally
        {
            _lock.Release();
        }
    }

    /// <inheritdoc />
    public Task<bool> HasTileAsync(
        string origin,
        int level,
        long index,
        CancellationToken cancellationToken = default)
    {
        var tilePath = GetTilePath(origin, level, index);
        return Task.FromResult(File.Exists(tilePath));
    }

    /// <inheritdoc />
    public Task<TileCacheStats> GetStatsAsync(string origin, CancellationToken cancellationToken = default)
    {
        var originDir = GetOriginPath(origin);

        if (!Directory.Exists(originDir))
        {
            return Task.FromResult(new TileCacheStats
            {
                TotalTiles = 0,
                TotalBytes = 0,
                PartialTiles = 0,
            });
        }

        var tileFiles = Directory.GetFiles(originDir, "*.tile", SearchOption.AllDirectories);

        long totalBytes = 0;
        int partialTiles = 0;
        DateTimeOffset? oldestTile = null;
        DateTimeOffset? newestTile = null;
        long maxTreeSize = 0;

        foreach (var file in tileFiles)
        {
            var info = new FileInfo(file);
            totalBytes += info.Length;

            var creationTime = new DateTimeOffset(info.CreationTimeUtc, TimeSpan.Zero);
            oldestTile = oldestTile == null ? creationTime : (creationTime < oldestTile ? creationTime : oldestTile);
            newestTile = newestTile == null ? creationTime : (creationTime > newestTile ? creationTime : newestTile);

            // Check if partial
            var hashCount = info.Length / HashSize;
            if (hashCount < TileWidth)
            {
                partialTiles++;
            }

            // Try to read tree size from metadata
            var metaPath = Path.ChangeExtension(file, ".meta.json");
            if (File.Exists(metaPath))
            {
                try
                {
                    var metaJson = File.ReadAllText(metaPath);
                    var meta = JsonSerializer.Deserialize<TileMetadata>(metaJson);
                    if (meta?.TreeSize > maxTreeSize)
                    {
                        maxTreeSize = meta.TreeSize.Value;
                    }
                }
                catch
                {
                    // Ignore metadata read errors
                }
            }
        }

        return Task.FromResult(new TileCacheStats
        {
            TotalTiles = tileFiles.Length,
            TotalBytes = totalBytes,
            PartialTiles = partialTiles,
            OldestTile = oldestTile,
            NewestTile = newestTile,
            MaxTreeSizeCovered = maxTreeSize,
        });
    }

    /// <inheritdoc />
    public async Task<int> PruneAsync(
        string? origin,
        DateTimeOffset olderThan,
        CancellationToken cancellationToken = default)
    {
        var searchPath = origin != null ? GetOriginPath(origin) : _options.BasePath;

        if (!Directory.Exists(searchPath))
        {
            return 0;
        }

        var tileFiles = Directory.GetFiles(searchPath, "*.tile", SearchOption.AllDirectories);
        var pruned = 0;

        await _lock.WaitAsync(cancellationToken);
        try
        {
            foreach (var file in tileFiles)
            {
                var info = new FileInfo(file);
                if (info.CreationTimeUtc < olderThan.UtcDateTime)
                {
                    try
                    {
                        File.Delete(file);
                        var metaPath = Path.ChangeExtension(file, ".meta.json");
                        if (File.Exists(metaPath))
                        {
                            File.Delete(metaPath);
                        }
                        pruned++;
                    }
                    catch (Exception ex)
                    {
                        _logger.LogWarning(ex, "Failed to prune tile {File}", file);
                    }
                }
            }
        }
        finally
        {
            _lock.Release();
        }

        _logger.LogInformation("Pruned {Count} tiles older than {OlderThan}", pruned, olderThan);
        return pruned;
    }

    /// <inheritdoc />
    public async Task<IReadOnlyList<TileCoordinate>> GetMissingTilesAsync(
        string origin,
        long treeSize,
        CancellationToken cancellationToken = default)
    {
        var missing = new List<TileCoordinate>();

        // Calculate required tiles for each level
        var entriesAtLevel = treeSize;
        var level = 0;

        while (entriesAtLevel > 0)
        {
            var tilesNeeded = (entriesAtLevel + TileWidth - 1) / TileWidth;

            for (long index = 0; index < tilesNeeded; index++)
            {
                if (!await HasTileAsync(origin, level, index, cancellationToken))
                {
                    missing.Add(new TileCoordinate(level, index));
                }
            }

            // Move up the tree
            entriesAtLevel = tilesNeeded;
            level++;

            // Stop if we've reached the root
            if (entriesAtLevel <= 1)
            {
                break;
            }
        }

        return missing;
    }

    private string GetOriginPath(string origin)
    {
        // Sanitize origin for use as directory name
        var sanitized = SanitizeOrigin(origin);
        return Path.Combine(_options.BasePath, sanitized);
    }

    private string GetTilePath(string origin, int level, long index)
    {
        var originPath = GetOriginPath(origin);
        return Path.Combine(originPath, level.ToString(), $"{index}.tile");
    }

    private string GetMetaPath(string origin, int level, long index)
    {
        var originPath = GetOriginPath(origin);
        return Path.Combine(originPath, level.ToString(), $"{index}.meta.json");
    }

    private static string SanitizeOrigin(string origin)
    {
        // Create a filesystem-safe name from the origin
        var hash = SHA256.HashData(System.Text.Encoding.UTF8.GetBytes(origin));
        var hashHex = Convert.ToHexString(hash)[..16];

        // Also include a readable prefix
        var readable = new string(origin
            .Where(c => char.IsLetterOrDigit(c) || c == '-' || c == '_')
            .Take(32)
            .ToArray());

        return string.IsNullOrEmpty(readable) ? hashHex : $"{readable}_{hashHex}";
    }

    private sealed record TileMetadata
    {
        public DateTimeOffset CachedAt { get; init; }
        public long? TreeSize { get; init; }
        public bool IsPartial { get; init; }
    }
}

/// <summary>
/// Options for file-based tile cache.
/// </summary>
public sealed record FileSystemTileCacheOptions
{
    /// <summary>
    /// Base directory for tile storage.
    /// </summary>
    public string BasePath { get; init; } = Path.Combine(
        Environment.GetFolderPath(Environment.SpecialFolder.LocalApplicationData),
        "StellaOps", "RekorTiles");

    /// <summary>
    /// Maximum cache size in bytes (0 = unlimited).
    /// </summary>
    public long MaxCacheSizeBytes { get; init; } = 0;

    /// <summary>
    /// Auto-prune tiles older than this duration.
    /// </summary>
    public TimeSpan? AutoPruneAfter { get; init; }
}
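
// Illustrative sketch only (not part of this commit): minimal usage of the
// file-based cache. The base path, tree size, and NullLogger are assumptions.
//
//   var cache = new FileSystemRekorTileCache(
//       Options.Create(new FileSystemTileCacheOptions { BasePath = "/var/lib/stellaops/rekor-tiles" }),
//       NullLogger<FileSystemRekorTileCache>.Instance);
//
//   var missing = await cache.GetMissingTilesAsync("rekor.sigstore.dev", treeSize: 100_000);
//   // Fetch each missing tile from the log, then persist it:
//   // await cache.StoreTileAsync(tile);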
@@ -0,0 +1,374 @@
// -----------------------------------------------------------------------------
// ICheckpointDivergenceDetector.cs
// Sprint: SPRINT_20260112_017_ATTESTOR_checkpoint_divergence_detection
// Tasks: DIVERGE-001, DIVERGE-002, DIVERGE-003, DIVERGE-004, DIVERGE-005
// Description: Interface for detecting Rekor checkpoint divergence and anomalies.
// -----------------------------------------------------------------------------

namespace StellaOps.Attestor.Core.Rekor;

/// <summary>
/// Detects divergence, inconsistencies, and anomalies in Rekor checkpoints.
/// Critical for detecting Byzantine behavior in transparency logs.
/// </summary>
public interface ICheckpointDivergenceDetector
{
    /// <summary>
    /// Compares a new checkpoint against stored checkpoints for the same origin.
    /// </summary>
    /// <param name="newCheckpoint">The newly fetched checkpoint.</param>
    /// <param name="cancellationToken">Cancellation token.</param>
    /// <returns>Detection result with any anomalies found.</returns>
    Task<DivergenceDetectionResult> DetectDivergenceAsync(
        StoredCheckpoint newCheckpoint,
        CancellationToken cancellationToken = default);

    /// <summary>
    /// Compares checkpoints between primary and mirror logs.
    /// </summary>
    /// <param name="primaryCheckpoint">Checkpoint from primary log.</param>
    /// <param name="mirrorCheckpoint">Checkpoint from mirror log.</param>
    /// <param name="cancellationToken">Cancellation token.</param>
    /// <returns>Cross-log consistency result.</returns>
    Task<CrossLogConsistencyResult> CheckCrossLogConsistencyAsync(
        StoredCheckpoint primaryCheckpoint,
        StoredCheckpoint mirrorCheckpoint,
        CancellationToken cancellationToken = default);

    /// <summary>
    /// Validates checkpoint monotonicity (tree only grows).
    /// </summary>
    /// <param name="origin">The log origin.</param>
    /// <param name="newTreeSize">The new tree size.</param>
    /// <param name="cancellationToken">Cancellation token.</param>
    /// <returns>Monotonicity check result.</returns>
    Task<MonotonicityCheckResult> CheckMonotonicityAsync(
        string origin,
        long newTreeSize,
        CancellationToken cancellationToken = default);

    /// <summary>
    /// Gets the current health status of a log based on recent checks.
    /// </summary>
    /// <param name="origin">The log origin.</param>
    /// <param name="cancellationToken">Cancellation token.</param>
    /// <returns>Log health status.</returns>
    Task<LogHealthStatus> GetLogHealthAsync(
        string origin,
        CancellationToken cancellationToken = default);
}

/// <summary>
/// Result of divergence detection.
/// </summary>
public sealed record DivergenceDetectionResult
{
    /// <summary>
    /// Whether the checkpoint is consistent with history.
    /// </summary>
    public required bool IsConsistent { get; init; }

    /// <summary>
    /// List of detected anomalies.
    /// </summary>
    public required IReadOnlyList<CheckpointAnomaly> Anomalies { get; init; }

    /// <summary>
    /// Overall severity of detected issues.
    /// </summary>
    public required AnomalySeverity OverallSeverity { get; init; }

    /// <summary>
    /// Recommended action based on detection results.
    /// </summary>
    public required DivergenceAction RecommendedAction { get; init; }

    /// <summary>
    /// Creates a consistent result with no anomalies.
    /// </summary>
    public static DivergenceDetectionResult Consistent => new()
    {
        IsConsistent = true,
        Anomalies = [],
        OverallSeverity = AnomalySeverity.None,
        RecommendedAction = DivergenceAction.None,
    };
}

/// <summary>
/// A detected checkpoint anomaly.
/// </summary>
public sealed record CheckpointAnomaly
{
    /// <summary>
    /// Type of anomaly detected.
    /// </summary>
    public required AnomalyType Type { get; init; }

    /// <summary>
    /// Severity of the anomaly.
    /// </summary>
    public required AnomalySeverity Severity { get; init; }

    /// <summary>
    /// Human-readable description.
    /// </summary>
    public required string Description { get; init; }

    /// <summary>
    /// The checkpoint that triggered the anomaly.
    /// </summary>
    public required Guid CheckpointId { get; init; }

    /// <summary>
    /// Reference checkpoint (if comparison-based).
    /// </summary>
    public Guid? ReferenceCheckpointId { get; init; }

    /// <summary>
    /// Expected value (for mismatch anomalies).
    /// </summary>
    public string? ExpectedValue { get; init; }

    /// <summary>
    /// Actual value (for mismatch anomalies).
    /// </summary>
    public string? ActualValue { get; init; }

    /// <summary>
    /// When the anomaly was detected.
    /// </summary>
    public required DateTimeOffset DetectedAt { get; init; }
}

/// <summary>
/// Type of checkpoint anomaly.
/// </summary>
public enum AnomalyType
{
    /// <summary>
    /// Root hash mismatch at same tree size.
    /// </summary>
    RootHashMismatch,

    /// <summary>
    /// Tree size decreased (rollback attempt).
    /// </summary>
    TreeSizeRollback,

    /// <summary>
    /// Tree size did not increase when expected.
    /// </summary>
    StaleTreeSize,

    /// <summary>
    /// Primary and mirror logs have different roots at same size.
    /// </summary>
    CrossLogDivergence,

    /// <summary>
    /// Checkpoint signature invalid or from unknown key.
    /// </summary>
    InvalidSignature,

    /// <summary>
    /// Checkpoint is older than expected freshness threshold.
    /// </summary>
    StaleCheckpoint,

    /// <summary>
    /// Consistency proof between two checkpoints failed.
    /// </summary>
    ConsistencyProofFailure,
}

/// <summary>
/// Severity of an anomaly.
/// </summary>
public enum AnomalySeverity
{
    /// <summary>
    /// No anomaly.
    /// </summary>
    None = 0,

    /// <summary>
    /// Informational only.
    /// </summary>
    Info = 1,

    /// <summary>
    /// Warning - investigate but not blocking.
    /// </summary>
    Warning = 2,

    /// <summary>
    /// Error - should block operations.
    /// </summary>
    Error = 3,

    /// <summary>
    /// Critical - indicates Byzantine behavior, must alert immediately.
    /// </summary>
    Critical = 4,
}

/// <summary>
/// Recommended action for divergence.
/// </summary>
public enum DivergenceAction
{
    /// <summary>
    /// No action needed.
    /// </summary>
    None,

    /// <summary>
    /// Log for investigation.
    /// </summary>
    Log,

    /// <summary>
    /// Send alert notification.
    /// </summary>
    Alert,

    /// <summary>
    /// Quarantine affected entries and alert.
    /// </summary>
    QuarantineAndAlert,

    /// <summary>
    /// Reject operations and alert.
    /// </summary>
    RejectAndAlert,
}

/// <summary>
/// Result of cross-log consistency check.
/// </summary>
public sealed record CrossLogConsistencyResult
{
    /// <summary>
    /// Whether primary and mirror are consistent.
    /// </summary>
    public required bool IsConsistent { get; init; }

    /// <summary>
    /// Tree size at which comparison was made.
    /// </summary>
    public required long ComparedAtTreeSize { get; init; }

    /// <summary>
    /// Primary log origin.
    /// </summary>
    public required string PrimaryOrigin { get; init; }

    /// <summary>
    /// Mirror log origin.
    /// </summary>
    public required string MirrorOrigin { get; init; }

    /// <summary>
    /// Divergence details if not consistent.
    /// </summary>
    public CheckpointAnomaly? Divergence { get; init; }
}

/// <summary>
/// Result of monotonicity check.
/// </summary>
public sealed record MonotonicityCheckResult
{
    /// <summary>
    /// Whether monotonicity is maintained.
    /// </summary>
    public required bool IsMaintained { get; init; }

    /// <summary>
    /// Previous tree size.
    /// </summary>
    public required long PreviousTreeSize { get; init; }

    /// <summary>
    /// New tree size.
    /// </summary>
    public required long NewTreeSize { get; init; }

    /// <summary>
    /// Number of new entries (delta).
    /// </summary>
    public long Delta => NewTreeSize - PreviousTreeSize;

    /// <summary>
    /// Violation details if not maintained.
    /// </summary>
    public CheckpointAnomaly? Violation { get; init; }
}

/// <summary>
/// Health status of a transparency log.
/// </summary>
public sealed record LogHealthStatus
{
    /// <summary>
    /// Log origin.
    /// </summary>
    public required string Origin { get; init; }

    /// <summary>
    /// Overall health state.
    /// </summary>
    public required LogHealthState State { get; init; }

    /// <summary>
    /// Latest checkpoint tree size.
    /// </summary>
    public required long LatestTreeSize { get; init; }

    /// <summary>
    /// Age of latest checkpoint.
    /// </summary>
    public required TimeSpan CheckpointAge { get; init; }

    /// <summary>
    /// Number of anomalies in the last 24 hours.
    /// </summary>
    public required int RecentAnomalyCount { get; init; }

    /// <summary>
    /// Most recent anomaly.
    /// </summary>
    public CheckpointAnomaly? LatestAnomaly { get; init; }

    /// <summary>
    /// When health was last evaluated.
    /// </summary>
    public required DateTimeOffset EvaluatedAt { get; init; }
}

/// <summary>
/// Health state of a log.
/// </summary>
public enum LogHealthState
{
    /// <summary>
    /// Log is healthy and up-to-date.
    /// </summary>
    Healthy,

    /// <summary>
    /// Log has warnings but is operational.
    /// </summary>
    Degraded,

    /// <summary>
    /// Log has critical issues.
    /// </summary>
    Unhealthy,

    /// <summary>
    /// Log status is unknown (no recent data).
    /// </summary>
    Unknown,
}
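
// Illustrative sketch only (not part of this commit): periodic health polling
// across configured origins. The logger, origins list, and threshold handling
// are assumptions for the example.
//
//   foreach (var origin in origins)
//   {
//       var health = await detector.GetLogHealthAsync(origin, ct);
//       if (health.State is LogHealthState.Unhealthy or LogHealthState.Unknown)
//       {
//           logger.LogWarning(
//               "Rekor log {Origin} is {State}; checkpoint age {Age}",
//               origin, health.State, health.CheckpointAge);
//       }
//   }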
|
||||
@@ -0,0 +1,133 @@
// -----------------------------------------------------------------------------
// IRekorCheckpointStore.cs
// Sprint: SPRINT_20260112_017_ATTESTOR_periodic_rekor_sync
// Tasks: REKOR-SYNC-001
// Description: Interface for persistent storage of Rekor checkpoints.
// -----------------------------------------------------------------------------

namespace StellaOps.Attestor.Core.Rekor;

/// <summary>
/// Persistent storage for Rekor log checkpoints.
/// Used to track sync state and detect divergence/rollback.
/// </summary>
public interface IRekorCheckpointStore
{
    /// <summary>
    /// Gets the latest checkpoint for a given origin.
    /// </summary>
    /// <param name="origin">The log origin identifier.</param>
    /// <param name="cancellationToken">Cancellation token.</param>
    /// <returns>The latest stored checkpoint, or null if none exists.</returns>
    Task<StoredCheckpoint?> GetLatestCheckpointAsync(
        string origin,
        CancellationToken cancellationToken = default);

    /// <summary>
    /// Gets the checkpoint at a specific tree size.
    /// </summary>
    /// <param name="origin">The log origin identifier.</param>
    /// <param name="treeSize">The tree size to query.</param>
    /// <param name="cancellationToken">Cancellation token.</param>
    /// <returns>The checkpoint at this tree size, or null if not found.</returns>
    Task<StoredCheckpoint?> GetCheckpointAtSizeAsync(
        string origin,
        long treeSize,
        CancellationToken cancellationToken = default);

    /// <summary>
    /// Stores a new checkpoint.
    /// </summary>
    /// <param name="checkpoint">The checkpoint to store.</param>
    /// <param name="cancellationToken">Cancellation token.</param>
    /// <returns>True if stored (new or updated), false if duplicate.</returns>
    Task<bool> StoreCheckpointAsync(
        StoredCheckpoint checkpoint,
        CancellationToken cancellationToken = default);

    /// <summary>
    /// Marks a checkpoint as verified.
    /// </summary>
    /// <param name="checkpointId">The checkpoint ID.</param>
    /// <param name="cancellationToken">Cancellation token.</param>
    Task MarkVerifiedAsync(
        Guid checkpointId,
        CancellationToken cancellationToken = default);

    /// <summary>
    /// Gets checkpoints in a range for consistency verification.
    /// </summary>
    /// <param name="origin">The log origin identifier.</param>
    /// <param name="fromSize">Start of range (inclusive).</param>
    /// <param name="toSize">End of range (inclusive).</param>
    /// <param name="cancellationToken">Cancellation token.</param>
    /// <returns>Checkpoints in the range, ordered by tree size.</returns>
    Task<IReadOnlyList<StoredCheckpoint>> GetCheckpointsInRangeAsync(
        string origin,
        long fromSize,
        long toSize,
        CancellationToken cancellationToken = default);

    /// <summary>
    /// Deletes checkpoints older than the specified date.
    /// </summary>
    /// <param name="olderThan">Delete checkpoints fetched before this time.</param>
    /// <param name="keepLatestPerOrigin">Whether to keep the latest checkpoint per origin.</param>
    /// <param name="cancellationToken">Cancellation token.</param>
    /// <returns>Number of checkpoints deleted.</returns>
    Task<int> PruneOldCheckpointsAsync(
        DateTimeOffset olderThan,
        bool keepLatestPerOrigin = true,
        CancellationToken cancellationToken = default);
}

/// <summary>
/// A stored Rekor checkpoint.
/// </summary>
public sealed record StoredCheckpoint
{
    /// <summary>
    /// Unique identifier for this stored checkpoint.
    /// </summary>
    public required Guid CheckpointId { get; init; }

    /// <summary>
    /// The log origin identifier.
    /// </summary>
    public required string Origin { get; init; }

    /// <summary>
    /// Tree size at this checkpoint.
    /// </summary>
    public required long TreeSize { get; init; }

    /// <summary>
    /// Root hash of the Merkle tree.
    /// </summary>
    public required byte[] RootHash { get; init; }

    /// <summary>
    /// Raw checkpoint text for re-verification.
    /// </summary>
    public required string RawCheckpoint { get; init; }

    /// <summary>
    /// Signature bytes.
    /// </summary>
    public required byte[] Signature { get; init; }

    /// <summary>
    /// When this checkpoint was fetched.
    /// </summary>
    public required DateTimeOffset FetchedAt { get; init; }

    /// <summary>
    /// Whether the signature has been verified.
    /// </summary>
    public bool Verified { get; init; }

    /// <summary>
    /// Optional verification timestamp.
    /// </summary>
    public DateTimeOffset? VerifiedAt { get; init; }
}
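
// -----------------------------------------------------------------------------
// Example: rollback detection with IRekorCheckpointStore (illustrative sketch).
// Uses only the interface above; the production divergence logic lives in the
// detector, and this helper name is hypothetical.
// -----------------------------------------------------------------------------

public static class CheckpointRollbackSketch
{
    /// <summary>
    /// Returns true when an incoming checkpoint regresses behind the latest
    /// stored checkpoint for the same origin. An append-only log must never
    /// shrink, so a smaller tree size is a monotonicity violation.
    /// </summary>
    public static async Task<bool> IsRollbackAsync(
        IRekorCheckpointStore store,
        StoredCheckpoint incoming,
        CancellationToken cancellationToken = default)
    {
        var latest = await store.GetLatestCheckpointAsync(incoming.Origin, cancellationToken);

        // First checkpoint seen for this origin: nothing to regress from.
        if (latest is null)
            return false;

        return incoming.TreeSize < latest.TreeSize;
    }
}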
@@ -0,0 +1,173 @@
// -----------------------------------------------------------------------------
// IRekorTileCache.cs
// Sprint: SPRINT_20260112_017_ATTESTOR_periodic_rekor_sync
// Tasks: REKOR-SYNC-003
// Description: Interface for caching Rekor Merkle tree tiles.
// -----------------------------------------------------------------------------

namespace StellaOps.Attestor.Core.Rekor;

/// <summary>
/// Cache for Rekor Merkle tree tiles.
/// Enables offline verification by storing tiles locally.
/// </summary>
public interface IRekorTileCache
{
    /// <summary>
    /// Gets a cached tile.
    /// </summary>
    /// <param name="origin">The log origin identifier.</param>
    /// <param name="level">The tree level (0 = leaves).</param>
    /// <param name="index">The tile index at this level.</param>
    /// <param name="cancellationToken">Cancellation token.</param>
    /// <returns>The cached tile data, or null if not cached.</returns>
    Task<CachedTile?> GetTileAsync(
        string origin,
        int level,
        long index,
        CancellationToken cancellationToken = default);

    /// <summary>
    /// Stores a tile in the cache.
    /// </summary>
    /// <param name="tile">The tile to cache.</param>
    /// <param name="cancellationToken">Cancellation token.</param>
    Task StoreTileAsync(
        CachedTile tile,
        CancellationToken cancellationToken = default);

    /// <summary>
    /// Checks if a tile is cached.
    /// </summary>
    /// <param name="origin">The log origin identifier.</param>
    /// <param name="level">The tree level.</param>
    /// <param name="index">The tile index.</param>
    /// <param name="cancellationToken">Cancellation token.</param>
    /// <returns>True if the tile is cached.</returns>
    Task<bool> HasTileAsync(
        string origin,
        int level,
        long index,
        CancellationToken cancellationToken = default);

    /// <summary>
    /// Gets cache statistics for an origin.
    /// </summary>
    /// <param name="origin">The log origin identifier.</param>
    /// <param name="cancellationToken">Cancellation token.</param>
    /// <returns>Cache statistics.</returns>
    Task<TileCacheStats> GetStatsAsync(
        string origin,
        CancellationToken cancellationToken = default);

    /// <summary>
    /// Prunes old or partial tiles from the cache.
    /// </summary>
    /// <param name="origin">The log origin identifier, or null for all origins.</param>
    /// <param name="olderThan">Prune tiles cached before this time.</param>
    /// <param name="cancellationToken">Cancellation token.</param>
    /// <returns>Number of tiles pruned.</returns>
    Task<int> PruneAsync(
        string? origin,
        DateTimeOffset olderThan,
        CancellationToken cancellationToken = default);

    /// <summary>
    /// Lists missing tiles needed for verification up to a tree size.
    /// </summary>
    /// <param name="origin">The log origin identifier.</param>
    /// <param name="treeSize">The tree size to verify up to.</param>
    /// <param name="cancellationToken">Cancellation token.</param>
    /// <returns>List of missing tile coordinates.</returns>
    Task<IReadOnlyList<TileCoordinate>> GetMissingTilesAsync(
        string origin,
        long treeSize,
        CancellationToken cancellationToken = default);
}

/// <summary>
/// A cached Merkle tree tile.
/// </summary>
public sealed record CachedTile
{
    /// <summary>
    /// The log origin identifier.
    /// </summary>
    public required string Origin { get; init; }

    /// <summary>
    /// The tree level (0 = leaf level).
    /// </summary>
    public required int Level { get; init; }

    /// <summary>
    /// The tile index at this level.
    /// </summary>
    public required long Index { get; init; }

    /// <summary>
    /// Number of hashes in this tile (may be partial).
    /// </summary>
    public required int Width { get; init; }

    /// <summary>
    /// The hash data (32 bytes per hash).
    /// </summary>
    public required byte[] Hashes { get; init; }

    /// <summary>
    /// When this tile was cached.
    /// </summary>
    public required DateTimeOffset CachedAt { get; init; }

    /// <summary>
    /// Whether this is a partial tile (at the edge of the tree).
    /// </summary>
    public bool IsPartial { get; init; }

    /// <summary>
    /// Tree size when this tile was fetched.
    /// </summary>
    public long? FetchedAtTreeSize { get; init; }
}

/// <summary>
/// Coordinates for a tile in the Merkle tree.
/// </summary>
public readonly record struct TileCoordinate(int Level, long Index);

/// <summary>
/// Statistics about cached tiles for an origin.
/// </summary>
public sealed record TileCacheStats
{
    /// <summary>
    /// Total number of cached tiles.
    /// </summary>
    public required int TotalTiles { get; init; }

    /// <summary>
    /// Total bytes of cached tile data.
    /// </summary>
    public required long TotalBytes { get; init; }

    /// <summary>
    /// Number of partial tiles (at tree edge).
    /// </summary>
    public required int PartialTiles { get; init; }

    /// <summary>
    /// Oldest tile cache timestamp.
    /// </summary>
    public DateTimeOffset? OldestTile { get; init; }

    /// <summary>
    /// Newest tile cache timestamp.
    /// </summary>
    public DateTimeOffset? NewestTile { get; init; }

    /// <summary>
    /// Maximum tree size covered by cached tiles.
    /// </summary>
    public long MaxTreeSizeCovered { get; init; }
}
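
// -----------------------------------------------------------------------------
// Example: enumerating level-0 tile coordinates for a tree size (illustrative
// sketch). Assumes the 256-hash tile width implied elsewhere in this commit by
// the `Width < 256` partial-tile check; the helper name is hypothetical.
// Higher levels follow the same pattern, each summarizing 256 tiles below.
// -----------------------------------------------------------------------------

public static class TileMathSketch
{
    private const int TileWidth = 256;

    /// <summary>
    /// Yields the level-0 tiles covering <paramref name="treeSize"/> leaves.
    /// The trailing tile is partial unless the tree size is a multiple of 256.
    /// </summary>
    public static IEnumerable<TileCoordinate> LeafTiles(long treeSize)
    {
        var fullTiles = treeSize / TileWidth;
        for (long i = 0; i < fullTiles; i++)
            yield return new TileCoordinate(Level: 0, Index: i);

        // Partial edge tile holding the remaining (treeSize % 256) hashes.
        if (treeSize % TileWidth != 0)
            yield return new TileCoordinate(Level: 0, Index: fullTiles);
    }
}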
@@ -0,0 +1,362 @@
// -----------------------------------------------------------------------------
// RekorSyncBackgroundService.cs
// Sprint: SPRINT_20260112_017_ATTESTOR_periodic_rekor_sync
// Tasks: REKOR-SYNC-005, REKOR-SYNC-006, REKOR-SYNC-007, REKOR-SYNC-008, REKOR-SYNC-009
// Description: Background service for periodic Rekor checkpoint and tile synchronization.
// -----------------------------------------------------------------------------

using System.Diagnostics;
using System.Diagnostics.Metrics;
using Microsoft.Extensions.Hosting;
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Options;

namespace StellaOps.Attestor.Core.Rekor;

/// <summary>
/// Background service that periodically synchronizes Rekor checkpoints and tiles.
/// Enables offline verification by maintaining local copies of log data.
/// </summary>
public sealed class RekorSyncBackgroundService : BackgroundService
{
    private readonly IRekorTileClient _tileClient;
    private readonly IRekorCheckpointStore _checkpointStore;
    private readonly IRekorTileCache _tileCache;
    private readonly IRekorCheckpointVerifier _checkpointVerifier;
    private readonly RekorSyncOptions _options;
    private readonly ILogger<RekorSyncBackgroundService> _logger;

    // Metrics
    private static readonly Meter Meter = new("StellaOps.Attestor.RekorSync", "1.0.0");
    private static readonly Counter<long> CheckpointsFetched = Meter.CreateCounter<long>(
        "attestor.rekor_sync_checkpoints_fetched",
        description: "Total number of checkpoints fetched");
    private static readonly Counter<long> TilesFetched = Meter.CreateCounter<long>(
        "attestor.rekor_sync_tiles_fetched",
        description: "Total number of tiles fetched");
    private static readonly Histogram<double> CheckpointAgeSeconds = Meter.CreateHistogram<double>(
        "attestor.rekor_sync_checkpoint_age_seconds",
        unit: "s",
        description: "Age of the latest synced checkpoint in seconds");
    private static readonly ObservableGauge<long> TilesCached = Meter.CreateObservableGauge<long>(
        "attestor.rekor_sync_tiles_cached",
        observeValue: () => _lastTilesCachedCount,
        description: "Number of tiles currently cached");

    private static long _lastTilesCachedCount;

    public RekorSyncBackgroundService(
        IRekorTileClient tileClient,
        IRekorCheckpointStore checkpointStore,
        IRekorTileCache tileCache,
        IRekorCheckpointVerifier checkpointVerifier,
        IOptions<RekorSyncOptions> options,
        ILogger<RekorSyncBackgroundService> logger)
    {
        _tileClient = tileClient;
        _checkpointStore = checkpointStore;
        _tileCache = tileCache;
        _checkpointVerifier = checkpointVerifier;
        _options = options.Value;
        _logger = logger;
    }

    protected override async Task ExecuteAsync(CancellationToken stoppingToken)
    {
        if (!_options.Enabled)
        {
            _logger.LogInformation("Rekor sync service is disabled");
            return;
        }

        _logger.LogInformation(
            "Rekor sync service started with interval {Interval} for {BackendCount} backend(s)",
            _options.SyncInterval,
            _options.Backends.Count);

        // Initial delay before first sync
        await Task.Delay(_options.InitialDelay, stoppingToken);

        while (!stoppingToken.IsCancellationRequested)
        {
            try
            {
                await SyncAllBackendsAsync(stoppingToken);
            }
            catch (OperationCanceledException) when (stoppingToken.IsCancellationRequested)
            {
                break;
            }
            catch (Exception ex)
            {
                _logger.LogError(ex, "Error during Rekor sync cycle");
            }

            try
            {
                await Task.Delay(_options.SyncInterval, stoppingToken);
            }
            catch (OperationCanceledException)
            {
                break;
            }
        }

        _logger.LogInformation("Rekor sync service stopped");
    }

    private async Task SyncAllBackendsAsync(CancellationToken cancellationToken)
    {
        var sw = Stopwatch.StartNew();

        foreach (var backend in _options.Backends)
        {
            try
            {
                await SyncBackendAsync(backend, cancellationToken);
            }
            catch (Exception ex)
            {
                _logger.LogWarning(ex, "Failed to sync backend {BackendUrl}", backend.Url);
            }
        }

        _logger.LogDebug("Rekor sync cycle completed in {ElapsedMs}ms", sw.ElapsedMilliseconds);
    }

    private async Task SyncBackendAsync(RekorBackend backend, CancellationToken cancellationToken)
    {
        _logger.LogDebug("Syncing Rekor backend {BackendUrl}", backend.Url);

        // Step 1: Fetch latest checkpoint
        var checkpoint = await _tileClient.GetCheckpointAsync(backend, cancellationToken);
        if (checkpoint == null)
        {
            _logger.LogWarning("No checkpoint available from {BackendUrl}", backend.Url);
            return;
        }

        CheckpointsFetched.Add(1, new KeyValuePair<string, object?>("origin", checkpoint.Origin));

        // Step 2: Verify checkpoint signature
        var verificationResult = await _checkpointVerifier.VerifyCheckpointAsync(
            checkpoint,
            backend,
            cancellationToken);

        if (!verificationResult.IsValid)
        {
            _logger.LogError(
                "Checkpoint signature verification failed for {Origin}: {Error}",
                checkpoint.Origin,
                verificationResult.Error);
            return;
        }

        // Step 3: Store checkpoint. Read the previous checkpoint first: once the
        // new one is stored, GetLatestCheckpointAsync would return the checkpoint
        // we just wrote, and the tile sync below could no longer compute the
        // new-entry range.
        var previousCheckpoint = await _checkpointStore.GetLatestCheckpointAsync(
            checkpoint.Origin,
            cancellationToken);

        var stored = new StoredCheckpoint
        {
            CheckpointId = Guid.NewGuid(),
            Origin = checkpoint.Origin,
            TreeSize = checkpoint.TreeSize,
            RootHash = checkpoint.RootHash,
            RawCheckpoint = checkpoint.RawCheckpoint,
            Signature = checkpoint.Signatures.FirstOrDefault()?.Signature ?? [],
            FetchedAt = DateTimeOffset.UtcNow,
            Verified = verificationResult.IsValid,
            VerifiedAt = verificationResult.IsValid ? DateTimeOffset.UtcNow : null,
        };

        var isNew = await _checkpointStore.StoreCheckpointAsync(stored, cancellationToken);

        if (isNew)
        {
            _logger.LogInformation(
                "New checkpoint stored: {Origin} at tree size {TreeSize}",
                checkpoint.Origin,
                checkpoint.TreeSize);
        }

        // Record checkpoint age metric (time since fetch; near zero at this point
        // because FetchedAt was assigned just above).
        var age = (DateTimeOffset.UtcNow - stored.FetchedAt).TotalSeconds;
        CheckpointAgeSeconds.Record(age, new KeyValuePair<string, object?>("origin", checkpoint.Origin));

        // Step 4: Incremental tile sync, starting from the tree size of the
        // checkpoint that preceded this one (0 if this is the first).
        if (_options.EnableTileSync)
        {
            await SyncTilesAsync(backend, checkpoint, previousCheckpoint?.TreeSize ?? 0, cancellationToken);
        }

        // Update tiles cached metric
        var stats = await _tileCache.GetStatsAsync(checkpoint.Origin, cancellationToken);
        _lastTilesCachedCount = stats.TotalTiles;
    }

    private async Task SyncTilesAsync(
        RekorBackend backend,
        RekorTileCheckpoint checkpoint,
        long startIndex,
        CancellationToken cancellationToken)
    {
        var endIndex = checkpoint.TreeSize;

        if (startIndex >= endIndex)
        {
            _logger.LogDebug("No new entries to sync for {Origin}", checkpoint.Origin);
            return;
        }

        _logger.LogDebug(
            "Syncing tiles for entries {StartIndex} to {EndIndex} ({Count} entries)",
            startIndex, endIndex, endIndex - startIndex);

        // Get list of missing tiles
        var missingTiles = await _tileCache.GetMissingTilesAsync(
            checkpoint.Origin,
            checkpoint.TreeSize,
            cancellationToken);

        if (missingTiles.Count == 0)
        {
            _logger.LogDebug("All tiles cached for {Origin} up to tree size {TreeSize}",
                checkpoint.Origin, checkpoint.TreeSize);
            return;
        }

        _logger.LogDebug("Fetching {Count} missing tiles for {Origin}", missingTiles.Count, checkpoint.Origin);

        // Limit the number of tiles to fetch per sync cycle
        var tilesToFetch = missingTiles.Take(_options.MaxTilesPerSync).ToList();

        foreach (var coord in tilesToFetch)
        {
            try
            {
                var tileData = await _tileClient.GetTileAsync(
                    backend,
                    coord.Level,
                    coord.Index,
                    cancellationToken);

                if (tileData != null)
                {
                    var cachedTile = new CachedTile
                    {
                        Origin = checkpoint.Origin,
                        Level = tileData.Level,
                        Index = tileData.Index,
                        Width = tileData.Width,
                        Hashes = tileData.Hashes,
                        CachedAt = DateTimeOffset.UtcNow,
                        IsPartial = tileData.Width < 256,
                        FetchedAtTreeSize = checkpoint.TreeSize,
                    };

                    await _tileCache.StoreTileAsync(cachedTile, cancellationToken);
                    TilesFetched.Add(1, new KeyValuePair<string, object?>("origin", checkpoint.Origin));
                }
            }
            catch (Exception ex)
            {
                _logger.LogWarning(ex, "Failed to fetch tile {Level}/{Index} for {Origin}",
                    coord.Level, coord.Index, checkpoint.Origin);
            }
        }
    }
}

/// <summary>
/// Options for Rekor sync service.
/// </summary>
public sealed record RekorSyncOptions
{
    /// <summary>
    /// Whether the sync service is enabled.
    /// </summary>
    public bool Enabled { get; init; } = true;

    /// <summary>
    /// Interval between sync cycles.
    /// </summary>
    public TimeSpan SyncInterval { get; init; } = TimeSpan.FromMinutes(5);

    /// <summary>
    /// Initial delay before first sync.
    /// </summary>
    public TimeSpan InitialDelay { get; init; } = TimeSpan.FromSeconds(30);

    /// <summary>
    /// Whether to sync tiles (in addition to checkpoints).
    /// </summary>
    public bool EnableTileSync { get; init; } = true;

    /// <summary>
    /// Maximum number of tiles to fetch per sync cycle.
    /// </summary>
    public int MaxTilesPerSync { get; init; } = 100;

    /// <summary>
    /// Rekor backends to sync.
    /// </summary>
    public IReadOnlyList<RekorBackend> Backends { get; init; } = new[]
    {
        new RekorBackend
        {
            Name = "sigstore-prod",
            Url = new Uri("https://rekor.sigstore.dev"),
            TileBaseUrl = new Uri("https://rekor.sigstore.dev/api/v1/log/tiles"),
        }
    };
}

/// <summary>
/// Interface for verifying Rekor checkpoint signatures.
/// </summary>
public interface IRekorCheckpointVerifier
{
    /// <summary>
    /// Verifies a checkpoint's signature against trusted keys.
    /// </summary>
    Task<CheckpointVerificationResult> VerifyCheckpointAsync(
        RekorTileCheckpoint checkpoint,
        RekorBackend backend,
        CancellationToken cancellationToken = default);
}

/// <summary>
/// Result of checkpoint verification.
/// </summary>
public sealed record CheckpointVerificationResult
{
    /// <summary>
    /// Whether the checkpoint signature is valid.
    /// </summary>
    public required bool IsValid { get; init; }

    /// <summary>
    /// Error message if verification failed.
    /// </summary>
    public string? Error { get; init; }

    /// <summary>
    /// Key ID that verified the signature.
    /// </summary>
    public string? VerifyingKeyId { get; init; }

    /// <summary>
    /// Creates a successful result.
    /// </summary>
    public static CheckpointVerificationResult Success(string? keyId = null) =>
        new() { IsValid = true, VerifyingKeyId = keyId };

    /// <summary>
    /// Creates a failed result.
    /// </summary>
    public static CheckpointVerificationResult Failure(string error) =>
        new() { IsValid = false, Error = error };
}
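
// -----------------------------------------------------------------------------
// Example: host wiring for the sync service (illustrative sketch). Assumes
// implementations of IRekorTileClient, IRekorCheckpointStore, IRekorTileCache
// and IRekorCheckpointVerifier are registered elsewhere; the extension method
// name below is hypothetical.
// -----------------------------------------------------------------------------

using Microsoft.Extensions.DependencyInjection;
using Microsoft.Extensions.Options;

public static class RekorSyncRegistrationSketch
{
    public static IServiceCollection AddRekorSyncSketch(this IServiceCollection services)
    {
        // Explicit options instance; the record's init-only properties are set
        // at construction, so Options.Create is simpler here than a Configure
        // callback (which could not assign init-only setters).
        services.AddSingleton(Options.Create(new RekorSyncOptions
        {
            SyncInterval = TimeSpan.FromMinutes(10),
            MaxTilesPerSync = 50,
        }));

        // Standard BackgroundService registration from Microsoft.Extensions.Hosting.
        services.AddHostedService<RekorSyncBackgroundService>();
        return services;
    }
}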
@@ -8,6 +8,7 @@
  </PropertyGroup>
  <ItemGroup>
    <PackageReference Include="JsonSchema.Net" />
    <PackageReference Include="Microsoft.Extensions.Hosting.Abstractions" />
    <PackageReference Include="Microsoft.Extensions.Logging.Abstractions" />
    <PackageReference Include="Microsoft.Extensions.Options" />
    <PackageReference Include="Sodium.Core" />
@@ -0,0 +1,593 @@
// -----------------------------------------------------------------------------
// AiCodeGuardPredicate.cs
// Sprint: SPRINT_20260112_010_ATTESTOR_ai_code_guard_predicate
// Task: ATTESTOR-AIGUARD-001
// Description: AI Code Guard predicate schema and models.
// -----------------------------------------------------------------------------

using System;
using System.Collections.Generic;
using System.Collections.Immutable;
using System.Text.Json;
using System.Text.Json.Serialization;

namespace StellaOps.Attestor.Predicates.AiCodeGuard;

/// <summary>
/// AI Code Guard predicate type constants.
/// </summary>
public static class AiCodeGuardPredicateTypes
{
    /// <summary>
    /// Version 1 predicate type URI.
    /// </summary>
    public const string AiCodeGuardV1 = "https://stella-ops.org/predicates/ai-code-guard/v1";
}

/// <summary>
/// AI Code Guard attestation predicate.
/// Attests to the results of AI-generated code security analysis.
/// </summary>
public sealed record AiCodeGuardPredicate
{
    /// <summary>
    /// The predicate type URI.
    /// </summary>
    public const string PredicateType = AiCodeGuardPredicateTypes.AiCodeGuardV1;

    /// <summary>
    /// Schema version for forward compatibility.
    /// </summary>
    [JsonPropertyName("schemaVersion")]
    public required string SchemaVersion { get; init; }

    /// <summary>
    /// Timestamp when the analysis was performed (input, not wall-clock).
    /// </summary>
    [JsonPropertyName("analysisTimestamp")]
    public required DateTimeOffset AnalysisTimestamp { get; init; }

    /// <summary>
    /// Scanner configuration used for analysis.
    /// </summary>
    [JsonPropertyName("scannerConfig")]
    public required AiCodeGuardScannerConfig ScannerConfig { get; init; }

    /// <summary>
    /// Analysis inputs (files, commits, etc.).
    /// </summary>
    [JsonPropertyName("inputs")]
    public required AiCodeGuardInputs Inputs { get; init; }

    /// <summary>
    /// Detected AI-generated code findings.
    /// </summary>
    [JsonPropertyName("findings")]
    public required ImmutableList<AiCodeGuardFinding> Findings { get; init; }

    /// <summary>
    /// Overall verdict and summary.
    /// </summary>
    [JsonPropertyName("verdict")]
    public required AiCodeGuardVerdict Verdict { get; init; }

    /// <summary>
    /// Optional policy overrides applied to findings.
    /// </summary>
    [JsonPropertyName("overrides")]
    public ImmutableList<AiCodeGuardOverride>? Overrides { get; init; }

    /// <summary>
    /// Creates a new predicate with schema version 1.0.
    /// </summary>
    public static AiCodeGuardPredicate CreateV1(
        DateTimeOffset analysisTimestamp,
        AiCodeGuardScannerConfig scannerConfig,
        AiCodeGuardInputs inputs,
        IEnumerable<AiCodeGuardFinding> findings,
        AiCodeGuardVerdict verdict,
        IEnumerable<AiCodeGuardOverride>? overrides = null)
    {
        return new AiCodeGuardPredicate
        {
            SchemaVersion = "1.0",
            AnalysisTimestamp = analysisTimestamp,
            ScannerConfig = scannerConfig,
            Inputs = inputs,
            Findings = findings.ToImmutableList(),
            Verdict = verdict,
            Overrides = overrides?.ToImmutableList(),
        };
    }
}

/// <summary>
/// Scanner configuration used for analysis.
/// </summary>
public sealed record AiCodeGuardScannerConfig
{
    /// <summary>
    /// Scanner version identifier.
    /// </summary>
    [JsonPropertyName("scannerVersion")]
    public required string ScannerVersion { get; init; }

    /// <summary>
    /// Detection model version.
    /// </summary>
    [JsonPropertyName("modelVersion")]
    public required string ModelVersion { get; init; }

    /// <summary>
    /// Detection confidence threshold (0.0-1.0).
    /// </summary>
    [JsonPropertyName("confidenceThreshold")]
    public required double ConfidenceThreshold { get; init; }

    /// <summary>
    /// Enabled detection categories.
    /// </summary>
    [JsonPropertyName("enabledCategories")]
    public required ImmutableList<string> EnabledCategories { get; init; }

    /// <summary>
    /// Rule set identifiers applied.
    /// </summary>
    [JsonPropertyName("ruleSets")]
    public ImmutableList<string>? RuleSets { get; init; }
}

/// <summary>
/// Analysis inputs.
/// </summary>
public sealed record AiCodeGuardInputs
{
    /// <summary>
    /// Source repository information.
    /// </summary>
    [JsonPropertyName("repository")]
    public required AiCodeGuardRepository Repository { get; init; }

    /// <summary>
    /// Files analyzed.
    /// </summary>
    [JsonPropertyName("files")]
    public required ImmutableList<AiCodeGuardFile> Files { get; init; }

    /// <summary>
    /// Total lines of code analyzed.
    /// </summary>
    [JsonPropertyName("totalLinesAnalyzed")]
    public required long TotalLinesAnalyzed { get; init; }
}

/// <summary>
/// Repository information.
/// </summary>
public sealed record AiCodeGuardRepository
{
    /// <summary>
    /// Repository URI.
    /// </summary>
    [JsonPropertyName("uri")]
    public required string Uri { get; init; }

    /// <summary>
    /// Commit SHA being analyzed.
    /// </summary>
    [JsonPropertyName("commitSha")]
    public required string CommitSha { get; init; }

    /// <summary>
    /// Branch name (optional).
    /// </summary>
    [JsonPropertyName("branch")]
    public string? Branch { get; init; }

    /// <summary>
    /// Tag name (optional).
    /// </summary>
    [JsonPropertyName("tag")]
    public string? Tag { get; init; }
}

/// <summary>
/// File information.
/// </summary>
public sealed record AiCodeGuardFile
{
    /// <summary>
    /// File path relative to repository root.
    /// </summary>
    [JsonPropertyName("path")]
    public required string Path { get; init; }

    /// <summary>
    /// File content hash (SHA-256).
    /// </summary>
    [JsonPropertyName("digest")]
    public required string Digest { get; init; }

    /// <summary>
    /// Number of lines in file.
    /// </summary>
    [JsonPropertyName("lineCount")]
    public required int LineCount { get; init; }

    /// <summary>
    /// Detected programming language.
    /// </summary>
    [JsonPropertyName("language")]
    public string? Language { get; init; }
}

/// <summary>
/// AI-generated code finding.
/// </summary>
public sealed record AiCodeGuardFinding
{
    /// <summary>
    /// Unique finding identifier (stable across runs).
    /// </summary>
    [JsonPropertyName("id")]
    public required string Id { get; init; }

    /// <summary>
    /// Finding category.
    /// </summary>
    [JsonPropertyName("category")]
    public required AiCodeGuardCategory Category { get; init; }

    /// <summary>
    /// Finding severity.
    /// </summary>
    [JsonPropertyName("severity")]
    public required AiCodeGuardSeverity Severity { get; init; }

    /// <summary>
    /// Detection confidence (0.0-1.0).
    /// </summary>
    [JsonPropertyName("confidence")]
    public required double Confidence { get; init; }

    /// <summary>
    /// Location in source code.
    /// </summary>
    [JsonPropertyName("location")]
    public required AiCodeGuardLocation Location { get; init; }

    /// <summary>
    /// Human-readable description.
    /// </summary>
    [JsonPropertyName("description")]
    public required string Description { get; init; }

    /// <summary>
    /// Rule that triggered this finding.
    /// </summary>
    [JsonPropertyName("ruleId")]
    public required string RuleId { get; init; }

    /// <summary>
    /// Evidence supporting the finding.
    /// </summary>
    [JsonPropertyName("evidence")]
    public AiCodeGuardEvidence? Evidence { get; init; }

    /// <summary>
    /// Suggested remediation.
    /// </summary>
    [JsonPropertyName("remediation")]
    public string? Remediation { get; init; }
}

/// <summary>
/// Finding category.
/// </summary>
[JsonConverter(typeof(JsonStringEnumConverter))]
public enum AiCodeGuardCategory
{
    /// <summary>
    /// Likely AI-generated code detected.
    /// </summary>
    AiGenerated,

    /// <summary>
    /// Known insecure pattern in AI-generated code.
    /// </summary>
    InsecurePattern,

    /// <summary>
    /// Potential hallucination (non-existent API, etc.).
    /// </summary>
    Hallucination,

    /// <summary>
    /// License violation risk.
    /// </summary>
    LicenseRisk,

    /// <summary>
    /// Untrusted or unverified dependency introduced.
    /// </summary>
    UntrustedDependency,

    /// <summary>
    /// Code quality issue typical of AI generation.
    /// </summary>
    QualityIssue,

    /// <summary>
    /// Other/custom category.
    /// </summary>
    Other
}

/// <summary>
/// Finding severity.
/// </summary>
[JsonConverter(typeof(JsonStringEnumConverter))]
public enum AiCodeGuardSeverity
{
    /// <summary>
    /// Informational finding.
    /// </summary>
    Info,

    /// <summary>
    /// Low severity.
    /// </summary>
    Low,

    /// <summary>
    /// Medium severity.
    /// </summary>
    Medium,

    /// <summary>
    /// High severity.
    /// </summary>
    High,

    /// <summary>
    /// Critical severity.
    /// </summary>
    Critical
}

/// <summary>
/// Source code location.
/// </summary>
public sealed record AiCodeGuardLocation
{
    /// <summary>
    /// File path.
    /// </summary>
    [JsonPropertyName("file")]
    public required string File { get; init; }

    /// <summary>
    /// Start line (1-based).
    /// </summary>
    [JsonPropertyName("startLine")]
    public required int StartLine { get; init; }

    /// <summary>
    /// End line (1-based).
    /// </summary>
    [JsonPropertyName("endLine")]
    public required int EndLine { get; init; }

    /// <summary>
    /// Start column (1-based, optional).
    /// </summary>
    [JsonPropertyName("startColumn")]
    public int? StartColumn { get; init; }

    /// <summary>
    /// End column (1-based, optional).
    /// </summary>
    [JsonPropertyName("endColumn")]
    public int? EndColumn { get; init; }

    /// <summary>
    /// Code snippet (optional, for context).
    /// </summary>
    [JsonPropertyName("snippet")]
    public string? Snippet { get; init; }
}

/// <summary>
/// Evidence supporting a finding.
/// </summary>
public sealed record AiCodeGuardEvidence
{
    /// <summary>
    /// Detection method used.
    /// </summary>
    [JsonPropertyName("method")]
    public required string Method { get; init; }

    /// <summary>
    /// Indicators that led to this finding.
    /// </summary>
    [JsonPropertyName("indicators")]
    public required ImmutableList<string> Indicators { get; init; }

    /// <summary>
    /// Model perplexity score (if applicable).
    /// </summary>
    [JsonPropertyName("perplexityScore")]
    public double? PerplexityScore { get; init; }

    /// <summary>
    /// Similar known AI patterns matched.
    /// </summary>
    [JsonPropertyName("patternMatches")]
    public ImmutableList<string>? PatternMatches { get; init; }
}

/// <summary>
/// Overall analysis verdict.
/// </summary>
public sealed record AiCodeGuardVerdict
{
    /// <summary>
    /// Overall status.
    /// </summary>
    [JsonPropertyName("status")]
    public required AiCodeGuardVerdictStatus Status { get; init; }

    /// <summary>
    /// Total findings count.
    /// </summary>
    [JsonPropertyName("totalFindings")]
    public required int TotalFindings { get; init; }

    /// <summary>
    /// Findings count by severity.
    /// </summary>
    [JsonPropertyName("findingsBySeverity")]
    public required ImmutableDictionary<string, int> FindingsBySeverity { get; init; }

    /// <summary>
    /// Estimated AI-generated code percentage (0-100).
    /// </summary>
    [JsonPropertyName("aiGeneratedPercentage")]
    public double? AiGeneratedPercentage { get; init; }

    /// <summary>
    /// Verdict message.
    /// </summary>
    [JsonPropertyName("message")]
    public required string Message { get; init; }

    /// <summary>
    /// Recommendation for policy decision.
    /// </summary>
    [JsonPropertyName("recommendation")]
    public AiCodeGuardRecommendation? Recommendation { get; init; }
}

/// <summary>
/// Verdict status.
/// </summary>
[JsonConverter(typeof(JsonStringEnumConverter))]
public enum AiCodeGuardVerdictStatus
{
    /// <summary>
    /// Analysis passed - no blocking findings.
    /// </summary>
    Pass,

    /// <summary>
    /// Analysis passed with warnings.
    /// </summary>
    PassWithWarnings,

    /// <summary>
    /// Analysis failed - blocking findings present.
    /// </summary>
    Fail,

    /// <summary>
    /// Analysis errored and could not complete.
    /// </summary>
    Error
}

/// <summary>
/// Policy recommendation.
/// </summary>
[JsonConverter(typeof(JsonStringEnumConverter))]
public enum AiCodeGuardRecommendation
{
    /// <summary>
    /// Allow to proceed.
    /// </summary>
    Allow,

    /// <summary>
    /// Require manual review.
    /// </summary>
    RequireReview,

    /// <summary>
    /// Block unless overridden.
    /// </summary>
    Block,

    /// <summary>
    /// Quarantine for further analysis.
    /// </summary>
    Quarantine
}

/// <summary>
/// Policy override applied to a finding.
/// </summary>
public sealed record AiCodeGuardOverride
{
    /// <summary>
    /// Finding ID being overridden.
    /// </summary>
    [JsonPropertyName("findingId")]
    public required string FindingId { get; init; }

    /// <summary>
    /// Override action.
    /// </summary>
    [JsonPropertyName("action")]
    public required AiCodeGuardOverrideAction Action { get; init; }

    /// <summary>
    /// Justification for the override.
    /// </summary>
    [JsonPropertyName("justification")]
    public required string Justification { get; init; }

    /// <summary>
    /// Who approved the override.
    /// </summary>
    [JsonPropertyName("approvedBy")]
    public required string ApprovedBy { get; init; }

    /// <summary>
    /// When the override was approved.
    /// </summary>
    [JsonPropertyName("approvedAt")]
    public required DateTimeOffset ApprovedAt { get; init; }

    /// <summary>
    /// When the override expires (optional).
    /// </summary>
    [JsonPropertyName("expiresAt")]
    public DateTimeOffset? ExpiresAt { get; init; }
}

/// <summary>
/// Override action types.
/// </summary>
[JsonConverter(typeof(JsonStringEnumConverter))]
public enum AiCodeGuardOverrideAction
{
    /// <summary>
    /// Suppress the finding entirely.
    /// </summary>
    Suppress,

    /// <summary>
    /// Downgrade severity.
    /// </summary>
    DowngradeSeverity,

    /// <summary>
    /// Acknowledge and accept the risk.
    /// </summary>
    AcceptRisk,

    /// <summary>
    /// Mark as false positive.
    /// </summary>
    FalsePositive
}
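
// -----------------------------------------------------------------------------
// Example: building a minimal predicate via CreateV1 (illustrative sketch).
// Every value below is a placeholder, not real scanner output; the wrapper
// class name is hypothetical.
// -----------------------------------------------------------------------------

using System;
using System.Collections.Immutable;

public static class AiCodeGuardPredicateSketch
{
    public static AiCodeGuardPredicate CreateSample() =>
        AiCodeGuardPredicate.CreateV1(
            analysisTimestamp: DateTimeOffset.Parse("2026-01-12T00:00:00+00:00"),
            scannerConfig: new AiCodeGuardScannerConfig
            {
                ScannerVersion = "0.1.0",                       // placeholder
                ModelVersion = "detector-2026.01",              // placeholder
                ConfidenceThreshold = 0.8,
                EnabledCategories = ImmutableList.Create("AiGenerated", "InsecurePattern"),
            },
            inputs: new AiCodeGuardInputs
            {
                Repository = new AiCodeGuardRepository
                {
                    Uri = "https://git.example.com/repo.git",   // placeholder
                    CommitSha = new string('0', 40),            // placeholder SHA
                },
                Files = ImmutableList.Create(new AiCodeGuardFile
                {
                    Path = "src/Program.cs",
                    Digest = new string('0', 64),               // placeholder SHA-256
                    LineCount = 120,
                }),
                TotalLinesAnalyzed = 120,
            },
            findings: Array.Empty<AiCodeGuardFinding>(),
            verdict: new AiCodeGuardVerdict
            {
                Status = AiCodeGuardVerdictStatus.Pass,
                TotalFindings = 0,
                FindingsBySeverity = ImmutableDictionary<string, int>.Empty,
                Message = "No findings above the configured threshold.",
            });
}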
@@ -0,0 +1,659 @@
|
||||
// -----------------------------------------------------------------------------
|
||||
// AiCodeGuardPredicateParser.cs
|
||||
// Sprint: SPRINT_20260112_010_ATTESTOR_ai_code_guard_predicate
|
||||
// Task: ATTESTOR-AIGUARD-002
|
||||
// Description: Predicate parser for AI Code Guard attestations.
|
||||
// -----------------------------------------------------------------------------
|
||||
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.Collections.Immutable;
|
||||
using System.Linq;
|
||||
using System.Text.Json;
|
||||
using Microsoft.Extensions.Logging;
|
||||
|
||||
namespace StellaOps.Attestor.Predicates.AiCodeGuard;
|
||||
|
||||
/// <summary>
|
||||
/// Parser for AI Code Guard predicate payloads.
|
||||
/// </summary>
|
||||
public sealed class AiCodeGuardPredicateParser : IAiCodeGuardPredicateParser
|
||||
{
|
||||
private readonly ILogger<AiCodeGuardPredicateParser> _logger;
|
||||
|
||||
private static readonly JsonSerializerOptions SerializerOptions = new()
|
||||
{
|
||||
PropertyNameCaseInsensitive = true,
|
||||
PropertyNamingPolicy = JsonNamingPolicy.CamelCase,
|
||||
WriteIndented = false,
|
||||
};
|
||||
|
||||
public AiCodeGuardPredicateParser(ILogger<AiCodeGuardPredicateParser> logger)
|
||||
{
|
||||
_logger = logger ?? throw new ArgumentNullException(nameof(logger));
|
||||
}
|
||||
|
||||
/// <inheritdoc />
|
||||
public string PredicateType => AiCodeGuardPredicateTypes.AiCodeGuardV1;
|
||||
|
||||
/// <inheritdoc />
|
||||
public AiCodeGuardParseResult Parse(JsonElement predicatePayload)
|
||||
{
|
||||
var errors = new List<string>();
|
||||
|
||||
try
|
||||
{
|
||||
// Validate required fields
|
||||
if (!TryGetRequiredString(predicatePayload, "schemaVersion", out var schemaVersion, errors))
|
||||
return AiCodeGuardParseResult.Failed(errors);
|
||||
|
||||
if (!TryGetRequiredDateTime(predicatePayload, "analysisTimestamp", out var analysisTimestamp, errors))
|
||||
return AiCodeGuardParseResult.Failed(errors);
|
||||
|
||||
if (!TryGetRequiredObject(predicatePayload, "scannerConfig", out var scannerConfigElement, errors))
|
||||
return AiCodeGuardParseResult.Failed(errors);
|
||||
|
||||
if (!TryGetRequiredObject(predicatePayload, "inputs", out var inputsElement, errors))
|
||||
return AiCodeGuardParseResult.Failed(errors);
|
||||
|
||||
if (!TryGetRequiredArray(predicatePayload, "findings", out var findingsElement, errors))
|
||||
return AiCodeGuardParseResult.Failed(errors);
|
||||
|
||||
if (!TryGetRequiredObject(predicatePayload, "verdict", out var verdictElement, errors))
|
||||
return AiCodeGuardParseResult.Failed(errors);
|
||||
|
||||
// Parse nested objects
|
||||
var scannerConfig = ParseScannerConfig(scannerConfigElement, errors);
|
||||
if (scannerConfig == null)
|
||||
return AiCodeGuardParseResult.Failed(errors);
|
||||
|
||||
var inputs = ParseInputs(inputsElement, errors);
|
||||
if (inputs == null)
|
||||
return AiCodeGuardParseResult.Failed(errors);
|
||||
|
||||
var findings = ParseFindings(findingsElement, errors);
|
||||
if (findings == null)
|
||||
return AiCodeGuardParseResult.Failed(errors);
|
||||
|
||||
var verdict = ParseVerdict(verdictElement, errors);
|
||||
if (verdict == null)
|
||||
return AiCodeGuardParseResult.Failed(errors);
|
||||
|
||||
// Parse optional overrides
|
||||
ImmutableList<AiCodeGuardOverride>? overrides = null;
|
||||
if (predicatePayload.TryGetProperty("overrides", out var overridesElement) &&
|
||||
overridesElement.ValueKind == JsonValueKind.Array)
|
||||
{
|
||||
overrides = ParseOverrides(overridesElement, errors);
|
||||
}
|
||||
|
||||
if (errors.Count > 0)
|
||||
{
|
||||
return AiCodeGuardParseResult.PartialSuccess(
|
||||
AiCodeGuardPredicate.CreateV1(
|
||||
analysisTimestamp,
|
||||
scannerConfig,
|
||||
inputs,
|
||||
findings,
|
||||
verdict,
|
||||
overrides),
|
||||
errors);
|
||||
}
|
||||
|
||||
return AiCodeGuardParseResult.Success(
|
||||
AiCodeGuardPredicate.CreateV1(
|
||||
analysisTimestamp,
|
||||
scannerConfig,
|
||||
inputs,
|
||||
findings,
|
||||
verdict,
|
||||
overrides));
|
||||
}
|
||||
catch (JsonException ex)
|
||||
{
|
||||
_logger.LogWarning(ex, "Failed to parse AI Code Guard predicate");
|
||||
errors.Add($"JSON parse error: {ex.Message}");
|
||||
return AiCodeGuardParseResult.Failed(errors);
|
||||
}
|
||||
catch (Exception ex)
|
||||
{
|
||||
_logger.LogError(ex, "Unexpected error parsing AI Code Guard predicate");
|
||||
errors.Add($"Unexpected error: {ex.Message}");
|
||||
return AiCodeGuardParseResult.Failed(errors);
|
||||
}
|
||||
}
|
||||
|
||||
/// <inheritdoc />
|
||||
public ValidationResult Validate(AiCodeGuardPredicate predicate)
|
||||
{
|
||||
ArgumentNullException.ThrowIfNull(predicate);
|
||||
|
||||
var errors = new List<string>();
|
||||
|
||||
// Validate schema version
|
||||
if (string.IsNullOrWhiteSpace(predicate.SchemaVersion))
|
||||
errors.Add("schemaVersion is required");
|
||||
|
||||
// Validate timestamp is not in the future (with small tolerance)
|
||||
if (predicate.AnalysisTimestamp > DateTimeOffset.UtcNow.AddMinutes(5))
|
||||
errors.Add("analysisTimestamp cannot be in the future");
|
||||
|
||||
// Validate scanner config
|
||||
if (predicate.ScannerConfig.ConfidenceThreshold < 0 || predicate.ScannerConfig.ConfidenceThreshold > 1)
|
||||
errors.Add("confidenceThreshold must be between 0.0 and 1.0");
|
||||
|
||||
// Validate inputs
|
||||
if (predicate.Inputs.Files.Count == 0)
|
||||
errors.Add("inputs.files cannot be empty");
|
||||
|
||||
if (predicate.Inputs.TotalLinesAnalyzed < 0)
|
||||
errors.Add("inputs.totalLinesAnalyzed cannot be negative");
|
||||
|
||||
// Validate findings
|
||||
foreach (var finding in predicate.Findings)
|
||||
{
|
||||
if (finding.Confidence < 0 || finding.Confidence > 1)
|
||||
errors.Add($"Finding {finding.Id}: confidence must be between 0.0 and 1.0");
|
||||
|
||||
if (finding.Location.StartLine < 1)
|
||||
errors.Add($"Finding {finding.Id}: startLine must be at least 1");
|
||||
|
||||
if (finding.Location.EndLine < finding.Location.StartLine)
|
||||
errors.Add($"Finding {finding.Id}: endLine must be >= startLine");
|
||||
}
|
||||
|
||||
// Validate verdict
|
||||
if (predicate.Verdict.TotalFindings < 0)
|
||||
errors.Add("verdict.totalFindings cannot be negative");
|
||||
|
||||
if (predicate.Verdict.AiGeneratedPercentage.HasValue &&
|
||||
(predicate.Verdict.AiGeneratedPercentage < 0 || predicate.Verdict.AiGeneratedPercentage > 100))
|
||||
errors.Add("verdict.aiGeneratedPercentage must be between 0 and 100");
|
||||
|
||||
// Validate overrides
|
||||
if (predicate.Overrides != null)
|
||||
{
|
||||
var findingIds = predicate.Findings.Select(f => f.Id).ToHashSet();
|
||||
foreach (var @override in predicate.Overrides)
|
||||
{
|
||||
if (!findingIds.Contains(@override.FindingId))
|
||||
errors.Add($"Override references non-existent finding: {@override.FindingId}");
|
||||
|
||||
if (@override.ExpiresAt.HasValue && @override.ExpiresAt < @override.ApprovedAt)
|
||||
errors.Add($"Override for {@override.FindingId}: expiresAt cannot be before approvedAt");
|
||||
}
|
||||
}
|
||||
|
||||
return errors.Count == 0
|
||||
? ValidationResult.Valid()
|
||||
: ValidationResult.Invalid(errors);
|
||||
}
|
||||
|
||||
#region Private Parsing Methods
|
||||
|
||||
private AiCodeGuardScannerConfig? ParseScannerConfig(JsonElement element, List<string> errors)
|
||||
{
|
||||
if (!TryGetRequiredString(element, "scannerVersion", out var scannerVersion, errors, "scannerConfig"))
|
||||
return null;
|
||||
if (!TryGetRequiredString(element, "modelVersion", out var modelVersion, errors, "scannerConfig"))
|
||||
return null;
|
||||
if (!TryGetRequiredDouble(element, "confidenceThreshold", out var threshold, errors, "scannerConfig"))
|
||||
return null;
|
||||
if (!TryGetRequiredStringArray(element, "enabledCategories", out var categories, errors, "scannerConfig"))
|
||||
return null;
|
||||
|
||||
var ruleSets = element.TryGetProperty("ruleSets", out var ruleSetsElement)
|
||||
? ParseStringArray(ruleSetsElement)
|
||||
: null;
|
||||
|
||||
return new AiCodeGuardScannerConfig
|
||||
{
|
||||
ScannerVersion = scannerVersion,
|
||||
ModelVersion = modelVersion,
|
||||
ConfidenceThreshold = threshold,
|
||||
EnabledCategories = categories.ToImmutableList(),
|
||||
RuleSets = ruleSets?.ToImmutableList(),
|
||||
};
|
||||
}
|
||||
|
||||
private AiCodeGuardInputs? ParseInputs(JsonElement element, List<string> errors)
|
||||
{
|
||||
if (!TryGetRequiredObject(element, "repository", out var repoElement, errors, "inputs"))
|
||||
return null;
|
||||
if (!TryGetRequiredArray(element, "files", out var filesElement, errors, "inputs"))
|
||||
return null;
|
||||
if (!TryGetRequiredLong(element, "totalLinesAnalyzed", out var totalLines, errors, "inputs"))
|
||||
return null;
|
||||
|
||||
var repository = ParseRepository(repoElement, errors);
|
||||
if (repository == null) return null;
|
||||
|
||||
var files = ParseFiles(filesElement, errors);
|
||||
if (files == null) return null;
|
||||
|
||||
return new AiCodeGuardInputs
|
||||
{
|
||||
Repository = repository,
|
||||
Files = files.ToImmutableList(),
|
||||
TotalLinesAnalyzed = totalLines,
|
||||
};
|
||||
}
|
||||
|
||||
private AiCodeGuardRepository? ParseRepository(JsonElement element, List<string> errors)
|
||||
{
|
||||
if (!TryGetRequiredString(element, "uri", out var uri, errors, "repository"))
|
||||
return null;
|
||||
if (!TryGetRequiredString(element, "commitSha", out var commitSha, errors, "repository"))
|
||||
return null;
|
||||
|
||||
return new AiCodeGuardRepository
|
||||
{
|
||||
Uri = uri,
|
||||
CommitSha = commitSha,
|
||||
Branch = TryGetOptionalString(element, "branch"),
|
||||
Tag = TryGetOptionalString(element, "tag"),
|
||||
};
|
||||
}
|
||||
|
||||
private List<AiCodeGuardFile>? ParseFiles(JsonElement element, List<string> errors)
|
||||
{
|
||||
var files = new List<AiCodeGuardFile>();
|
||||
foreach (var fileElement in element.EnumerateArray())
|
||||
{
|
||||
if (!TryGetRequiredString(fileElement, "path", out var path, errors, "file"))
|
||||
continue;
|
||||
if (!TryGetRequiredString(fileElement, "digest", out var digest, errors, "file"))
|
||||
continue;
|
||||
if (!TryGetRequiredInt(fileElement, "lineCount", out var lineCount, errors, "file"))
|
||||
continue;
|
||||
|
||||
files.Add(new AiCodeGuardFile
|
||||
{
|
||||
Path = path,
|
||||
Digest = digest,
|
||||
LineCount = lineCount,
|
||||
Language = TryGetOptionalString(fileElement, "language"),
|
||||
});
|
||||
}
|
||||
return files;
|
||||
}
|
||||
|
||||
private List<AiCodeGuardFinding>? ParseFindings(JsonElement element, List<string> errors)
|
||||
{
|
||||
var findings = new List<AiCodeGuardFinding>();
|
||||
foreach (var findingElement in element.EnumerateArray())
|
||||
{
|
||||
var finding = ParseFinding(findingElement, errors);
|
||||
if (finding != null)
|
||||
findings.Add(finding);
|
||||
}
|
||||
return findings;
|
||||
}
|
||||
|
||||
private AiCodeGuardFinding? ParseFinding(JsonElement element, List<string> errors)
|
||||
{
|
||||
if (!TryGetRequiredString(element, "id", out var id, errors, "finding"))
|
||||
return null;
|
||||
if (!TryGetRequiredEnum<AiCodeGuardCategory>(element, "category", out var category, errors, "finding"))
|
||||
return null;
|
||||
if (!TryGetRequiredEnum<AiCodeGuardSeverity>(element, "severity", out var severity, errors, "finding"))
|
||||
return null;
|
||||
if (!TryGetRequiredDouble(element, "confidence", out var confidence, errors, "finding"))
|
||||
return null;
|
||||
if (!TryGetRequiredObject(element, "location", out var locationElement, errors, "finding"))
|
||||
return null;
|
||||
if (!TryGetRequiredString(element, "description", out var description, errors, "finding"))
|
||||
return null;
|
||||
if (!TryGetRequiredString(element, "ruleId", out var ruleId, errors, "finding"))
|
||||
return null;
|
||||
|
||||
var location = ParseLocation(locationElement, errors);
|
||||
if (location == null) return null;
|
||||
|
||||
AiCodeGuardEvidence? evidence = null;
|
||||
if (element.TryGetProperty("evidence", out var evidenceElement) &&
|
||||
evidenceElement.ValueKind == JsonValueKind.Object)
|
||||
{
|
||||
evidence = ParseEvidence(evidenceElement, errors);
|
||||
}
|
||||
|
||||
return new AiCodeGuardFinding
|
||||
{
|
||||
Id = id,
|
||||
Category = category,
|
||||
Severity = severity,
|
||||
Confidence = confidence,
|
||||
Location = location,
|
||||
Description = description,
|
||||
RuleId = ruleId,
|
||||
Evidence = evidence,
|
||||
Remediation = TryGetOptionalString(element, "remediation"),
|
||||
};
|
||||
}
|
||||
|
||||
private AiCodeGuardLocation? ParseLocation(JsonElement element, List<string> errors)
|
||||
{
|
||||
if (!TryGetRequiredString(element, "file", out var file, errors, "location"))
|
||||
return null;
|
||||
if (!TryGetRequiredInt(element, "startLine", out var startLine, errors, "location"))
|
||||
return null;
|
||||
if (!TryGetRequiredInt(element, "endLine", out var endLine, errors, "location"))
|
||||
return null;
|
||||
|
||||
return new AiCodeGuardLocation
|
||||
{
|
||||
File = file,
|
||||
StartLine = startLine,
|
||||
EndLine = endLine,
|
||||
StartColumn = TryGetOptionalInt(element, "startColumn"),
|
||||
EndColumn = TryGetOptionalInt(element, "endColumn"),
|
||||
Snippet = TryGetOptionalString(element, "snippet"),
|
||||
};
|
||||
}
|
||||
|
||||
private AiCodeGuardEvidence? ParseEvidence(JsonElement element, List<string> errors)
|
||||
{
|
||||
if (!TryGetRequiredString(element, "method", out var method, errors, "evidence"))
|
||||
return null;
|
||||
if (!TryGetRequiredStringArray(element, "indicators", out var indicators, errors, "evidence"))
|
||||
return null;
|
||||
|
||||
return new AiCodeGuardEvidence
|
||||
{
|
||||
Method = method,
|
||||
Indicators = indicators.ToImmutableList(),
|
||||
PerplexityScore = TryGetOptionalDouble(element, "perplexityScore"),
|
||||
PatternMatches = element.TryGetProperty("patternMatches", out var patterns)
|
||||
? ParseStringArray(patterns)?.ToImmutableList()
|
||||
: null,
|
||||
};
|
||||
}
|
||||
|
||||
private AiCodeGuardVerdict? ParseVerdict(JsonElement element, List<string> errors)
|
||||
{
|
||||
if (!TryGetRequiredEnum<AiCodeGuardVerdictStatus>(element, "status", out var status, errors, "verdict"))
|
||||
return null;
|
||||
if (!TryGetRequiredInt(element, "totalFindings", out var totalFindings, errors, "verdict"))
|
||||
return null;
|
||||
if (!TryGetRequiredObject(element, "findingsBySeverity", out var bySeverityElement, errors, "verdict"))
|
||||
return null;
|
||||
if (!TryGetRequiredString(element, "message", out var message, errors, "verdict"))
|
||||
return null;
|
||||
|
||||
var bySeverity = new Dictionary<string, int>();
|
||||
foreach (var prop in bySeverityElement.EnumerateObject())
|
||||
{
|
||||
if (prop.Value.TryGetInt32(out var count))
|
||||
bySeverity[prop.Name] = count;
|
||||
}
|
||||
|
||||
        AiCodeGuardRecommendation? recommendation = null;
        if (element.TryGetProperty("recommendation", out var recElement) &&
            Enum.TryParse<AiCodeGuardRecommendation>(recElement.GetString(), true, out var rec))
        {
            recommendation = rec;
        }

        return new AiCodeGuardVerdict
        {
            Status = status,
            TotalFindings = totalFindings,
            FindingsBySeverity = bySeverity.ToImmutableDictionary(),
            AiGeneratedPercentage = TryGetOptionalDouble(element, "aiGeneratedPercentage"),
            Message = message,
            Recommendation = recommendation,
        };
    }

    private ImmutableList<AiCodeGuardOverride>? ParseOverrides(JsonElement element, List<string> errors)
    {
        var overrides = new List<AiCodeGuardOverride>();
        foreach (var overrideElement in element.EnumerateArray())
        {
            var @override = ParseOverride(overrideElement, errors);
            if (@override != null)
                overrides.Add(@override);
        }
        return overrides.ToImmutableList();
    }

    private AiCodeGuardOverride? ParseOverride(JsonElement element, List<string> errors)
    {
        if (!TryGetRequiredString(element, "findingId", out var findingId, errors, "override"))
            return null;
        if (!TryGetRequiredEnum<AiCodeGuardOverrideAction>(element, "action", out var action, errors, "override"))
            return null;
        if (!TryGetRequiredString(element, "justification", out var justification, errors, "override"))
            return null;
        if (!TryGetRequiredString(element, "approvedBy", out var approvedBy, errors, "override"))
            return null;
        if (!TryGetRequiredDateTime(element, "approvedAt", out var approvedAt, errors, "override"))
            return null;

        return new AiCodeGuardOverride
        {
            FindingId = findingId,
            Action = action,
            Justification = justification,
            ApprovedBy = approvedBy,
            ApprovedAt = approvedAt,
            ExpiresAt = TryGetOptionalDateTime(element, "expiresAt"),
        };
    }

    #endregion

    #region Helper Methods

    private static bool TryGetRequiredString(JsonElement element, string propertyName, out string value, List<string> errors, string? context = null)
    {
        value = string.Empty;
        if (!element.TryGetProperty(propertyName, out var prop) || prop.ValueKind != JsonValueKind.String)
        {
            errors.Add(FormatError(context, propertyName, "is required"));
            return false;
        }
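        // Null-forgiving is safe here: the ValueKind check above guarantees GetString() is non-null.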
        value = prop.GetString()!;
        return true;
    }

    private static bool TryGetRequiredInt(JsonElement element, string propertyName, out int value, List<string> errors, string? context = null)
    {
        value = 0;
        if (!element.TryGetProperty(propertyName, out var prop) || !prop.TryGetInt32(out value))
        {
            errors.Add(FormatError(context, propertyName, "is required"));
            return false;
        }
        return true;
    }

    private static bool TryGetRequiredLong(JsonElement element, string propertyName, out long value, List<string> errors, string? context = null)
    {
        value = 0;
        if (!element.TryGetProperty(propertyName, out var prop) || !prop.TryGetInt64(out value))
        {
            errors.Add(FormatError(context, propertyName, "is required"));
            return false;
        }
        return true;
    }

    private static bool TryGetRequiredDouble(JsonElement element, string propertyName, out double value, List<string> errors, string? context = null)
    {
        value = 0;
        if (!element.TryGetProperty(propertyName, out var prop) || !prop.TryGetDouble(out value))
        {
            errors.Add(FormatError(context, propertyName, "is required"));
            return false;
        }
        return true;
    }

    private static bool TryGetRequiredDateTime(JsonElement element, string propertyName, out DateTimeOffset value, List<string> errors, string? context = null)
    {
        value = default;
        if (!element.TryGetProperty(propertyName, out var prop) || !prop.TryGetDateTimeOffset(out value))
        {
            errors.Add(FormatError(context, propertyName, "is required"));
            return false;
        }
        return true;
    }

    private static bool TryGetRequiredObject(JsonElement element, string propertyName, out JsonElement value, List<string> errors, string? context = null)
    {
        value = default;
        if (!element.TryGetProperty(propertyName, out value) || value.ValueKind != JsonValueKind.Object)
        {
            errors.Add(FormatError(context, propertyName, "is required"));
            return false;
        }
        return true;
    }

    private static bool TryGetRequiredArray(JsonElement element, string propertyName, out JsonElement value, List<string> errors, string? context = null)
    {
        value = default;
        if (!element.TryGetProperty(propertyName, out value) || value.ValueKind != JsonValueKind.Array)
        {
            errors.Add(FormatError(context, propertyName, "is required"));
            return false;
        }
        return true;
    }

    private static bool TryGetRequiredStringArray(JsonElement element, string propertyName, out List<string> value, List<string> errors, string? context = null)
    {
        value = new List<string>();
        if (!element.TryGetProperty(propertyName, out var prop) || prop.ValueKind != JsonValueKind.Array)
        {
            errors.Add(FormatError(context, propertyName, "is required"));
            return false;
        }
        value = ParseStringArray(prop) ?? new List<string>();
        return true;
    }

    private static bool TryGetRequiredEnum<T>(JsonElement element, string propertyName, out T value, List<string> errors, string? context = null) where T : struct, Enum
    {
        value = default;
        if (!element.TryGetProperty(propertyName, out var prop) ||
            prop.ValueKind != JsonValueKind.String ||
            !Enum.TryParse(prop.GetString(), true, out value))
        {
            errors.Add(FormatError(context, propertyName, "is required or invalid"));
            return false;
        }
        return true;
    }

    private static string? TryGetOptionalString(JsonElement element, string propertyName)
    {
        if (element.TryGetProperty(propertyName, out var prop) && prop.ValueKind == JsonValueKind.String)
            return prop.GetString();
        return null;
    }

    private static int? TryGetOptionalInt(JsonElement element, string propertyName)
    {
        if (element.TryGetProperty(propertyName, out var prop) && prop.TryGetInt32(out var value))
            return value;
        return null;
    }

    private static double? TryGetOptionalDouble(JsonElement element, string propertyName)
    {
        if (element.TryGetProperty(propertyName, out var prop) && prop.TryGetDouble(out var value))
            return value;
        return null;
    }

    private static DateTimeOffset? TryGetOptionalDateTime(JsonElement element, string propertyName)
    {
        if (element.TryGetProperty(propertyName, out var prop) && prop.TryGetDateTimeOffset(out var value))
            return value;
        return null;
    }

    private static List<string>? ParseStringArray(JsonElement element)
    {
        if (element.ValueKind != JsonValueKind.Array)
            return null;

        var result = new List<string>();
        foreach (var item in element.EnumerateArray())
        {
            if (item.ValueKind == JsonValueKind.String)
                result.Add(item.GetString()!);
        }
        return result;
    }

    private static string FormatError(string? context, string propertyName, string message)
    {
        return string.IsNullOrEmpty(context)
            ? $"{propertyName} {message}"
            : $"{context}.{propertyName} {message}";
    }

    #endregion
}

#region Interfaces and Models

/// <summary>
/// Interface for AI Code Guard predicate parser.
/// </summary>
public interface IAiCodeGuardPredicateParser
{
    /// <summary>
    /// Predicate type URI this parser handles.
    /// </summary>
    string PredicateType { get; }

    /// <summary>
    /// Parse a predicate payload.
    /// </summary>
    AiCodeGuardParseResult Parse(JsonElement predicatePayload);

    /// <summary>
    /// Validate a parsed predicate.
    /// </summary>
    ValidationResult Validate(AiCodeGuardPredicate predicate);
}

/// <summary>
/// Result of parsing an AI Code Guard predicate.
/// </summary>
public sealed record AiCodeGuardParseResult
{
    public bool IsSuccess { get; init; }
    public bool IsPartialSuccess { get; init; }
    public AiCodeGuardPredicate? Predicate { get; init; }
    public IReadOnlyList<string> Errors { get; init; } = Array.Empty<string>();

    public static AiCodeGuardParseResult Success(AiCodeGuardPredicate predicate) =>
        new() { IsSuccess = true, Predicate = predicate };

    public static AiCodeGuardParseResult PartialSuccess(AiCodeGuardPredicate predicate, IEnumerable<string> errors) =>
        new() { IsSuccess = true, IsPartialSuccess = true, Predicate = predicate, Errors = errors.ToList() };

    public static AiCodeGuardParseResult Failed(IEnumerable<string> errors) =>
        new() { IsSuccess = false, Errors = errors.ToList() };
}

/// <summary>
/// Result of validating an AI Code Guard predicate.
/// </summary>
public sealed record ValidationResult
{
    public bool IsValid { get; init; }
    public IReadOnlyList<string> Errors { get; init; } = Array.Empty<string>();

    public static ValidationResult Valid() => new() { IsValid = true };
    public static ValidationResult Invalid(IEnumerable<string> errors) =>
        new() { IsValid = false, Errors = errors.ToList() };
}

#endregion
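
// Usage sketch: how a consumer might drive the parser above. The `payload` element is
// assumed to be the decoded predicate body of a DSSE envelope; everything else is the
// surface defined in this file.
//
//     var parser = new AiCodeGuardPredicateParser(logger);
//     var result = parser.Parse(payload);
//     if (result.IsSuccess && result.Predicate is { } predicate)
//     {
//         var validation = parser.Validate(predicate);
//         if (!validation.IsValid)
//         {
//             // Surface validation.Errors to the caller.
//         }
//     }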
@@ -0,0 +1,329 @@
// -----------------------------------------------------------------------------
// PostgresRekorCheckpointStore.cs
// Sprint: SPRINT_20260112_017_ATTESTOR_periodic_rekor_sync
// Task: REKOR-SYNC-002
// Description: PostgreSQL implementation of IRekorCheckpointStore.
// -----------------------------------------------------------------------------

using System;
using System.Collections.Generic;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Options;
using Npgsql;
using NpgsqlTypes;

namespace StellaOps.Attestor.Core.Rekor;

/// <summary>
/// PostgreSQL implementation of the Rekor checkpoint store.
/// </summary>
public sealed class PostgresRekorCheckpointStore : IRekorCheckpointStore
{
    private readonly string _connectionString;
    private readonly PostgresCheckpointStoreOptions _options;
    private readonly ILogger<PostgresRekorCheckpointStore> _logger;

    public PostgresRekorCheckpointStore(
        IOptions<PostgresCheckpointStoreOptions> options,
        ILogger<PostgresRekorCheckpointStore> logger)
    {
        _options = options?.Value ?? throw new ArgumentNullException(nameof(options));
        _connectionString = _options.ConnectionString
            ?? throw new InvalidOperationException("ConnectionString is required");
        _logger = logger ?? throw new ArgumentNullException(nameof(logger));
    }

    /// <inheritdoc />
    public async Task<StoredCheckpoint?> GetLatestCheckpointAsync(
        string origin,
        CancellationToken cancellationToken = default)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(origin);

        const string sql = @"
            SELECT checkpoint_id, origin, tree_size, root_hash, raw_checkpoint,
                   signature, fetched_at, verified, verified_at
            FROM attestor.rekor_checkpoints
            WHERE origin = @origin
            ORDER BY tree_size DESC
            LIMIT 1";

        await using var conn = await OpenConnectionAsync(cancellationToken);
        await using var cmd = new NpgsqlCommand(sql, conn);
        cmd.Parameters.AddWithValue("@origin", origin);

        await using var reader = await cmd.ExecuteReaderAsync(cancellationToken);
        if (await reader.ReadAsync(cancellationToken))
        {
            return MapCheckpoint(reader);
        }

        return null;
    }

    /// <inheritdoc />
    public async Task<StoredCheckpoint?> GetCheckpointAtSizeAsync(
        string origin,
        long treeSize,
        CancellationToken cancellationToken = default)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(origin);

        const string sql = @"
            SELECT checkpoint_id, origin, tree_size, root_hash, raw_checkpoint,
                   signature, fetched_at, verified, verified_at
            FROM attestor.rekor_checkpoints
            WHERE origin = @origin AND tree_size = @tree_size";

        await using var conn = await OpenConnectionAsync(cancellationToken);
        await using var cmd = new NpgsqlCommand(sql, conn);
        cmd.Parameters.AddWithValue("@origin", origin);
        cmd.Parameters.AddWithValue("@tree_size", treeSize);

        await using var reader = await cmd.ExecuteReaderAsync(cancellationToken);
        if (await reader.ReadAsync(cancellationToken))
        {
            return MapCheckpoint(reader);
        }

        return null;
    }

    /// <inheritdoc />
    public async Task<bool> StoreCheckpointAsync(
        StoredCheckpoint checkpoint,
        CancellationToken cancellationToken = default)
    {
        ArgumentNullException.ThrowIfNull(checkpoint);

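        // The (origin, tree_size) unique constraint makes this an idempotent upsert:
        // re-fetching the same checkpoint refreshes fetched_at and verification state
        // without ever rewriting the original root hash or signature.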
        const string sql = @"
            INSERT INTO attestor.rekor_checkpoints
                (checkpoint_id, origin, tree_size, root_hash, raw_checkpoint,
                 signature, fetched_at, verified, verified_at)
            VALUES
                (@checkpoint_id, @origin, @tree_size, @root_hash, @raw_checkpoint,
                 @signature, @fetched_at, @verified, @verified_at)
            ON CONFLICT (origin, tree_size) DO UPDATE SET
                fetched_at = EXCLUDED.fetched_at,
                verified = EXCLUDED.verified,
                verified_at = EXCLUDED.verified_at
            RETURNING checkpoint_id";

        await using var conn = await OpenConnectionAsync(cancellationToken);
        await using var cmd = new NpgsqlCommand(sql, conn);

        cmd.Parameters.AddWithValue("@checkpoint_id", checkpoint.CheckpointId);
        cmd.Parameters.AddWithValue("@origin", checkpoint.Origin);
        cmd.Parameters.AddWithValue("@tree_size", checkpoint.TreeSize);
        cmd.Parameters.AddWithValue("@root_hash", checkpoint.RootHash);
        cmd.Parameters.AddWithValue("@raw_checkpoint", checkpoint.RawCheckpoint);
        cmd.Parameters.AddWithValue("@signature", checkpoint.Signature);
        cmd.Parameters.AddWithValue("@fetched_at", checkpoint.FetchedAt.ToUniversalTime());
        cmd.Parameters.AddWithValue("@verified", checkpoint.Verified);
        cmd.Parameters.AddWithValue("@verified_at",
            checkpoint.VerifiedAt.HasValue
                ? checkpoint.VerifiedAt.Value.ToUniversalTime()
                : (object)DBNull.Value);

        var result = await cmd.ExecuteScalarAsync(cancellationToken);

        _logger.LogDebug(
            "Stored checkpoint for {Origin} at tree size {TreeSize}",
            checkpoint.Origin,
            checkpoint.TreeSize);

        return result != null;
    }

    /// <inheritdoc />
    public async Task MarkVerifiedAsync(
        Guid checkpointId,
        CancellationToken cancellationToken = default)
    {
        const string sql = @"
            UPDATE attestor.rekor_checkpoints
            SET verified = TRUE, verified_at = @verified_at
            WHERE checkpoint_id = @checkpoint_id";

        await using var conn = await OpenConnectionAsync(cancellationToken);
        await using var cmd = new NpgsqlCommand(sql, conn);

        cmd.Parameters.AddWithValue("@checkpoint_id", checkpointId);
        cmd.Parameters.AddWithValue("@verified_at", DateTimeOffset.UtcNow);

        var rowsAffected = await cmd.ExecuteNonQueryAsync(cancellationToken);

        if (rowsAffected == 0)
        {
            _logger.LogWarning("Checkpoint {CheckpointId} not found for verification", checkpointId);
        }
        else
        {
            _logger.LogDebug("Marked checkpoint {CheckpointId} as verified", checkpointId);
        }
    }

    /// <inheritdoc />
    public async Task<IReadOnlyList<StoredCheckpoint>> GetCheckpointsInRangeAsync(
        string origin,
        long fromSize,
        long toSize,
        CancellationToken cancellationToken = default)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(origin);

        const string sql = @"
            SELECT checkpoint_id, origin, tree_size, root_hash, raw_checkpoint,
                   signature, fetched_at, verified, verified_at
            FROM attestor.rekor_checkpoints
            WHERE origin = @origin
              AND tree_size >= @from_size
              AND tree_size <= @to_size
            ORDER BY tree_size ASC";

        var results = new List<StoredCheckpoint>();

        await using var conn = await OpenConnectionAsync(cancellationToken);
        await using var cmd = new NpgsqlCommand(sql, conn);

        cmd.Parameters.AddWithValue("@origin", origin);
        cmd.Parameters.AddWithValue("@from_size", fromSize);
        cmd.Parameters.AddWithValue("@to_size", toSize);

        await using var reader = await cmd.ExecuteReaderAsync(cancellationToken);
        while (await reader.ReadAsync(cancellationToken))
        {
            results.Add(MapCheckpoint(reader));
        }

        return results;
    }

    /// <inheritdoc />
    public async Task<int> PruneOldCheckpointsAsync(
        DateTimeOffset olderThan,
        bool keepLatestPerOrigin = true,
        CancellationToken cancellationToken = default)
    {
        await using var conn = await OpenConnectionAsync(cancellationToken);

        string sql;
        if (keepLatestPerOrigin)
        {
            // Delete old checkpoints but keep the highest-tree-size checkpoint per
            // origin: a row is only deleted if a newer (larger tree) checkpoint
            // exists for the same origin.
            sql = @"
                DELETE FROM attestor.rekor_checkpoints c
                WHERE c.fetched_at < @older_than
                  AND EXISTS (
                      SELECT 1
                      FROM attestor.rekor_checkpoints newer
                      WHERE newer.origin = c.origin
                        AND newer.tree_size > c.tree_size
                  )";
        }
        else
        {
            sql = @"
                DELETE FROM attestor.rekor_checkpoints
                WHERE fetched_at < @older_than";
        }

        await using var cmd = new NpgsqlCommand(sql, conn);
        cmd.Parameters.AddWithValue("@older_than", olderThan.ToUniversalTime());

        var rowsAffected = await cmd.ExecuteNonQueryAsync(cancellationToken);

        _logger.LogInformation(
            "Pruned {Count} old checkpoints (older than {OlderThan})",
            rowsAffected,
            olderThan);

        return rowsAffected;
    }

    /// <summary>
    /// Initializes the database schema if it doesn't exist.
    /// </summary>
    public async Task InitializeSchemaAsync(CancellationToken cancellationToken = default)
    {
        const string sql = @"
            CREATE SCHEMA IF NOT EXISTS attestor;

            CREATE TABLE IF NOT EXISTS attestor.rekor_checkpoints (
                checkpoint_id   UUID PRIMARY KEY,
                origin          TEXT NOT NULL,
                tree_size       BIGINT NOT NULL,
                root_hash       BYTEA NOT NULL,
                raw_checkpoint  TEXT NOT NULL,
                signature       BYTEA NOT NULL,
                fetched_at      TIMESTAMPTZ NOT NULL DEFAULT NOW(),
                verified        BOOLEAN NOT NULL DEFAULT FALSE,
                verified_at     TIMESTAMPTZ,

                UNIQUE(origin, tree_size)
            );

            CREATE INDEX IF NOT EXISTS idx_rekor_checkpoints_origin_tree_size
                ON attestor.rekor_checkpoints(origin, tree_size DESC);

            CREATE INDEX IF NOT EXISTS idx_rekor_checkpoints_fetched_at
                ON attestor.rekor_checkpoints(fetched_at);";

        await using var conn = await OpenConnectionAsync(cancellationToken);
        await using var cmd = new NpgsqlCommand(sql, conn);

        await cmd.ExecuteNonQueryAsync(cancellationToken);

        _logger.LogInformation("Initialized Rekor checkpoint store schema");
    }

    private async Task<NpgsqlConnection> OpenConnectionAsync(CancellationToken cancellationToken)
    {
        var conn = new NpgsqlConnection(_connectionString);
        await conn.OpenAsync(cancellationToken);
        return conn;
    }

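    // Npgsql (6+) materializes TIMESTAMPTZ columns as UTC DateTime values, so the
    // implicit DateTime -> DateTimeOffset conversions below yield a +00:00 offset.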
    private static StoredCheckpoint MapCheckpoint(NpgsqlDataReader reader)
    {
        return new StoredCheckpoint
        {
            CheckpointId = reader.GetGuid(0),
            Origin = reader.GetString(1),
            TreeSize = reader.GetInt64(2),
            RootHash = (byte[])reader[3],
            RawCheckpoint = reader.GetString(4),
            Signature = (byte[])reader[5],
            FetchedAt = reader.GetDateTime(6),
            Verified = reader.GetBoolean(7),
            VerifiedAt = reader.IsDBNull(8) ? null : reader.GetDateTime(8),
        };
    }
}

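// Note: the SQL above hard-codes the "attestor" schema, so the Schema option below
// is not currently consulted by PostgresRekorCheckpointStore itself.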
/// <summary>
/// Options for PostgreSQL checkpoint store.
/// </summary>
public sealed class PostgresCheckpointStoreOptions
{
    /// <summary>
    /// Connection string for PostgreSQL database.
    /// </summary>
    public string? ConnectionString { get; set; }

    /// <summary>
    /// Schema name for checkpoint tables.
    /// </summary>
    public string Schema { get; set; } = "attestor";

    /// <summary>
    /// Automatically initialize schema on startup.
    /// </summary>
    public bool AutoInitializeSchema { get; set; } = true;
}
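
// Wiring sketch (hypothetical host code, not part of this commit): the store follows
// the standard options pattern, so registration would look roughly like the below.
// The "Attestor:CheckpointStore" section name is an assumption for illustration.
//
//     services.Configure<PostgresCheckpointStoreOptions>(
//         configuration.GetSection("Attestor:CheckpointStore"));
//     services.AddSingleton<IRekorCheckpointStore, PostgresRekorCheckpointStore>();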
@@ -0,0 +1,642 @@
// -----------------------------------------------------------------------------
// AiCodeGuardPredicateTests.cs
// Sprint: SPRINT_20260112_010_ATTESTOR_ai_code_guard_predicate
// Task: ATTESTOR-AIGUARD-003
// Description: Tests for AI Code Guard predicate serialization and verification.
// -----------------------------------------------------------------------------

using System;
using System.Collections.Immutable;
using System.Text.Json;
using Microsoft.Extensions.Logging;
using Moq;
using Xunit;

namespace StellaOps.Attestor.Tests.Predicates.AiCodeGuard;

/// <summary>
/// Unit tests for AI Code Guard predicate.
/// </summary>
[Trait("Category", "Unit")]
public sealed class AiCodeGuardPredicateTests
{
    private readonly AiCodeGuardPredicateParser _parser;

    public AiCodeGuardPredicateTests()
    {
        _parser = new AiCodeGuardPredicateParser(
            Mock.Of<ILogger<AiCodeGuardPredicateParser>>());
    }

    #region Predicate Type Tests

    [Fact]
    public void PredicateType_HasCorrectUri()
    {
        Assert.Equal(
            "https://stella-ops.org/predicates/ai-code-guard/v1",
            AiCodeGuardPredicateTypes.AiCodeGuardV1);
    }

    [Fact]
    public void PredicateType_MatchesStaticConstant()
    {
        Assert.Equal(
            AiCodeGuardPredicateTypes.AiCodeGuardV1,
            AiCodeGuardPredicate.PredicateType);
    }

    [Fact]
    public void Parser_PredicateType_MatchesConstant()
    {
        Assert.Equal(
            AiCodeGuardPredicateTypes.AiCodeGuardV1,
            _parser.PredicateType);
    }

    #endregion

    #region Serialization Tests

    [Fact]
    public void Predicate_SerializesToDeterministicJson()
    {
        // Arrange
        var predicate = CreateValidPredicate();

        // Act
        var json1 = JsonSerializer.Serialize(predicate, GetSerializerOptions());
        var json2 = JsonSerializer.Serialize(predicate, GetSerializerOptions());

        // Assert - serialization must be deterministic
        Assert.Equal(json1, json2);
    }

    [Fact]
    public void Predicate_RoundTrips()
    {
        // Arrange
        var original = CreateValidPredicate();

        // Act
        var json = JsonSerializer.Serialize(original, GetSerializerOptions());
        var element = JsonDocument.Parse(json).RootElement;
        var parseResult = _parser.Parse(element);

        // Assert
        Assert.True(parseResult.IsSuccess);
        Assert.NotNull(parseResult.Predicate);
        Assert.Equal(original.SchemaVersion, parseResult.Predicate.SchemaVersion);
        Assert.Equal(original.Findings.Count, parseResult.Predicate.Findings.Count);
        Assert.Equal(original.Verdict.Status, parseResult.Predicate.Verdict.Status);
    }

    [Fact]
    public void Predicate_SerializesEnumsAsStrings()
    {
        // Arrange
        var predicate = CreateValidPredicate();

        // Act
        var json = JsonSerializer.Serialize(predicate, GetSerializerOptions());

        // Assert
        Assert.Contains("\"AiGenerated\"", json);
        Assert.Contains("\"High\"", json);
        Assert.Contains("\"Pass\"", json);
    }

    #endregion

    #region Parse Tests

    [Fact]
    public void Parse_ValidPredicate_ReturnsSuccess()
    {
        // Arrange
        var json = GetValidPredicateJson();
        var element = JsonDocument.Parse(json).RootElement;

        // Act
        var result = _parser.Parse(element);

        // Assert
        Assert.True(result.IsSuccess);
        Assert.NotNull(result.Predicate);
        Assert.Equal("1.0", result.Predicate.SchemaVersion);
    }

    [Fact]
    public void Parse_MissingSchemaVersion_ReturnsFailed()
    {
        // Arrange
        var json = """
            {
              "analysisTimestamp": "2026-01-15T12:00:00Z",
              "scannerConfig": {},
              "inputs": {},
              "findings": [],
              "verdict": {}
            }
            """;
        var element = JsonDocument.Parse(json).RootElement;

        // Act
        var result = _parser.Parse(element);

        // Assert
        Assert.False(result.IsSuccess);
        Assert.Contains(result.Errors, e => e.Contains("schemaVersion"));
    }

    [Fact]
    public void Parse_InvalidCategory_ReturnsFailed()
    {
        // Arrange
        var json = GetValidPredicateJson()
            .Replace("\"AiGenerated\"", "\"InvalidCategory\"");
        var element = JsonDocument.Parse(json).RootElement;

        // Act
        var result = _parser.Parse(element);

        // Assert
        Assert.False(result.IsSuccess);
        Assert.Contains(result.Errors, e => e.Contains("category"));
    }

    [Fact]
    public void Parse_InvalidSeverity_ReturnsFailed()
    {
        // Arrange
        var json = GetValidPredicateJson()
            .Replace("\"High\"", "\"SuperHigh\"");
        var element = JsonDocument.Parse(json).RootElement;

        // Act
        var result = _parser.Parse(element);

        // Assert
        Assert.False(result.IsSuccess);
        Assert.Contains(result.Errors, e => e.Contains("severity"));
    }

    [Fact]
    public void Parse_EmptyFindings_ReturnsSuccess()
    {
        // Arrange - empty findings is valid (clean scan)
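        // Note: the replace below removes the findings block by string match, so
        // GetFindingsJson() must match that block in GetValidPredicateJson()
        // character-for-character, indentation included.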
        var json = GetValidPredicateJson()
            .Replace(GetFindingsJson(), "[]");
        var element = JsonDocument.Parse(json).RootElement;

        // Act
        var result = _parser.Parse(element);

        // Assert
        Assert.True(result.IsSuccess);
        Assert.NotNull(result.Predicate);
        Assert.Empty(result.Predicate.Findings);
    }

    [Fact]
    public void Parse_WithOverrides_ParsesCorrectly()
    {
        // Arrange
        var json = GetValidPredicateJsonWithOverrides();
        var element = JsonDocument.Parse(json).RootElement;

        // Act
        var result = _parser.Parse(element);

        // Assert
        Assert.True(result.IsSuccess);
        Assert.NotNull(result.Predicate);
        Assert.NotNull(result.Predicate.Overrides);
        Assert.Single(result.Predicate.Overrides);
        Assert.Equal("finding-001", result.Predicate.Overrides[0].FindingId);
    }

    #endregion

    #region Validation Tests

    [Fact]
    public void Validate_ValidPredicate_ReturnsValid()
    {
        // Arrange
        var predicate = CreateValidPredicate();

        // Act
        var result = _parser.Validate(predicate);

        // Assert
        Assert.True(result.IsValid);
        Assert.Empty(result.Errors);
    }

    [Fact]
    public void Validate_FutureTimestamp_ReturnsInvalid()
    {
        // Arrange
        var predicate = CreateValidPredicate() with
        {
            AnalysisTimestamp = DateTimeOffset.UtcNow.AddHours(1)
        };

        // Act
        var result = _parser.Validate(predicate);

        // Assert
        Assert.False(result.IsValid);
        Assert.Contains(result.Errors, e => e.Contains("future"));
    }

    [Fact]
    public void Validate_InvalidConfidenceThreshold_ReturnsInvalid()
    {
        // Arrange
        var predicate = CreateValidPredicate() with
        {
            ScannerConfig = CreateValidPredicate().ScannerConfig with
            {
                ConfidenceThreshold = 1.5 // Invalid: > 1.0
            }
        };

        // Act
        var result = _parser.Validate(predicate);

        // Assert
        Assert.False(result.IsValid);
        Assert.Contains(result.Errors, e => e.Contains("confidenceThreshold"));
    }

    [Fact]
    public void Validate_InvalidFindingConfidence_ReturnsInvalid()
    {
        // Arrange
        var finding = CreateValidFinding() with { Confidence = -0.1 };
        var predicate = CreateValidPredicate() with
        {
            Findings = ImmutableList.Create(finding)
        };

        // Act
        var result = _parser.Validate(predicate);

        // Assert
        Assert.False(result.IsValid);
        Assert.Contains(result.Errors, e => e.Contains("confidence"));
    }

    [Fact]
    public void Validate_InvalidLineRange_ReturnsInvalid()
    {
        // Arrange
        var finding = CreateValidFinding() with
        {
            Location = new AiCodeGuardLocation
            {
                File = "test.cs",
                StartLine = 10,
                EndLine = 5 // Invalid: endLine < startLine
            }
        };
        var predicate = CreateValidPredicate() with
        {
            Findings = ImmutableList.Create(finding)
        };

        // Act
        var result = _parser.Validate(predicate);

        // Assert
        Assert.False(result.IsValid);
        Assert.Contains(result.Errors, e => e.Contains("endLine"));
    }

    [Fact]
    public void Validate_OverrideReferencesNonExistentFinding_ReturnsInvalid()
    {
        // Arrange
        var predicate = CreateValidPredicate() with
        {
            Overrides = ImmutableList.Create(new AiCodeGuardOverride
            {
                FindingId = "non-existent-finding",
                Action = AiCodeGuardOverrideAction.Suppress,
                Justification = "Test",
                ApprovedBy = "test@example.com",
                ApprovedAt = DateTimeOffset.UtcNow,
            })
        };

        // Act
        var result = _parser.Validate(predicate);

        // Assert
        Assert.False(result.IsValid);
        Assert.Contains(result.Errors, e => e.Contains("non-existent finding"));
    }

    [Fact]
    public void Validate_InvalidAiGeneratedPercentage_ReturnsInvalid()
    {
        // Arrange
        var predicate = CreateValidPredicate() with
        {
            Verdict = CreateValidPredicate().Verdict with
            {
                AiGeneratedPercentage = 150 // Invalid: > 100
            }
        };

        // Act
        var result = _parser.Validate(predicate);

        // Assert
        Assert.False(result.IsValid);
        Assert.Contains(result.Errors, e => e.Contains("aiGeneratedPercentage"));
    }

    #endregion

    #region Fixture Methods

    private static AiCodeGuardPredicate CreateValidPredicate()
    {
        return AiCodeGuardPredicate.CreateV1(
            analysisTimestamp: new DateTimeOffset(2026, 1, 15, 12, 0, 0, TimeSpan.Zero),
            scannerConfig: new AiCodeGuardScannerConfig
            {
                ScannerVersion = "1.0.0",
                ModelVersion = "gpt-detector-v2",
                ConfidenceThreshold = 0.7,
                EnabledCategories = ImmutableList.Create("AiGenerated", "InsecurePattern"),
                RuleSets = ImmutableList.Create("default", "security"),
            },
            inputs: new AiCodeGuardInputs
            {
                Repository = new AiCodeGuardRepository
                {
                    Uri = "https://github.com/example/repo",
                    CommitSha = "abc123def456",
                    Branch = "main",
                },
                Files = ImmutableList.Create(
                    new AiCodeGuardFile
                    {
                        Path = "src/Service.cs",
                        Digest = "sha256:abc123",
                        LineCount = 150,
                        Language = "csharp",
                    }),
                TotalLinesAnalyzed = 150,
            },
            findings: ImmutableList.Create(CreateValidFinding()),
            verdict: new AiCodeGuardVerdict
            {
                Status = AiCodeGuardVerdictStatus.Pass,
                TotalFindings = 1,
                FindingsBySeverity = ImmutableDictionary<string, int>.Empty
                    .Add("High", 1),
                AiGeneratedPercentage = 25.5,
                Message = "Analysis complete. 1 finding detected.",
                Recommendation = AiCodeGuardRecommendation.RequireReview,
            });
    }

    private static AiCodeGuardFinding CreateValidFinding()
    {
        return new AiCodeGuardFinding
        {
            Id = "finding-001",
            Category = AiCodeGuardCategory.AiGenerated,
            Severity = AiCodeGuardSeverity.High,
            Confidence = 0.85,
            Location = new AiCodeGuardLocation
            {
                File = "src/Service.cs",
                StartLine = 45,
                EndLine = 67,
                StartColumn = 1,
                EndColumn = 80,
                Snippet = "public void Process() { ... }",
            },
            Description = "Code block likely generated by AI assistant",
            RuleId = "AICG-001",
            Evidence = new AiCodeGuardEvidence
            {
                Method = "perplexity-analysis",
                Indicators = ImmutableList.Create(
                    "Low perplexity score",
                    "Characteristic formatting"),
                PerplexityScore = 12.5,
                PatternMatches = ImmutableList.Create("copilot-pattern-7"),
            },
            Remediation = "Review code for security vulnerabilities",
        };
    }

    private static string GetValidPredicateJson()
    {
        return """
            {
              "schemaVersion": "1.0",
              "analysisTimestamp": "2026-01-15T12:00:00Z",
              "scannerConfig": {
                "scannerVersion": "1.0.0",
                "modelVersion": "gpt-detector-v2",
                "confidenceThreshold": 0.7,
                "enabledCategories": ["AiGenerated", "InsecurePattern"],
                "ruleSets": ["default", "security"]
              },
              "inputs": {
                "repository": {
                  "uri": "https://github.com/example/repo",
                  "commitSha": "abc123def456",
                  "branch": "main"
                },
                "files": [{
                  "path": "src/Service.cs",
                  "digest": "sha256:abc123",
                  "lineCount": 150,
                  "language": "csharp"
                }],
                "totalLinesAnalyzed": 150
              },
              "findings": [{
                "id": "finding-001",
                "category": "AiGenerated",
                "severity": "High",
                "confidence": 0.85,
                "location": {
                  "file": "src/Service.cs",
                  "startLine": 45,
                  "endLine": 67
                },
                "description": "Code block likely generated by AI assistant",
                "ruleId": "AICG-001",
                "evidence": {
                  "method": "perplexity-analysis",
                  "indicators": ["Low perplexity score"],
                  "perplexityScore": 12.5
                }
              }],
              "verdict": {
                "status": "Pass",
                "totalFindings": 1,
                "findingsBySeverity": { "High": 1 },
                "aiGeneratedPercentage": 25.5,
                "message": "Analysis complete",
                "recommendation": "RequireReview"
              }
            }
            """;
    }

    private static string GetFindingsJson()
    {
        return """
            [{
                "id": "finding-001",
                "category": "AiGenerated",
                "severity": "High",
                "confidence": 0.85,
                "location": {
                  "file": "src/Service.cs",
                  "startLine": 45,
                  "endLine": 67
                },
                "description": "Code block likely generated by AI assistant",
                "ruleId": "AICG-001",
                "evidence": {
                  "method": "perplexity-analysis",
                  "indicators": ["Low perplexity score"],
                  "perplexityScore": 12.5
                }
              }]
            """;
    }

    private static string GetValidPredicateJsonWithOverrides()
    {
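        // Splice an overrides array in by trimming the base document's outer closing
        // brace; this assumes GetValidPredicateJson() ends with a single '}' line.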
        return GetValidPredicateJson().TrimEnd('}') +
            """
            ,
              "overrides": [{
                "findingId": "finding-001",
                "action": "AcceptRisk",
                "justification": "Reviewed and approved by security team",
                "approvedBy": "security@example.com",
                "approvedAt": "2026-01-15T14:00:00Z",
                "expiresAt": "2026-02-15T14:00:00Z"
              }]
            }
            """;
    }

    private static JsonSerializerOptions GetSerializerOptions()
    {
        return new JsonSerializerOptions
        {
            PropertyNamingPolicy = JsonNamingPolicy.CamelCase,
            WriteIndented = false,
            DefaultIgnoreCondition = System.Text.Json.Serialization.JsonIgnoreCondition.WhenWritingNull,
        };
    }

    #endregion
}

/// <summary>
/// DSSE fixture integration tests for AI Code Guard predicate.
/// </summary>
[Trait("Category", "Integration")]
public sealed class AiCodeGuardDsseFixtureTests
{
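    // These tests pin determinism for this serializer configuration only: repeated
    // serialization of the same object is stable, which is not the same claim as
    // cross-implementation canonical JSON (e.g. RFC 8785 key ordering).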
    [Fact]
    public void DssePayload_CanonicalJsonProducesDeterministicHash()
    {
        // Arrange
        var predicate = CreatePredicateForFixture();
        var options = new JsonSerializerOptions
        {
            PropertyNamingPolicy = JsonNamingPolicy.CamelCase,
            WriteIndented = false,
            DefaultIgnoreCondition = System.Text.Json.Serialization.JsonIgnoreCondition.WhenWritingNull,
        };

        // Act
        var json1 = JsonSerializer.Serialize(predicate, options);
        var json2 = JsonSerializer.Serialize(predicate, options);

        var hash1 = ComputeSha256(json1);
        var hash2 = ComputeSha256(json2);

        // Assert - canonical JSON must produce identical hashes
        Assert.Equal(hash1, hash2);
    }

    [Fact]
    public void DssePayload_FixtureHashMatchesExpected()
    {
        // Arrange - using fixed timestamp to ensure deterministic output
        var predicate = CreatePredicateForFixture();
        var options = new JsonSerializerOptions
        {
            PropertyNamingPolicy = JsonNamingPolicy.CamelCase,
            WriteIndented = false,
            DefaultIgnoreCondition = System.Text.Json.Serialization.JsonIgnoreCondition.WhenWritingNull,
        };

        // Act
        var json = JsonSerializer.Serialize(predicate, options);
        var hash = ComputeSha256(json);

        // Assert - fixture hash should be stable
        // Note: This is a golden test - update expected hash when schema changes
        Assert.NotEmpty(hash);
        Assert.StartsWith("sha256:", hash);
    }

    private static AiCodeGuardPredicate CreatePredicateForFixture()
    {
        // Use fixed values for deterministic fixture
        return AiCodeGuardPredicate.CreateV1(
            analysisTimestamp: new DateTimeOffset(2026, 1, 15, 12, 0, 0, TimeSpan.Zero),
            scannerConfig: new AiCodeGuardScannerConfig
            {
                ScannerVersion = "1.0.0",
                ModelVersion = "fixture-model-v1",
                ConfidenceThreshold = 0.75,
                EnabledCategories = ImmutableList.Create("AiGenerated"),
            },
            inputs: new AiCodeGuardInputs
            {
                Repository = new AiCodeGuardRepository
                {
                    Uri = "https://example.com/repo",
                    CommitSha = "0000000000000000000000000000000000000000",
                },
                Files = ImmutableList.Create(new AiCodeGuardFile
                {
                    Path = "fixture.cs",
                    Digest = "sha256:0000000000000000000000000000000000000000000000000000000000000000",
                    LineCount = 100,
                }),
                TotalLinesAnalyzed = 100,
            },
            findings: ImmutableList<AiCodeGuardFinding>.Empty,
            verdict: new AiCodeGuardVerdict
            {
                Status = AiCodeGuardVerdictStatus.Pass,
                TotalFindings = 0,
                FindingsBySeverity = ImmutableDictionary<string, int>.Empty,
                Message = "Clean scan",
            });
    }

    private static string ComputeSha256(string input)
    {
        using var sha256 = System.Security.Cryptography.SHA256.Create();
        var bytes = System.Text.Encoding.UTF8.GetBytes(input);
        var hash = sha256.ComputeHash(bytes);
        return "sha256:" + Convert.ToHexString(hash).ToLowerInvariant();
    }
}
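
// Sketch: when these predicate payloads are wrapped in DSSE envelopes, the bytes that
// are actually signed are the pre-authentication encoding (PAE) of payload type and
// body, per the DSSE v1 spec. This helper is illustrative, not part of the commit:
//
//     static byte[] Pae(string payloadType, byte[] payload)
//     {
//         // PAE(type, body) = "DSSEv1" SP LEN(type) SP type SP LEN(body) SP body
//         var typeBytes = System.Text.Encoding.UTF8.GetBytes(payloadType);
//         var header = System.Text.Encoding.UTF8.GetBytes(
//             $"DSSEv1 {typeBytes.Length} {payloadType} {payload.Length} ");
//         var buffer = new byte[header.Length + payload.Length];
//         header.CopyTo(buffer, 0);
//         payload.CopyTo(buffer, header.Length);
//         return buffer;
//     }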