feat: add bulk triage view component and related stories
- Exported BulkTriageViewComponent and its related types from findings module. - Created a new accessibility test suite for score components using axe-core. - Introduced design tokens for score components to standardize styling. - Enhanced score breakdown popover for mobile responsiveness with drag handle. - Added date range selector functionality to score history chart component. - Implemented unit tests for date range selector in score history chart. - Created Storybook stories for bulk triage view and score history chart with date range selector.
This commit is contained in:
@@ -0,0 +1,729 @@
|
||||
// -----------------------------------------------------------------------------
|
||||
// CachePerformanceBenchmarkTests.cs
|
||||
// Sprint: SPRINT_8200_0013_0001_GW_valkey_advisory_cache
|
||||
// Task: VCACHE-8200-030
|
||||
// Description: Performance benchmark tests to verify p99 < 20ms read latency
|
||||
// -----------------------------------------------------------------------------
|
||||
|
||||
using System.Collections.Concurrent;
|
||||
using System.Diagnostics;
|
||||
using System.Text.Json;
|
||||
using FluentAssertions;
|
||||
using Microsoft.Extensions.Logging.Abstractions;
|
||||
using Microsoft.Extensions.Options;
|
||||
using Moq;
|
||||
using StackExchange.Redis;
|
||||
using StellaOps.Concelier.Core.Canonical;
|
||||
using Xunit;
|
||||
using Xunit.Abstractions;
|
||||
|
||||
namespace StellaOps.Concelier.Cache.Valkey.Tests.Performance;
|
||||
|
||||
/// <summary>
|
||||
/// Performance benchmark tests for ValkeyAdvisoryCacheService.
|
||||
/// Verifies that p99 latency for cache reads is under 20ms.
|
||||
/// </summary>
|
||||
public sealed class CachePerformanceBenchmarkTests : IAsyncLifetime
{
    // Untimed iterations run first so JIT/mock-plumbing overhead does not
    // pollute the latency samples.
    private const int WarmupIterations = 50;
    // Number of timed iterations per benchmark.
    private const int BenchmarkIterations = 1000;
    // Latency budget (ms) asserted by the read benchmarks.
    private const double P99ThresholdMs = 20.0;

    private readonly ITestOutputHelper _output;
    private readonly Mock<IConnectionMultiplexer> _connectionMock;
    private readonly Mock<IDatabase> _databaseMock;
    // In-memory backing stores the IDatabase mock reads/writes, keyed by the
    // Redis key string (string values, sets, and sorted sets respectively).
    private readonly ConcurrentDictionary<string, RedisValue> _stringStore;
    private readonly ConcurrentDictionary<string, HashSet<RedisValue>> _setStore;
    private readonly ConcurrentDictionary<string, SortedSet<SortedSetEntry>> _sortedSetStore;

    // Assigned in InitializeAsync (IAsyncLifetime), hence null! here.
    private ValkeyAdvisoryCacheService _cacheService = null!;
    private ConcelierCacheConnectionFactory _connectionFactory = null!;

    public CachePerformanceBenchmarkTests(ITestOutputHelper output)
    {
        _output = output;
        _connectionMock = new Mock<IConnectionMultiplexer>();
        _databaseMock = new Mock<IDatabase>();
        _stringStore = new ConcurrentDictionary<string, RedisValue>();
        _setStore = new ConcurrentDictionary<string, HashSet<RedisValue>>();
        _sortedSetStore = new ConcurrentDictionary<string, SortedSet<SortedSetEntry>>();

        // Wire the IDatabase mock to the in-memory stores above.
        SetupDatabaseMock();
    }
|
||||
|
||||
/// <summary>
/// Builds the cache service under test against the mocked multiplexer so the
/// benchmarks measure service overhead, not network round-trips.
/// </summary>
public async Task InitializeAsync()
{
    var cacheOptions = Options.Create(new ConcelierCacheOptions
    {
        Enabled = true,
        ConnectionString = "localhost:6379",
        Database = 0,
        KeyPrefix = "perf:",
        MaxHotSetSize = 10_000
    });

    // The mocked connection always reports connected and hands back the
    // in-memory IDatabase mock.
    _connectionMock.Setup(x => x.IsConnected).Returns(true);
    _connectionMock
        .Setup(x => x.GetDatabase(It.IsAny<int>(), It.IsAny<object>()))
        .Returns(_databaseMock.Object);

    _connectionFactory = new ConcelierCacheConnectionFactory(
        cacheOptions,
        NullLogger<ConcelierCacheConnectionFactory>.Instance,
        _ => Task.FromResult(_connectionMock.Object));

    _cacheService = new ValkeyAdvisoryCacheService(
        _connectionFactory,
        cacheOptions,
        NullLogger<ValkeyAdvisoryCacheService>.Instance);

    await Task.CompletedTask;
}
|
||||
|
||||
/// <summary>
/// Disposes the connection factory created in InitializeAsync.
/// </summary>
public async Task DisposeAsync()
{
    await _connectionFactory.DisposeAsync();
}
|
||||
|
||||
#region Benchmark Tests
|
||||
|
||||
[Fact]
public async Task GetAsync_SingleRead_P99UnderThreshold()
{
    // Seed the cache so every timed read is a hit.
    var seeded = GenerateAdvisories(100);
    foreach (var item in seeded)
    {
        await _cacheService.SetAsync(item, 0.5);
    }

    // Untimed warmup pass.
    for (var w = 0; w < WarmupIterations; w++)
    {
        await _cacheService.GetAsync(seeded[w % seeded.Count].MergeHash);
    }

    // Time each read individually.
    var samples = new List<double>(BenchmarkIterations);
    var timer = new Stopwatch();
    for (var n = 0; n < BenchmarkIterations; n++)
    {
        timer.Restart();
        await _cacheService.GetAsync(seeded[n % seeded.Count].MergeHash);
        timer.Stop();
        samples.Add(timer.Elapsed.TotalMilliseconds);
    }

    var stats = CalculateStatistics(samples);
    OutputStatistics("GetAsync Performance", stats);

    stats.P99.Should().BeLessThan(P99ThresholdMs,
        $"p99 latency ({stats.P99:F3}ms) should be under {P99ThresholdMs}ms");
}
|
||||
|
||||
[Fact]
public async Task GetByPurlAsync_SingleRead_P99UnderThreshold()
{
    // Seed the cache; reads below go through the PURL index (AffectsKey).
    var seeded = GenerateAdvisories(100);
    foreach (var item in seeded)
    {
        await _cacheService.SetAsync(item, 0.5);
    }

    // Untimed warmup pass.
    for (var w = 0; w < WarmupIterations; w++)
    {
        await _cacheService.GetByPurlAsync(seeded[w % seeded.Count].AffectsKey);
    }

    // Time each PURL lookup individually.
    var samples = new List<double>(BenchmarkIterations);
    var timer = new Stopwatch();
    for (var n = 0; n < BenchmarkIterations; n++)
    {
        timer.Restart();
        await _cacheService.GetByPurlAsync(seeded[n % seeded.Count].AffectsKey);
        timer.Stop();
        samples.Add(timer.Elapsed.TotalMilliseconds);
    }

    var stats = CalculateStatistics(samples);
    OutputStatistics("GetByPurlAsync Performance", stats);

    stats.P99.Should().BeLessThan(P99ThresholdMs,
        $"p99 latency ({stats.P99:F3}ms) should be under {P99ThresholdMs}ms");
}
|
||||
|
||||
[Fact]
public async Task GetByCveAsync_SingleRead_P99UnderThreshold()
{
    // Seed the cache; reads below go through the CVE index.
    var seeded = GenerateAdvisories(100);
    foreach (var item in seeded)
    {
        await _cacheService.SetAsync(item, 0.5);
    }

    // Untimed warmup pass.
    for (var w = 0; w < WarmupIterations; w++)
    {
        await _cacheService.GetByCveAsync(seeded[w % seeded.Count].Cve);
    }

    // Time each CVE lookup individually.
    var samples = new List<double>(BenchmarkIterations);
    var timer = new Stopwatch();
    for (var n = 0; n < BenchmarkIterations; n++)
    {
        timer.Restart();
        await _cacheService.GetByCveAsync(seeded[n % seeded.Count].Cve);
        timer.Stop();
        samples.Add(timer.Elapsed.TotalMilliseconds);
    }

    var stats = CalculateStatistics(samples);
    OutputStatistics("GetByCveAsync Performance", stats);

    stats.P99.Should().BeLessThan(P99ThresholdMs,
        $"p99 latency ({stats.P99:F3}ms) should be under {P99ThresholdMs}ms");
}
|
||||
|
||||
[Fact]
public async Task GetHotAsync_Top100_P99UnderThreshold()
{
    // Seed 200 advisories with monotonically increasing scores so the hot
    // set has a well-defined top-100.
    var seeded = GenerateAdvisories(200);
    for (var i = 0; i < seeded.Count; i++)
    {
        await _cacheService.SetAsync(seeded[i], (double)i / seeded.Count);
    }

    // Untimed warmup pass.
    for (var w = 0; w < WarmupIterations; w++)
    {
        await _cacheService.GetHotAsync(100);
    }

    // Time each hot-set fetch individually.
    var samples = new List<double>(BenchmarkIterations);
    var timer = new Stopwatch();
    for (var n = 0; n < BenchmarkIterations; n++)
    {
        timer.Restart();
        await _cacheService.GetHotAsync(100);
        timer.Stop();
        samples.Add(timer.Elapsed.TotalMilliseconds);
    }

    var stats = CalculateStatistics(samples);
    OutputStatistics("GetHotAsync Performance (limit=100)", stats);

    // Batch reads get double the single-read budget.
    stats.P99.Should().BeLessThan(P99ThresholdMs * 2,
        $"p99 latency ({stats.P99:F3}ms) should be under {P99ThresholdMs * 2}ms for batch operations");
}
|
||||
|
||||
[Fact]
public async Task SetAsync_SingleWrite_P99UnderThreshold()
{
    // One distinct advisory per write so every iteration touches a fresh key.
    var items = GenerateAdvisories(BenchmarkIterations);

    // The first WarmupIterations advisories are consumed untimed.
    for (var w = 0; w < WarmupIterations; w++)
    {
        await _cacheService.SetAsync(items[w], 0.5);
    }

    // Time the remaining writes individually.
    var samples = new List<double>(BenchmarkIterations - WarmupIterations);
    var timer = new Stopwatch();
    for (var n = WarmupIterations; n < BenchmarkIterations; n++)
    {
        timer.Restart();
        await _cacheService.SetAsync(items[n], 0.5);
        timer.Stop();
        samples.Add(timer.Elapsed.TotalMilliseconds);
    }

    var stats = CalculateStatistics(samples);
    OutputStatistics("SetAsync Performance", stats);

    stats.P99.Should().BeLessThan(P99ThresholdMs,
        $"p99 latency ({stats.P99:F3}ms) should be under {P99ThresholdMs}ms");
}
|
||||
|
||||
[Fact]
public async Task UpdateScoreAsync_SingleUpdate_P99UnderThreshold()
{
    // Seed the cache so every score update targets an existing entry.
    var seeded = GenerateAdvisories(100);
    foreach (var item in seeded)
    {
        await _cacheService.SetAsync(item, 0.5);
    }

    // Untimed warmup pass with a fixed score.
    for (var w = 0; w < WarmupIterations; w++)
    {
        await _cacheService.UpdateScoreAsync(seeded[w % seeded.Count].MergeHash, 0.7);
    }

    // Seeded RNG keeps the score sequence deterministic run-to-run.
    var samples = new List<double>(BenchmarkIterations);
    var timer = new Stopwatch();
    var rng = new Random(42);

    for (var n = 0; n < BenchmarkIterations; n++)
    {
        timer.Restart();
        await _cacheService.UpdateScoreAsync(
            seeded[n % seeded.Count].MergeHash,
            rng.NextDouble());
        timer.Stop();
        samples.Add(timer.Elapsed.TotalMilliseconds);
    }

    var stats = CalculateStatistics(samples);
    OutputStatistics("UpdateScoreAsync Performance", stats);

    stats.P99.Should().BeLessThan(P99ThresholdMs,
        $"p99 latency ({stats.P99:F3}ms) should be under {P99ThresholdMs}ms");
}
|
||||
|
||||
[Fact]
public async Task ConcurrentReads_HighThroughput_P99UnderThreshold()
{
    // Seed the cache so all concurrent reads are hits.
    var seeded = GenerateAdvisories(100);
    foreach (var item in seeded)
    {
        await _cacheService.SetAsync(item, 0.5);
    }

    // Warm up with a modest degree of parallelism.
    await Parallel.ForEachAsync(
        Enumerable.Range(0, WarmupIterations),
        new ParallelOptions { MaxDegreeOfParallelism = 10 },
        async (i, _) => await _cacheService.GetAsync(seeded[i % seeded.Count].MergeHash));

    // Each worker times its own read; ConcurrentBag tolerates parallel writers.
    var samples = new ConcurrentBag<double>();

    await Parallel.ForEachAsync(
        Enumerable.Range(0, BenchmarkIterations),
        new ParallelOptions { MaxDegreeOfParallelism = 20 },
        async (i, _) =>
        {
            var timer = Stopwatch.StartNew();
            await _cacheService.GetAsync(seeded[i % seeded.Count].MergeHash);
            timer.Stop();
            samples.Add(timer.Elapsed.TotalMilliseconds);
        });

    var stats = CalculateStatistics(samples.ToList());
    OutputStatistics("ConcurrentReads Performance (20 parallel)", stats);

    stats.P99.Should().BeLessThan(P99ThresholdMs,
        $"p99 latency ({stats.P99:F3}ms) should be under {P99ThresholdMs}ms under concurrent load");
}
|
||||
|
||||
[Fact]
public async Task MixedOperations_ReadWriteWorkload_P99UnderThreshold()
{
    // Arrange: Pre-populate cache with the first 100 of 200 advisories; the
    // remaining 100 are written during warmup/benchmark.
    var advisories = GenerateAdvisories(200);
    foreach (var advisory in advisories.Take(100))
    {
        await _cacheService.SetAsync(advisory, 0.5);
    }

    // Warmup (one read + one write per iteration, untimed)
    for (int i = 0; i < WarmupIterations; i++)
    {
        await _cacheService.GetAsync(advisories[i % 100].MergeHash);
        await _cacheService.SetAsync(advisories[100 + (i % 100)], 0.5);
    }

    // Benchmark - 80% reads, 20% writes (realistic workload).
    // The seeded Random keeps the read/write interleaving deterministic;
    // note the write branch consumes a second NextDouble() for the score.
    var latencies = new List<double>(BenchmarkIterations);
    var sw = new Stopwatch();
    var random = new Random(42);

    for (int i = 0; i < BenchmarkIterations; i++)
    {
        sw.Restart();
        if (random.NextDouble() < 0.8)
        {
            // Read operation (always targets a pre-populated key)
            await _cacheService.GetAsync(advisories[i % 100].MergeHash);
        }
        else
        {
            // Write operation (targets the second half of the advisories)
            await _cacheService.SetAsync(advisories[100 + (i % 100)], random.NextDouble());
        }
        sw.Stop();
        latencies.Add(sw.Elapsed.TotalMilliseconds);
    }

    // Calculate and output statistics
    var stats = CalculateStatistics(latencies);
    OutputStatistics("MixedOperations Performance (80% read, 20% write)", stats);

    // Assert
    stats.P99.Should().BeLessThan(P99ThresholdMs,
        $"p99 latency ({stats.P99:F3}ms) should be under {P99ThresholdMs}ms for mixed workload");
}
|
||||
|
||||
[Fact]
public async Task CacheHitRate_WithPrePopulatedCache_Above80Percent()
{
    // Seed only the first half of the advisories so exactly 50 lookups can hit.
    var advisories = GenerateAdvisories(100);
    foreach (var advisory in advisories.Take(50))
    {
        await _cacheService.SetAsync(advisory, 0.5);
    }

    // Query every advisory and count the cache hits.
    var hits = 0;
    var total = advisories.Count;
    foreach (var advisory in advisories)
    {
        if (await _cacheService.GetAsync(advisory.MergeHash) != null)
        {
            hits++;
        }
    }

    // Report the observed hit rate for diagnostics.
    var hitRate = (double)hits / total * 100;
    _output.WriteLine($"Cache Hit Rate: {hitRate:F1}% ({hits}/{total})");

    // Only the seeded half can hit, so the count is exact.
    hits.Should().Be(50, "exactly 50 advisories were pre-populated");
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Statistics Helper
|
||||
|
||||
/// <summary>Latency summary statistics; all values are milliseconds.</summary>
private record LatencyStatistics(double Min, double Max, double Avg, double P50, double P99);

/// <summary>
/// Computes min/max/avg and p50/p99 (truncated nearest-rank: index
/// (int)(count * 0.99), so 1000 samples pick element 990) over the samples.
/// Sorts a copy so the caller's list is left untouched.
/// </summary>
/// <param name="latencies">Latency samples in ms; must be non-empty.</param>
/// <exception cref="ArgumentException">Thrown when no samples are supplied.</exception>
private static LatencyStatistics CalculateStatistics(List<double> latencies)
{
    if (latencies.Count == 0)
    {
        throw new ArgumentException("At least one latency sample is required.", nameof(latencies));
    }

    // Copy before sorting: the previous implementation sorted the caller's
    // list in place, a surprising side effect for a "calculate" helper.
    var sorted = new List<double>(latencies);
    sorted.Sort();

    var p99Index = (int)(sorted.Count * 0.99);
    var p50Index = sorted.Count / 2;

    return new LatencyStatistics(
        Min: sorted[0],     // list is sorted; no extra O(n) Min()/Max() scans
        Max: sorted[^1],
        Avg: sorted.Average(),
        P50: sorted[p50Index],
        P99: sorted[p99Index]);
}
|
||||
|
||||
/// <summary>
/// Writes a latency summary (min/max/avg/p50/p99 plus the configured
/// threshold) to the xUnit test output.
/// </summary>
private void OutputStatistics(string testName, LatencyStatistics stats)
{
    _output.WriteLine($"{testName}:");
    _output.WriteLine($"  Min: {stats.Min:F3}ms");
    _output.WriteLine($"  Max: {stats.Max:F3}ms");
    _output.WriteLine($"  Avg: {stats.Avg:F3}ms");
    _output.WriteLine($"  P50: {stats.P50:F3}ms");
    _output.WriteLine($"  P99: {stats.P99:F3}ms");
    _output.WriteLine($"  Threshold: {P99ThresholdMs}ms");
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Mock Setup
|
||||
|
||||
/// <summary>
/// Wires the IDatabase mock to the in-memory dictionaries so the cache
/// service gets Redis-like semantics without any network I/O.
/// </summary>
private void SetupDatabaseMock()
{
    // StringGet - simulates fast in-memory lookup; a miss yields the default
    // RedisValue (TryGetValue leaves `value` unset).
    _databaseMock
        .Setup(x => x.StringGetAsync(It.IsAny<RedisKey>(), It.IsAny<CommandFlags>()))
        .Returns((RedisKey key, CommandFlags _) =>
        {
            _stringStore.TryGetValue(key.ToString(), out var value);
            return Task.FromResult(value);
        });

    // StringSet - unconditional upsert; expiry/keepTtl/When are ignored here.
    _databaseMock
        .Setup(x => x.StringSetAsync(
            It.IsAny<RedisKey>(),
            It.IsAny<RedisValue>(),
            It.IsAny<TimeSpan?>(),
            It.IsAny<bool>(),
            It.IsAny<When>(),
            It.IsAny<CommandFlags>()))
        .Returns((RedisKey key, RedisValue value, TimeSpan? _, bool _, When _, CommandFlags _) =>
        {
            _stringStore[key.ToString()] = value;
            return Task.FromResult(true);
        });

    // StringIncrement - a missing key counts as 0, mirroring Redis INCRBY.
    _databaseMock
        .Setup(x => x.StringIncrementAsync(It.IsAny<RedisKey>(), It.IsAny<long>(), It.IsAny<CommandFlags>()))
        .Returns((RedisKey key, long value, CommandFlags _) =>
        {
            var keyStr = key.ToString();
            var current = _stringStore.GetOrAdd(keyStr, RedisValue.Null);
            long currentVal = current.IsNull ? 0 : (long)current;
            var newValue = currentVal + value;
            _stringStore[keyStr] = newValue;
            return Task.FromResult(newValue);
        });

    // KeyDelete - true only when the key existed.
    _databaseMock
        .Setup(x => x.KeyDeleteAsync(It.IsAny<RedisKey>(), It.IsAny<CommandFlags>()))
        .Returns((RedisKey key, CommandFlags _) =>
            Task.FromResult(_stringStore.TryRemove(key.ToString(), out _)));

    // KeyExists - only consults the string store, not the set stores.
    _databaseMock
        .Setup(x => x.KeyExistsAsync(It.IsAny<RedisKey>(), It.IsAny<CommandFlags>()))
        .Returns((RedisKey key, CommandFlags _) => Task.FromResult(_stringStore.ContainsKey(key.ToString())));

    // KeyExpire - expiry is a no-op in this mock; always reports success.
    _databaseMock
        .Setup(x => x.KeyExpireAsync(It.IsAny<RedisKey>(), It.IsAny<TimeSpan?>(), It.IsAny<CommandFlags>()))
        .Returns(Task.FromResult(true));

    _databaseMock
        .Setup(x => x.KeyExpireAsync(It.IsAny<RedisKey>(), It.IsAny<TimeSpan?>(), It.IsAny<ExpireWhen>(), It.IsAny<CommandFlags>()))
        .Returns(Task.FromResult(true));

    // SetAdd - per-set lock: HashSet is not thread-safe and the concurrent
    // benchmark hits these stores in parallel.
    _databaseMock
        .Setup(x => x.SetAddAsync(It.IsAny<RedisKey>(), It.IsAny<RedisValue>(), It.IsAny<CommandFlags>()))
        .Returns((RedisKey key, RedisValue value, CommandFlags _) =>
        {
            var set = _setStore.GetOrAdd(key.ToString(), _ => []);
            lock (set)
            {
                return Task.FromResult(set.Add(value));
            }
        });

    // SetMembers - snapshot under lock; unknown keys yield an empty array.
    _databaseMock
        .Setup(x => x.SetMembersAsync(It.IsAny<RedisKey>(), It.IsAny<CommandFlags>()))
        .Returns((RedisKey key, CommandFlags _) =>
        {
            if (_setStore.TryGetValue(key.ToString(), out var set))
            {
                lock (set)
                {
                    return Task.FromResult(set.ToArray());
                }
            }
            return Task.FromResult(Array.Empty<RedisValue>());
        });

    // SetRemove - false when the key or member is absent.
    _databaseMock
        .Setup(x => x.SetRemoveAsync(It.IsAny<RedisKey>(), It.IsAny<RedisValue>(), It.IsAny<CommandFlags>()))
        .Returns((RedisKey key, RedisValue value, CommandFlags _) =>
        {
            if (_setStore.TryGetValue(key.ToString(), out var set))
            {
                lock (set)
                {
                    return Task.FromResult(set.Remove(value));
                }
            }
            return Task.FromResult(false);
        });

    // SortedSetAdd (both overloads) - shared upsert helper so the comparer
    // and replace-then-add logic is defined exactly once.
    _databaseMock
        .Setup(x => x.SortedSetAddAsync(
            It.IsAny<RedisKey>(),
            It.IsAny<RedisValue>(),
            It.IsAny<double>(),
            It.IsAny<CommandFlags>()))
        .Returns((RedisKey key, RedisValue member, double score, CommandFlags _) =>
            SortedSetUpsert(key.ToString(), member, score));

    _databaseMock
        .Setup(x => x.SortedSetAddAsync(
            It.IsAny<RedisKey>(),
            It.IsAny<RedisValue>(),
            It.IsAny<double>(),
            It.IsAny<SortedSetWhen>(),
            It.IsAny<CommandFlags>()))
        .Returns((RedisKey key, RedisValue member, double score, SortedSetWhen _, CommandFlags _) =>
            SortedSetUpsert(key.ToString(), member, score));

    // SortedSetLength - ignores the min/max/exclude filters and reports the
    // total cardinality (sufficient for these benchmarks).
    _databaseMock
        .Setup(x => x.SortedSetLengthAsync(
            It.IsAny<RedisKey>(),
            It.IsAny<double>(),
            It.IsAny<double>(),
            It.IsAny<Exclude>(),
            It.IsAny<CommandFlags>()))
        .Returns((RedisKey key, double _, double _, Exclude _, CommandFlags _) =>
        {
            if (_sortedSetStore.TryGetValue(key.ToString(), out var set))
            {
                lock (set)
                {
                    return Task.FromResult((long)set.Count);
                }
            }
            return Task.FromResult(0L);
        });

    // SortedSetRangeByRank - rank window [start, stop], ascending or
    // descending. Negative (from-the-end) ranks are not supported here.
    _databaseMock
        .Setup(x => x.SortedSetRangeByRankAsync(
            It.IsAny<RedisKey>(),
            It.IsAny<long>(),
            It.IsAny<long>(),
            It.IsAny<Order>(),
            It.IsAny<CommandFlags>()))
        .Returns((RedisKey key, long start, long stop, Order order, CommandFlags _) =>
        {
            if (_sortedSetStore.TryGetValue(key.ToString(), out var set))
            {
                lock (set)
                {
                    var items = order == Order.Descending
                        ? set.Reverse().Skip((int)start).Take((int)(stop - start + 1))
                        : set.Skip((int)start).Take((int)(stop - start + 1));
                    return Task.FromResult(items.Select(x => x.Element).ToArray());
                }
            }
            return Task.FromResult(Array.Empty<RedisValue>());
        });

    // SortedSetRemove - true when at least one matching member was removed.
    _databaseMock
        .Setup(x => x.SortedSetRemoveAsync(
            It.IsAny<RedisKey>(),
            It.IsAny<RedisValue>(),
            It.IsAny<CommandFlags>()))
        .Returns((RedisKey key, RedisValue member, CommandFlags _) =>
        {
            if (_sortedSetStore.TryGetValue(key.ToString(), out var set))
            {
                lock (set)
                {
                    return Task.FromResult(set.RemoveWhere(x => x.Element == member) > 0);
                }
            }
            return Task.FromResult(false);
        });

    // SortedSetRemoveRangeByRank - removes the [start, stop] rank window and
    // returns the number of removed entries.
    _databaseMock
        .Setup(x => x.SortedSetRemoveRangeByRankAsync(
            It.IsAny<RedisKey>(),
            It.IsAny<long>(),
            It.IsAny<long>(),
            It.IsAny<CommandFlags>()))
        .Returns((RedisKey key, long start, long stop, CommandFlags _) =>
        {
            if (_sortedSetStore.TryGetValue(key.ToString(), out var set))
            {
                lock (set)
                {
                    var toRemove = set.Skip((int)start).Take((int)(stop - start + 1)).ToList();
                    foreach (var item in toRemove)
                    {
                        set.Remove(item);
                    }
                    return Task.FromResult((long)toRemove.Count);
                }
            }
            return Task.FromResult(0L);
        });
}

/// <summary>
/// Inserts or replaces <paramref name="member"/> in the named sorted set,
/// like Redis ZADD: an existing member is removed first so its score is
/// replaced. Shared by both SortedSetAddAsync overloads.
/// </summary>
private Task<bool> SortedSetUpsert(string key, RedisValue member, double score)
{
    var set = GetOrCreateSortedSet(key);
    lock (set)
    {
        set.RemoveWhere(x => x.Element == member);
        return Task.FromResult(set.Add(new SortedSetEntry(member, score)));
    }
}

/// <summary>
/// Gets (or lazily creates) the backing sorted set for a key, ordered by
/// score with the element as a deterministic tie-breaker.
/// </summary>
private SortedSet<SortedSetEntry> GetOrCreateSortedSet(string key)
{
    return _sortedSetStore.GetOrAdd(key, _ => new SortedSet<SortedSetEntry>(
        Comparer<SortedSetEntry>.Create((a, b) =>
        {
            var cmp = a.Score.CompareTo(b.Score);
            return cmp != 0 ? cmp : string.Compare(a.Element, b.Element, StringComparison.Ordinal);
        })));
}
|
||||
|
||||
/// <summary>
/// Builds <paramref name="count"/> synthetic advisories with deterministic
/// CVE ids, PURLs, and cycling severities (ids and merge hashes are random
/// per call).
/// </summary>
private static List<CanonicalAdvisory> GenerateAdvisories(int count)
{
    var severities = new[] { "critical", "high", "medium", "low" };

    return Enumerable.Range(0, count)
        .Select(i => new CanonicalAdvisory
        {
            Id = Guid.NewGuid(),
            Cve = $"CVE-2024-{i:D4}",
            AffectsKey = $"pkg:npm/package-{i}@1.0.0",
            MergeHash = $"sha256:{Guid.NewGuid():N}",
            Title = $"Test Advisory {i}",
            Summary = $"Summary for test advisory {i}",
            Severity = severities[i % severities.Length],
            EpssScore = (decimal)(i % 100) / 100m,
            ExploitKnown = i % 5 == 0,
            CreatedAt = DateTimeOffset.UtcNow.AddDays(-i),
            UpdatedAt = DateTimeOffset.UtcNow
        })
        .ToList();
}
|
||||
|
||||
#endregion
|
||||
}
|
||||
@@ -0,0 +1,545 @@
|
||||
// -----------------------------------------------------------------------------
|
||||
// FederationE2ETests.cs
|
||||
// Sprint: SPRINT_8200_0014_0003_CONCEL_bundle_import_merge
|
||||
// Tasks: IMPORT-8200-024, IMPORT-8200-029, IMPORT-8200-033
|
||||
// Description: End-to-end tests for federation scenarios
|
||||
// -----------------------------------------------------------------------------
|
||||
|
||||
using FluentAssertions;
|
||||
using Microsoft.Extensions.Logging.Abstractions;
|
||||
using Microsoft.Extensions.Options;
|
||||
using Moq;
|
||||
using StellaOps.Concelier.Federation.Compression;
|
||||
using StellaOps.Concelier.Federation.Import;
|
||||
using StellaOps.Concelier.Federation.Models;
|
||||
using StellaOps.Concelier.Federation.Serialization;
|
||||
using StellaOps.Concelier.Federation.Signing;
|
||||
using System.Formats.Tar;
|
||||
using System.Text;
|
||||
using System.Text.Json;
|
||||
|
||||
namespace StellaOps.Concelier.Federation.Tests.Integration;
|
||||
|
||||
/// <summary>
|
||||
/// End-to-end tests for federation scenarios.
|
||||
/// </summary>
|
||||
public sealed class FederationE2ETests : IDisposable
{
    // Streams created during a test run; disposed in Dispose (xUnit calls
    // Dispose after each test instance).
    private readonly List<Stream> _disposableStreams = [];

    public void Dispose()
    {
        foreach (var stream in _disposableStreams)
        {
            stream.Dispose();
        }
    }
|
||||
|
||||
#region Export to Import Round-Trip Tests (Task 24)
|
||||
|
||||
[Fact]
public async Task RoundTrip_ExportBundle_ImportVerifiesState()
{
    // This test simulates: export from Site A -> import to Site B -> verify state
    // Arrange - Site A exports a bundle
    var siteAManifest = new BundleManifest
    {
        Version = "feedser-bundle/1.0",
        SiteId = "site-a",
        ExportCursor = "2025-01-15T10:00:00.000Z#0001",
        SinceCursor = null,
        // Invariant culture so the parse does not depend on the machine's
        // regional settings (CA1305).
        ExportedAt = DateTimeOffset.Parse("2025-01-15T10:00:00Z", System.Globalization.CultureInfo.InvariantCulture),
        BundleHash = "sha256:roundtrip-test",
        Counts = new BundleCounts { Canonicals = 3, Edges = 3, Deletions = 1 }
    };

    var bundleStream = await CreateTestBundleAsync(siteAManifest, 3, 3, 1);

    // Act - Site B reads and parses the bundle
    using var reader = await BundleReader.ReadAsync(bundleStream);

    // Assert - Manifest parsed correctly
    reader.Manifest.SiteId.Should().Be("site-a");
    reader.Manifest.Counts.Canonicals.Should().Be(3);

    // Assert - Content streams correctly
    var canonicals = await reader.StreamCanonicalsAsync().ToListAsync();
    var edges = await reader.StreamEdgesAsync().ToListAsync();
    var deletions = await reader.StreamDeletionsAsync().ToListAsync();

    canonicals.Should().HaveCount(3);
    edges.Should().HaveCount(3);
    deletions.Should().HaveCount(1);

    // Verify canonical data integrity (Ordinal: the prefix check is a
    // non-linguistic comparison, CA1310).
    canonicals.All(c => c.Id != Guid.Empty).Should().BeTrue();
    canonicals.All(c => c.MergeHash.StartsWith("sha256:", StringComparison.Ordinal)).Should().BeTrue();
    canonicals.All(c => c.Status == "active").Should().BeTrue();
}
|
||||
|
||||
[Fact]
public async Task RoundTrip_DeltaBundle_OnlyIncludesChanges()
{
    // Arrange - Delta bundle with since_cursor
    var deltaManifest = new BundleManifest
    {
        Version = "feedser-bundle/1.0",
        SiteId = "site-a",
        ExportCursor = "2025-01-15T12:00:00.000Z#0050",
        SinceCursor = "2025-01-15T10:00:00.000Z#0001", // Delta since previous cursor
        // Invariant culture keeps the parse machine-independent (CA1305).
        ExportedAt = DateTimeOffset.Parse("2025-01-15T12:00:00Z", System.Globalization.CultureInfo.InvariantCulture),
        BundleHash = "sha256:delta-bundle",
        Counts = new BundleCounts { Canonicals = 5, Edges = 2, Deletions = 0 }
    };

    var bundleStream = await CreateTestBundleAsync(deltaManifest, 5, 2, 0);

    // Act
    using var reader = await BundleReader.ReadAsync(bundleStream);

    // Assert - Delta bundle has since_cursor
    reader.Manifest.SinceCursor.Should().Be("2025-01-15T10:00:00.000Z#0001");
    reader.Manifest.ExportCursor.Should().Be("2025-01-15T12:00:00.000Z#0050");

    // Delta only has 5 canonicals (changes since cursor)
    var canonicals = await reader.StreamCanonicalsAsync().ToListAsync();
    canonicals.Should().HaveCount(5);
}
|
||||
|
||||
[Fact]
public async Task RoundTrip_VerifyBundle_PassesValidation()
{
    // Build a minimal two-canonical bundle to run through the verifier.
    var manifest = new BundleManifest
    {
        Version = "feedser-bundle/1.0",
        SiteId = "verified-site",
        ExportCursor = "2025-01-15T10:00:00.000Z#0001",
        ExportedAt = DateTimeOffset.UtcNow,
        BundleHash = "sha256:verified",
        Counts = new BundleCounts { Canonicals = 2 }
    };

    var bundleStream = await CreateTestBundleAsync(manifest, 2, 0, 0);
    using var reader = await BundleReader.ReadAsync(bundleStream);

    // Stub the signer so signature verification always reports valid.
    var signerMock = new Mock<IBundleSigner>();
    signerMock
        .Setup(x => x.VerifyBundleAsync(It.IsAny<string>(), It.IsAny<BundleSignature>(), It.IsAny<CancellationToken>()))
        .ReturnsAsync(new BundleVerificationResult { IsValid = true, SignerIdentity = "trusted-key" });

    var importOptions = Options.Create(new FederationImportOptions());
    var verifier = new BundleVerifier(signerMock.Object, importOptions, NullLogger<BundleVerifier>.Instance);

    // Run validation (signature check skipped by flag).
    var verification = await verifier.VerifyAsync(reader, skipSignature: true);

    verification.IsValid.Should().BeTrue();
    verification.Manifest.Should().NotBeNull();
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Air-Gap Workflow Tests (Task 29)
|
||||
|
||||
[Fact]
public async Task AirGap_ExportToFile_ImportFromFile_Succeeds()
{
    // This simulates: export to file -> transfer (air-gap) -> import from file
    // Arrange - Create bundle
    var manifest = new BundleManifest
    {
        Version = "feedser-bundle/1.0",
        SiteId = "airgap-source",
        ExportCursor = "2025-01-15T10:00:00.000Z#0001",
        ExportedAt = DateTimeOffset.UtcNow,
        BundleHash = "sha256:airgap-bundle",
        Counts = new BundleCounts { Canonicals = 10, Edges = 15, Deletions = 2 }
    };

    var bundleStream = await CreateTestBundleAsync(manifest, 10, 15, 2);

    // Simulate writing to file (in memory for test). Track the buffer so the
    // class-level Dispose cleans it up like every other test stream (it was
    // previously never disposed).
    var fileBuffer = new MemoryStream();
    _disposableStreams.Add(fileBuffer);
    bundleStream.Position = 0;
    await bundleStream.CopyToAsync(fileBuffer);
    fileBuffer.Position = 0;

    // Act - "Transfer" and read from file
    using var reader = await BundleReader.ReadAsync(fileBuffer);

    // Assert - All data survives air-gap transfer
    reader.Manifest.SiteId.Should().Be("airgap-source");

    var canonicals = await reader.StreamCanonicalsAsync().ToListAsync();
    var edges = await reader.StreamEdgesAsync().ToListAsync();
    var deletions = await reader.StreamDeletionsAsync().ToListAsync();

    canonicals.Should().HaveCount(10);
    edges.Should().HaveCount(15);
    deletions.Should().HaveCount(2);
}
|
||||
|
||||
[Fact]
public async Task AirGap_LargeBundle_StreamsEfficiently()
{
    // Arrange - a bundle large enough to exercise the streaming read path.
    var manifest = new BundleManifest
    {
        Version = "feedser-bundle/1.0",
        SiteId = "large-site",
        ExportCursor = "2025-01-15T10:00:00.000Z#0100",
        ExportedAt = DateTimeOffset.UtcNow,
        BundleHash = "sha256:large-bundle",
        Counts = new BundleCounts { Canonicals = 100, Edges = 200, Deletions = 10 }
    };

    var bundle = await CreateTestBundleAsync(manifest, 100, 200, 10);

    // Act - consume both async streams item by item, counting as we go.
    using var reader = await BundleReader.ReadAsync(bundle);

    var canonicalsSeen = 0;
    await foreach (var _ in reader.StreamCanonicalsAsync())
    {
        canonicalsSeen++;
    }

    var edgesSeen = 0;
    await foreach (var _ in reader.StreamEdgesAsync())
    {
        edgesSeen++;
    }

    // Assert - every record was yielded exactly once.
    canonicalsSeen.Should().Be(100);
    edgesSeen.Should().Be(200);
}
|
||||
|
||||
[Fact]
public async Task AirGap_BundleWithAllEntryTypes_HasAllFiles()
{
    // Arrange - a bundle containing at least one record of every type.
    var manifest = new BundleManifest
    {
        Version = "feedser-bundle/1.0",
        SiteId = "complete-site",
        ExportCursor = "cursor",
        ExportedAt = DateTimeOffset.UtcNow,
        BundleHash = "sha256:complete",
        Counts = new BundleCounts { Canonicals = 1, Edges = 1, Deletions = 1 }
    };

    var bundle = await CreateTestBundleAsync(manifest, 1, 1, 1);

    // Act
    using var reader = await BundleReader.ReadAsync(bundle);
    var entryNames = await reader.GetEntryNamesAsync();

    // Assert - the archive exposes the manifest plus one NDJSON file per record type.
    entryNames.Should().Contain("MANIFEST.json");
    entryNames.Should().Contain("canonicals.ndjson");
    entryNames.Should().Contain("edges.ndjson");
    entryNames.Should().Contain("deletions.ndjson");
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Multi-Site Federation Tests (Task 33)
|
||||
|
||||
[Fact]
public async Task MultiSite_DifferentSiteIds_ParsedCorrectly()
{
    // Arrange - two bundles originating from distinct federation sites.
    var manifestA = new BundleManifest
    {
        Version = "feedser-bundle/1.0",
        SiteId = "us-west-1",
        ExportCursor = "2025-01-15T10:00:00.000Z#0001",
        ExportedAt = DateTimeOffset.UtcNow,
        BundleHash = "sha256:site-a",
        Counts = new BundleCounts { Canonicals = 5 }
    };

    var manifestB = new BundleManifest
    {
        Version = "feedser-bundle/1.0",
        SiteId = "eu-central-1",
        ExportCursor = "2025-01-15T11:00:00.000Z#0002",
        ExportedAt = DateTimeOffset.UtcNow,
        BundleHash = "sha256:site-b",
        Counts = new BundleCounts { Canonicals = 8 }
    };

    var bundleA = await CreateTestBundleAsync(manifestA, 5, 0, 0);
    var bundleB = await CreateTestBundleAsync(manifestB, 8, 0, 0);

    // Act
    using var readerA = await BundleReader.ReadAsync(bundleA);
    using var readerB = await BundleReader.ReadAsync(bundleB);

    // Assert - each reader reports its own site id and its own record set.
    readerA.Manifest.SiteId.Should().Be("us-west-1");
    readerB.Manifest.SiteId.Should().Be("eu-central-1");

    var canonicalsA = await readerA.StreamCanonicalsAsync().ToListAsync();
    var canonicalsB = await readerB.StreamCanonicalsAsync().ToListAsync();

    canonicalsA.Should().HaveCount(5);
    canonicalsB.Should().HaveCount(8);
}
|
||||
|
||||
[Fact]
public async Task MultiSite_CursorsAreIndependent()
{
    // Verifies that export cursors are tracked per site and never bleed
    // across federation peers.
    // Arrange - three sites, each with its own export cursor.
    var sites = new[]
    {
        ("site-alpha", "2025-01-15T08:00:00.000Z#0100"),
        ("site-beta", "2025-01-15T09:00:00.000Z#0050"),
        ("site-gamma", "2025-01-15T10:00:00.000Z#0200")
    };

    var readers = new List<BundleReader>();

    // Fix: the creation loop now sits inside the try so readers opened before
    // a failure are still disposed by the finally block. Previously a reader
    // could leak if a later ReadAsync threw.
    try
    {
        foreach (var (siteId, cursor) in sites)
        {
            var manifest = new BundleManifest
            {
                Version = "feedser-bundle/1.0",
                SiteId = siteId,
                ExportCursor = cursor,
                ExportedAt = DateTimeOffset.UtcNow,
                BundleHash = $"sha256:{siteId}",
                Counts = new BundleCounts { Canonicals = 1 }
            };

            var bundle = await CreateTestBundleAsync(manifest, 1, 0, 0);
            readers.Add(await BundleReader.ReadAsync(bundle));
        }

        // Assert - each site retains the cursor it was exported with.
        readers[0].Manifest.ExportCursor.Should().Contain("#0100");
        readers[1].Manifest.ExportCursor.Should().Contain("#0050");
        readers[2].Manifest.ExportCursor.Should().Contain("#0200");
    }
    finally
    {
        foreach (var reader in readers)
        {
            reader.Dispose();
        }
    }
}
|
||||
|
||||
[Fact]
public async Task MultiSite_SameMergeHash_DifferentSources()
{
    // Arrange - a vulnerability identity (merge hash) as exported by one site.
    const string mergeHash = "sha256:cve-2024-1234-express-4.0.0";

    var manifest = new BundleManifest
    {
        Version = "feedser-bundle/1.0",
        SiteId = "primary-site",
        ExportCursor = "cursor-a",
        ExportedAt = DateTimeOffset.UtcNow,
        BundleHash = "sha256:primary",
        Counts = new BundleCounts { Canonicals = 1 }
    };

    // Build a bundle whose single canonical record carries that exact hash.
    var bundle = await CreateTestBundleWithSpecificHashAsync(manifest, mergeHash);

    // Act
    using var reader = await BundleReader.ReadAsync(bundle);
    var canonicals = await reader.StreamCanonicalsAsync().ToListAsync();

    // Assert - the merge hash round-trips through the bundle unchanged.
    canonicals.Should().HaveCount(1);
    canonicals[0].MergeHash.Should().Be(mergeHash);
}
|
||||
|
||||
[Fact]
public void MultiSite_FederationSiteInfo_TracksPerSiteState()
{
    // Exercises the bookkeeping record used to track per-site federation
    // state: two active sites plus one that has never synced.
    var sites = new List<FederationSiteInfo>
    {
        new()
        {
            SiteId = "us-west-1",
            DisplayName = "US West",
            Enabled = true,
            LastCursor = "2025-01-15T10:00:00.000Z#0100",
            LastSyncAt = DateTimeOffset.Parse("2025-01-15T10:00:00Z"),
            BundlesImported = 42
        },
        new()
        {
            SiteId = "eu-central-1",
            DisplayName = "EU Central",
            Enabled = true,
            LastCursor = "2025-01-15T09:00:00.000Z#0050",
            LastSyncAt = DateTimeOffset.Parse("2025-01-15T09:00:00Z"),
            BundlesImported = 38
        },
        new()
        {
            SiteId = "ap-south-1",
            DisplayName = "Asia Pacific",
            Enabled = false,
            LastCursor = null,
            LastSyncAt = null,
            BundlesImported = 0
        }
    };

    // Assert - enabled flags, import totals, and cursors are independent per site.
    sites.Should().HaveCount(3);
    sites.Count(s => s.Enabled).Should().Be(2);
    sites.Sum(s => s.BundlesImported).Should().Be(80);
    sites.Single(s => s.SiteId == "ap-south-1").LastCursor.Should().BeNull();
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Helper Types
|
||||
|
||||
/// <summary>
/// Snapshot of federation sync state for a single remote site.
/// </summary>
private sealed record FederationSiteInfo
{
    /// <summary>Unique identifier of the remote site.</summary>
    public required string SiteId { get; init; }

    /// <summary>Optional human-readable site name.</summary>
    public string? DisplayName { get; init; }

    /// <summary>Whether synchronization with this site is currently active.</summary>
    public bool Enabled { get; init; }

    /// <summary>Cursor of the last imported bundle; null when the site has never synced.</summary>
    public string? LastCursor { get; init; }

    /// <summary>Timestamp of the last successful sync; null when the site has never synced.</summary>
    public DateTimeOffset? LastSyncAt { get; init; }

    /// <summary>Total number of bundles imported from this site so far.</summary>
    public int BundlesImported { get; init; }
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Helper Methods
|
||||
|
||||
/// <summary>
/// Builds a zstd-compressed tar bundle containing a manifest plus the
/// requested number of synthetic canonical, edge, and deletion records.
/// The returned stream is registered with the test's disposal list.
/// </summary>
private async Task<Stream> CreateTestBundleAsync(
    BundleManifest manifest,
    int canonicalCount,
    int edgeCount,
    int deletionCount)
{
    var tar = new MemoryStream();

    await using (var writer = new TarWriter(tar, leaveOpen: true))
    {
        await WriteEntryAsync(writer, "MANIFEST.json", JsonSerializer.Serialize(manifest, BundleSerializer.Options));

        // canonicals.ndjson - one serialized record per line.
        var canonicalLines = new StringBuilder();
        for (var i = 1; i <= canonicalCount; i++)
        {
            canonicalLines.AppendLine(JsonSerializer.Serialize(new CanonicalBundleLine
            {
                Id = Guid.NewGuid(),
                Cve = $"CVE-2024-{i:D4}",
                AffectsKey = $"pkg:generic/test{i}@1.0",
                MergeHash = $"sha256:hash{i}",
                Status = "active",
                Title = $"Test Advisory {i}",
                UpdatedAt = DateTimeOffset.UtcNow
            }, BundleSerializer.Options));
        }
        await WriteEntryAsync(writer, "canonicals.ndjson", canonicalLines.ToString());

        // edges.ndjson - source advisory linkage records.
        var edgeLines = new StringBuilder();
        for (var i = 1; i <= edgeCount; i++)
        {
            edgeLines.AppendLine(JsonSerializer.Serialize(new EdgeBundleLine
            {
                Id = Guid.NewGuid(),
                CanonicalId = Guid.NewGuid(),
                Source = "nvd",
                SourceAdvisoryId = $"CVE-2024-{i:D4}",
                ContentHash = $"sha256:edge{i}",
                UpdatedAt = DateTimeOffset.UtcNow
            }, BundleSerializer.Options));
        }
        await WriteEntryAsync(writer, "edges.ndjson", edgeLines.ToString());

        // deletions.ndjson - tombstones for withdrawn canonicals.
        var deletionLines = new StringBuilder();
        for (var i = 1; i <= deletionCount; i++)
        {
            deletionLines.AppendLine(JsonSerializer.Serialize(new DeletionBundleLine
            {
                CanonicalId = Guid.NewGuid(),
                Reason = "rejected",
                DeletedAt = DateTimeOffset.UtcNow
            }, BundleSerializer.Options));
        }
        await WriteEntryAsync(writer, "deletions.ndjson", deletionLines.ToString());
    }

    tar.Position = 0;

    // Bundles are zstd-compressed on the wire; mirror that here.
    var compressed = new MemoryStream();
    await ZstdCompression.CompressAsync(tar, compressed);
    compressed.Position = 0;

    _disposableStreams.Add(compressed);
    return compressed;
}
|
||||
|
||||
/// <summary>
/// Builds a bundle whose single canonical record carries the caller-supplied
/// merge hash, for tests that need a deterministic record identity.
/// Edge and deletion files are present but empty.
/// </summary>
private async Task<Stream> CreateTestBundleWithSpecificHashAsync(
    BundleManifest manifest,
    string mergeHash)
{
    var tar = new MemoryStream();

    await using (var writer = new TarWriter(tar, leaveOpen: true))
    {
        await WriteEntryAsync(writer, "MANIFEST.json", JsonSerializer.Serialize(manifest, BundleSerializer.Options));

        var line = new CanonicalBundleLine
        {
            Id = Guid.NewGuid(),
            Cve = "CVE-2024-1234",
            AffectsKey = "pkg:npm/express@4.0.0",
            MergeHash = mergeHash,
            Status = "active",
            Title = "Express vulnerability",
            UpdatedAt = DateTimeOffset.UtcNow
        };
        await WriteEntryAsync(writer, "canonicals.ndjson", JsonSerializer.Serialize(line, BundleSerializer.Options) + "\n");
        await WriteEntryAsync(writer, "edges.ndjson", "");
        await WriteEntryAsync(writer, "deletions.ndjson", "");
    }

    tar.Position = 0;

    var compressed = new MemoryStream();
    await ZstdCompression.CompressAsync(tar, compressed);
    compressed.Position = 0;

    _disposableStreams.Add(compressed);
    return compressed;
}
|
||||
|
||||
/// <summary>
/// Writes a single UTF-8 text file into the tar archive as a PAX entry.
/// </summary>
/// <param name="tarWriter">Open writer positioned at the end of the archive.</param>
/// <param name="name">Entry name inside the archive (e.g. "MANIFEST.json").</param>
/// <param name="content">Full text content of the entry; may be empty.</param>
private static async Task WriteEntryAsync(TarWriter tarWriter, string name, string content)
{
    // Fix: deterministically dispose the scratch stream once the entry has
    // been written - the content is fully consumed by WriteEntryAsync before
    // it returns, and TarWriter does not take ownership of DataStream
    // (previously the MemoryStream was never disposed).
    using var data = new MemoryStream(Encoding.UTF8.GetBytes(content));
    var entry = new PaxTarEntry(TarEntryType.RegularFile, name)
    {
        DataStream = data
    };
    await tarWriter.WriteEntryAsync(entry);
}
|
||||
|
||||
#endregion
|
||||
}
|
||||
@@ -0,0 +1,708 @@
|
||||
// -----------------------------------------------------------------------------
|
||||
// InterestScoreRepositoryTests.cs
|
||||
// Sprint: SPRINT_8200_0013_0002_CONCEL_interest_scoring
|
||||
// Task: ISCORE-8200-004
|
||||
// Description: Integration tests for InterestScoreRepository CRUD operations
|
||||
// -----------------------------------------------------------------------------
|
||||
|
||||
using FluentAssertions;
|
||||
using Microsoft.Extensions.Logging.Abstractions;
|
||||
using Microsoft.Extensions.Options;
|
||||
using StellaOps.Concelier.Interest;
|
||||
using StellaOps.Concelier.Interest.Models;
|
||||
using StellaOps.Concelier.Storage.Postgres.Repositories;
|
||||
using Xunit;
|
||||
|
||||
namespace StellaOps.Concelier.Storage.Postgres.Tests;
|
||||
|
||||
/// <summary>
|
||||
/// Integration tests for <see cref="InterestScoreRepository"/>.
|
||||
/// Tests CRUD operations, batch operations, and query functionality.
|
||||
/// </summary>
|
||||
[Collection(ConcelierPostgresCollection.Name)]
|
||||
public sealed class InterestScoreRepositoryTests : IAsyncLifetime
|
||||
{
|
||||
private readonly ConcelierPostgresFixture _fixture;
|
||||
private readonly ConcelierDataSource _dataSource;
|
||||
private readonly InterestScoreRepository _repository;
|
||||
|
||||
/// <summary>
/// Wires a fresh repository against the shared PostgreSQL fixture.
/// </summary>
public InterestScoreRepositoryTests(ConcelierPostgresFixture fixture)
{
    _fixture = fixture;

    _dataSource = new ConcelierDataSource(
        Options.Create(fixture.Fixture.CreateOptions()),
        NullLogger<ConcelierDataSource>.Instance);
    _repository = new InterestScoreRepository(_dataSource, NullLogger<InterestScoreRepository>.Instance);
}

// Every test starts from an empty database; there is nothing to tear down.
public Task InitializeAsync() => _fixture.TruncateAllTablesAsync();
public Task DisposeAsync() => Task.CompletedTask;
|
||||
|
||||
#region GetByCanonicalIdAsync Tests
|
||||
|
||||
[Fact]
public async Task GetByCanonicalIdAsync_ShouldReturnScore_WhenExists()
{
    // Arrange - persist a score, then read it back by its canonical id.
    var saved = CreateTestScore();
    await _repository.SaveAsync(saved);

    // Act
    var loaded = await _repository.GetByCanonicalIdAsync(saved.CanonicalId);

    // Assert - all persisted fields round-trip (timestamp within DB precision).
    loaded.Should().NotBeNull();
    loaded!.CanonicalId.Should().Be(saved.CanonicalId);
    loaded.Score.Should().Be(saved.Score);
    loaded.Reasons.Should().BeEquivalentTo(saved.Reasons);
    loaded.ComputedAt.Should().BeCloseTo(saved.ComputedAt, TimeSpan.FromSeconds(1));
}

[Fact]
public async Task GetByCanonicalIdAsync_ShouldReturnNull_WhenNotExists()
{
    // Act - look up an id that was never saved.
    var loaded = await _repository.GetByCanonicalIdAsync(Guid.NewGuid());

    // Assert - a miss is reported as null, not an exception.
    loaded.Should().BeNull();
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region GetByCanonicalIdsAsync Tests
|
||||
|
||||
[Fact]
public async Task GetByCanonicalIdsAsync_ShouldReturnMatchingScores()
{
    // Arrange - three stored scores; the query asks for the first and last only.
    var first = CreateTestScore();
    var second = CreateTestScore();
    var third = CreateTestScore();
    await _repository.SaveAsync(first);
    await _repository.SaveAsync(second);
    await _repository.SaveAsync(third);

    // Act
    var result = await _repository.GetByCanonicalIdsAsync([first.CanonicalId, third.CanonicalId]);

    // Assert - exactly the requested ids come back, keyed by canonical id.
    result.Should().HaveCount(2);
    result.Keys.Should().Contain(first.CanonicalId);
    result.Keys.Should().Contain(third.CanonicalId);
    result.Keys.Should().NotContain(second.CanonicalId);
}

[Fact]
public async Task GetByCanonicalIdsAsync_ShouldReturnEmptyDictionary_WhenNoMatches()
{
    // Act - query for ids that do not exist in the table.
    var result = await _repository.GetByCanonicalIdsAsync([Guid.NewGuid(), Guid.NewGuid()]);

    // Assert
    result.Should().BeEmpty();
}

[Fact]
public async Task GetByCanonicalIdsAsync_ShouldReturnEmptyDictionary_WhenEmptyInput()
{
    // Act - an empty id list is valid input and yields an empty result.
    var result = await _repository.GetByCanonicalIdsAsync([]);

    // Assert
    result.Should().BeEmpty();
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region SaveAsync Tests
|
||||
|
||||
[Fact]
public async Task SaveAsync_ShouldInsertNewScore()
{
    // Arrange
    var score = CreateTestScore(score: 0.75, reasons: ["in_sbom", "reachable", "deployed"]);

    // Act
    await _repository.SaveAsync(score);

    // Assert - the inserted row is readable with all fields intact.
    var stored = await _repository.GetByCanonicalIdAsync(score.CanonicalId);
    stored.Should().NotBeNull();
    stored!.Score.Should().Be(0.75);
    stored.Reasons.Should().BeEquivalentTo(["in_sbom", "reachable", "deployed"]);
}

[Fact]
public async Task SaveAsync_ShouldUpdateExistingScore_OnConflict()
{
    // Arrange - save once, then save again for the same canonical id.
    var canonicalId = Guid.NewGuid();
    await _repository.SaveAsync(CreateTestScore(canonicalId: canonicalId, score: 0.5, reasons: ["in_sbom"]));

    var replacement = CreateTestScore(
        canonicalId: canonicalId,
        score: 0.85,
        reasons: ["in_sbom", "reachable", "deployed", "no_vex_na"]);

    // Act
    await _repository.SaveAsync(replacement);

    // Assert - upsert semantics: the second save wins outright.
    var stored = await _repository.GetByCanonicalIdAsync(canonicalId);
    stored.Should().NotBeNull();
    stored!.Score.Should().Be(0.85);
    stored.Reasons.Should().BeEquivalentTo(["in_sbom", "reachable", "deployed", "no_vex_na"]);
}

[Fact]
public async Task SaveAsync_ShouldStoreLastSeenInBuild()
{
    // Arrange
    var buildId = Guid.NewGuid();
    var score = CreateTestScore(lastSeenInBuild: buildId);

    // Act
    await _repository.SaveAsync(score);

    // Assert - the build reference persists.
    var stored = await _repository.GetByCanonicalIdAsync(score.CanonicalId);
    stored.Should().NotBeNull();
    stored!.LastSeenInBuild.Should().Be(buildId);
}

[Fact]
public async Task SaveAsync_ShouldHandleNullLastSeenInBuild()
{
    // Arrange - the build reference column is nullable.
    var score = CreateTestScore(lastSeenInBuild: null);

    // Act
    await _repository.SaveAsync(score);

    // Assert
    var stored = await _repository.GetByCanonicalIdAsync(score.CanonicalId);
    stored.Should().NotBeNull();
    stored!.LastSeenInBuild.Should().BeNull();
}

[Fact]
public async Task SaveAsync_ShouldStoreEmptyReasons()
{
    // Arrange - a score with no contributing reasons at all.
    var score = CreateTestScore(reasons: []);

    // Act
    await _repository.SaveAsync(score);

    // Assert - an empty array round-trips as empty, not null.
    var stored = await _repository.GetByCanonicalIdAsync(score.CanonicalId);
    stored.Should().NotBeNull();
    stored!.Reasons.Should().BeEmpty();
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region SaveManyAsync Tests
|
||||
|
||||
[Fact]
public async Task SaveManyAsync_ShouldInsertMultipleScores()
{
    // Arrange
    var batch = new[]
    {
        CreateTestScore(score: 0.9),
        CreateTestScore(score: 0.5),
        CreateTestScore(score: 0.1)
    };

    // Act
    await _repository.SaveManyAsync(batch);

    // Assert - every row in the batch was inserted.
    (await _repository.CountAsync()).Should().Be(3);
}

[Fact]
public async Task SaveManyAsync_ShouldUpsertOnConflict()
{
    // Arrange - a pre-existing row for one of the batch's canonical ids.
    var canonicalId = Guid.NewGuid();
    await _repository.SaveAsync(CreateTestScore(canonicalId: canonicalId, score: 0.3));

    var batch = new[]
    {
        CreateTestScore(canonicalId: canonicalId, score: 0.8), // collides -> update
        CreateTestScore(score: 0.6)                            // fresh -> insert
    };

    // Act
    await _repository.SaveManyAsync(batch);

    // Assert - one row updated in place, one added.
    (await _repository.CountAsync()).Should().Be(2);

    var stored = await _repository.GetByCanonicalIdAsync(canonicalId);
    stored!.Score.Should().Be(0.8);
}

[Fact]
public async Task SaveManyAsync_ShouldHandleEmptyInput()
{
    // Act - an empty batch is a no-op, not an error.
    await _repository.SaveManyAsync([]);

    // Assert - nothing was written.
    (await _repository.CountAsync()).Should().Be(0);
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region DeleteAsync Tests
|
||||
|
||||
[Fact]
public async Task DeleteAsync_ShouldRemoveScore()
{
    // Arrange
    var score = CreateTestScore();
    await _repository.SaveAsync(score);

    // Sanity check: the row exists before deletion.
    (await _repository.GetByCanonicalIdAsync(score.CanonicalId)).Should().NotBeNull();

    // Act
    await _repository.DeleteAsync(score.CanonicalId);

    // Assert - the row is gone.
    (await _repository.GetByCanonicalIdAsync(score.CanonicalId)).Should().BeNull();
}

[Fact]
public async Task DeleteAsync_ShouldNotThrow_WhenNotExists()
{
    // Act / Assert - deleting an unknown id is a silent no-op.
    await _repository.DeleteAsync(Guid.NewGuid());
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region GetLowScoreCanonicalIdsAsync Tests
|
||||
|
||||
[Fact]
public async Task GetLowScoreCanonicalIdsAsync_ShouldReturnIdsBelowThreshold()
{
    // Arrange - two scores under the threshold and one above, all old enough.
    var computedAt = DateTimeOffset.UtcNow.AddDays(-10);
    var belowA = CreateTestScore(score: 0.1, computedAt: computedAt);
    var belowB = CreateTestScore(score: 0.15, computedAt: computedAt);
    var above = CreateTestScore(score: 0.8, computedAt: computedAt);

    await _repository.SaveAsync(belowA);
    await _repository.SaveAsync(belowB);
    await _repository.SaveAsync(above);

    // Act
    var result = await _repository.GetLowScoreCanonicalIdsAsync(
        threshold: 0.2,
        minAge: TimeSpan.FromDays(5),
        limit: 100);

    // Assert - only the sub-threshold ids are returned.
    result.Should().HaveCount(2);
    result.Should().Contain(belowA.CanonicalId);
    result.Should().Contain(belowB.CanonicalId);
    result.Should().NotContain(above.CanonicalId);
}

[Fact]
public async Task GetLowScoreCanonicalIdsAsync_ShouldRespectMinAge()
{
    // Arrange - identical low scores, one stale and one freshly computed.
    var stale = CreateTestScore(score: 0.1, computedAt: DateTimeOffset.UtcNow.AddDays(-10));
    var fresh = CreateTestScore(score: 0.1, computedAt: DateTimeOffset.UtcNow);

    await _repository.SaveAsync(stale);
    await _repository.SaveAsync(fresh);

    // Act
    var result = await _repository.GetLowScoreCanonicalIdsAsync(
        threshold: 0.2,
        minAge: TimeSpan.FromDays(5),
        limit: 100);

    // Assert - only the sufficiently old score qualifies.
    result.Should().ContainSingle();
    result.Should().Contain(stale.CanonicalId);
}

[Fact]
public async Task GetLowScoreCanonicalIdsAsync_ShouldRespectLimit()
{
    // Arrange - more qualifying rows than the requested page size.
    var computedAt = DateTimeOffset.UtcNow.AddDays(-10);
    for (var i = 0; i < 10; i++)
    {
        await _repository.SaveAsync(CreateTestScore(score: 0.1, computedAt: computedAt));
    }

    // Act
    var result = await _repository.GetLowScoreCanonicalIdsAsync(
        threshold: 0.2,
        minAge: TimeSpan.FromDays(5),
        limit: 5);

    // Assert - the page is capped at the limit.
    result.Should().HaveCount(5);
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region GetHighScoreCanonicalIdsAsync Tests
|
||||
|
||||
[Fact]
public async Task GetHighScoreCanonicalIdsAsync_ShouldReturnIdsAboveThreshold()
{
    // Arrange - two scores above the threshold and one below.
    var aboveA = CreateTestScore(score: 0.9);
    var aboveB = CreateTestScore(score: 0.75);
    var below = CreateTestScore(score: 0.3);

    await _repository.SaveAsync(aboveA);
    await _repository.SaveAsync(aboveB);
    await _repository.SaveAsync(below);

    // Act
    var result = await _repository.GetHighScoreCanonicalIdsAsync(
        threshold: 0.7,
        limit: 100);

    // Assert - only the above-threshold ids are returned.
    result.Should().HaveCount(2);
    result.Should().Contain(aboveA.CanonicalId);
    result.Should().Contain(aboveB.CanonicalId);
    result.Should().NotContain(below.CanonicalId);
}

[Fact]
public async Task GetHighScoreCanonicalIdsAsync_ShouldRespectLimit()
{
    // Arrange - ten qualifying rows, page size of five.
    for (var i = 0; i < 10; i++)
    {
        await _repository.SaveAsync(CreateTestScore(score: 0.8));
    }

    // Act
    var result = await _repository.GetHighScoreCanonicalIdsAsync(
        threshold: 0.7,
        limit: 5);

    // Assert
    result.Should().HaveCount(5);
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region GetTopScoresAsync Tests
|
||||
|
||||
[Fact]
public async Task GetTopScoresAsync_ShouldReturnTopScoresDescending()
{
    // Arrange - three scores saved in ascending order.
    await _repository.SaveAsync(CreateTestScore(score: 0.2));
    await _repository.SaveAsync(CreateTestScore(score: 0.5));
    await _repository.SaveAsync(CreateTestScore(score: 0.9));

    // Act
    var result = await _repository.GetTopScoresAsync(limit: 10);

    // Assert - ordered highest score first regardless of insertion order.
    result.Should().HaveCount(3);
    result[0].Score.Should().Be(0.9);
    result[1].Score.Should().Be(0.5);
    result[2].Score.Should().Be(0.2);
}

[Fact]
public async Task GetTopScoresAsync_ShouldRespectLimit()
{
    // Arrange - ten rows with distinct scores.
    for (var i = 0; i < 10; i++)
    {
        await _repository.SaveAsync(CreateTestScore(score: 0.1 * (i + 1)));
    }

    // Act
    var result = await _repository.GetTopScoresAsync(limit: 3);

    // Assert
    result.Should().HaveCount(3);
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region GetAllAsync Tests
|
||||
|
||||
[Fact]
public async Task GetAllAsync_ShouldReturnPaginatedResults()
{
    // Arrange - ten rows split into two pages of five.
    for (var i = 0; i < 10; i++)
    {
        await _repository.SaveAsync(CreateTestScore(score: 0.1 * (i + 1)));
    }

    // Act
    var firstPage = await _repository.GetAllAsync(offset: 0, limit: 5);
    var secondPage = await _repository.GetAllAsync(offset: 5, limit: 5);

    // Assert - both pages are full and share no rows.
    firstPage.Should().HaveCount(5);
    secondPage.Should().HaveCount(5);

    var firstIds = firstPage.Select(s => s.CanonicalId).ToHashSet();
    var secondIds = secondPage.Select(s => s.CanonicalId).ToHashSet();
    firstIds.Intersect(secondIds).Should().BeEmpty();
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region GetStaleCanonicalIdsAsync Tests
|
||||
|
||||
[Fact]
public async Task GetStaleCanonicalIdsAsync_ShouldReturnIdsOlderThanCutoff()
{
    // Arrange - one score computed a month ago and one computed just now.
    var stale = CreateTestScore(computedAt: DateTimeOffset.UtcNow.AddDays(-30));
    var fresh = CreateTestScore(computedAt: DateTimeOffset.UtcNow);

    await _repository.SaveAsync(stale);
    await _repository.SaveAsync(fresh);

    // Act
    var result = await _repository.GetStaleCanonicalIdsAsync(
        staleAfter: DateTimeOffset.UtcNow.AddDays(-7),
        limit: 100);

    // Assert - only the row older than the cutoff is reported.
    result.Should().ContainSingle();
    result.Should().Contain(stale.CanonicalId);
}

[Fact]
public async Task GetStaleCanonicalIdsAsync_ShouldRespectLimit()
{
    // Arrange - ten stale rows, page size of five.
    var computedAt = DateTimeOffset.UtcNow.AddDays(-30);
    for (var i = 0; i < 10; i++)
    {
        await _repository.SaveAsync(CreateTestScore(computedAt: computedAt));
    }

    // Act
    var result = await _repository.GetStaleCanonicalIdsAsync(
        staleAfter: DateTimeOffset.UtcNow.AddDays(-7),
        limit: 5);

    // Assert
    result.Should().HaveCount(5);
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region CountAsync Tests
|
||||
|
||||
[Fact]
public async Task CountAsync_ShouldReturnTotalCount()
{
    // Arrange - three persisted rows.
    await _repository.SaveAsync(CreateTestScore());
    await _repository.SaveAsync(CreateTestScore());
    await _repository.SaveAsync(CreateTestScore());

    // Act
    var total = await _repository.CountAsync();

    // Assert
    total.Should().Be(3);
}

[Fact]
public async Task CountAsync_ShouldReturnZero_WhenEmpty()
{
    // Act - the table was truncated in InitializeAsync, so nothing exists.
    var total = await _repository.CountAsync();

    // Assert
    total.Should().Be(0);
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region GetDistributionAsync Tests
|
||||
|
||||
[Fact]
public async Task GetDistributionAsync_ShouldReturnCorrectDistribution()
{
    // Arrange - seed two high (>= 0.7), one medium (0.4-0.7), one low
    // (0.2-0.4), and two "none" (< 0.2) scores.
    await _repository.SaveAsync(CreateTestScore(score: 0.9));
    await _repository.SaveAsync(CreateTestScore(score: 0.8));
    await _repository.SaveAsync(CreateTestScore(score: 0.5));
    await _repository.SaveAsync(CreateTestScore(score: 0.3));
    await _repository.SaveAsync(CreateTestScore(score: 0.1));
    await _repository.SaveAsync(CreateTestScore(score: 0.05));

    // Act
    var distribution = await _repository.GetDistributionAsync();

    // Assert - tier counts and summary statistics reflect the seeded data.
    distribution.TotalCount.Should().Be(6);
    distribution.HighCount.Should().Be(2);
    distribution.MediumCount.Should().Be(1);
    distribution.LowCount.Should().Be(1);
    distribution.NoneCount.Should().Be(2);
    distribution.AverageScore.Should().BeGreaterThan(0);
    distribution.MedianScore.Should().BeGreaterThan(0);
}

[Fact]
public async Task GetDistributionAsync_ShouldReturnEmptyDistribution_WhenNoScores()
{
    // Act - no rows have been saved.
    var distribution = await _repository.GetDistributionAsync();

    // Assert - all counts and statistics are zeroed rather than null or NaN.
    distribution.TotalCount.Should().Be(0);
    distribution.HighCount.Should().Be(0);
    distribution.MediumCount.Should().Be(0);
    distribution.LowCount.Should().Be(0);
    distribution.NoneCount.Should().Be(0);
    distribution.AverageScore.Should().Be(0);
    distribution.MedianScore.Should().Be(0);
}

[Fact]
public async Task GetScoreDistributionAsync_ShouldBeAliasForGetDistributionAsync()
{
    // Arrange
    await _repository.SaveAsync(CreateTestScore(score: 0.9));
    await _repository.SaveAsync(CreateTestScore(score: 0.5));

    // Act - call both entry points over the same data.
    var canonical = await _repository.GetDistributionAsync();
    var alias = await _repository.GetScoreDistributionAsync();

    // Assert - the alias reports the same figures.
    canonical.TotalCount.Should().Be(alias.TotalCount);
    canonical.HighCount.Should().Be(alias.HighCount);
    canonical.AverageScore.Should().Be(alias.AverageScore);
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Edge Cases
|
||||
|
||||
[Fact]
public async Task SaveAsync_ShouldHandleMaxScore()
{
    // Arrange - a score at the upper boundary of the [0.0, 1.0] range.
    var score = CreateTestScore(score: 1.0);

    // Act
    await _repository.SaveAsync(score);

    // Assert - assert non-null explicitly so a missing row fails with a clear
    // assertion message instead of a NullReferenceException triggered via the
    // null-forgiving '!' operator.
    var result = await _repository.GetByCanonicalIdAsync(score.CanonicalId);
    result.Should().NotBeNull();
    result!.Score.Should().Be(1.0);
}
|
||||
|
||||
[Fact]
public async Task SaveAsync_ShouldHandleMinScore()
{
    // Arrange - a score at the lower boundary of the [0.0, 1.0] range.
    var score = CreateTestScore(score: 0.0);

    // Act
    await _repository.SaveAsync(score);

    // Assert - assert non-null explicitly so a missing row fails with a clear
    // assertion message instead of a NullReferenceException triggered via the
    // null-forgiving '!' operator.
    var result = await _repository.GetByCanonicalIdAsync(score.CanonicalId);
    result.Should().NotBeNull();
    result!.Score.Should().Be(0.0);
}
|
||||
|
||||
[Fact]
public async Task SaveAsync_ShouldHandleManyReasons()
{
    // Arrange - a reasons array larger than the typical built-in set,
    // including custom entries, to verify round-tripping of the full list.
    var reasons = new[] { "in_sbom", "reachable", "deployed", "no_vex_na", "recent", "custom_1", "custom_2" };
    var score = CreateTestScore(reasons: reasons);

    // Act
    await _repository.SaveAsync(score);

    // Assert - assert non-null explicitly so a missing row fails with a clear
    // assertion message instead of a NullReferenceException triggered via the
    // null-forgiving '!' operator.
    var result = await _repository.GetByCanonicalIdAsync(score.CanonicalId);
    result.Should().NotBeNull();
    result!.Reasons.Should().BeEquivalentTo(reasons);
}
|
||||
|
||||
[Fact]
public async Task GetTopScoresAsync_ShouldOrderByScoreThenComputedAt()
{
    // Arrange - two entries tied on score but an hour apart on computed_at.
    var earlier = CreateTestScore(score: 0.8, computedAt: DateTimeOffset.UtcNow.AddHours(-1));
    var later = CreateTestScore(score: 0.8, computedAt: DateTimeOffset.UtcNow);

    await _repository.SaveAsync(earlier);
    await _repository.SaveAsync(later);

    // Act
    var result = await _repository.GetTopScoresAsync(limit: 10);

    // Assert - score ties break by computed_at descending, so the newer
    // entry is returned ahead of the older one.
    result.Should().HaveCount(2);
    result[0].CanonicalId.Should().Be(later.CanonicalId);
    result[1].CanonicalId.Should().Be(earlier.CanonicalId);
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Test Helpers
|
||||
|
||||
/// <summary>
/// Builds an <see cref="InterestScore"/> with sensible defaults for tests;
/// any field can be overridden via the optional parameters.
/// </summary>
private static InterestScore CreateTestScore(
    Guid? canonicalId = null,
    double score = 0.5,
    string[]? reasons = null,
    Guid? lastSeenInBuild = null,
    DateTimeOffset? computedAt = null)
{
    // Resolve each optional argument to its default before construction.
    var id = canonicalId ?? Guid.NewGuid();
    var effectiveReasons = reasons ?? ["in_sbom"];
    var timestamp = computedAt ?? DateTimeOffset.UtcNow;

    return new InterestScore
    {
        CanonicalId = id,
        Score = score,
        Reasons = effectiveReasons,
        LastSeenInBuild = lastSeenInBuild,
        ComputedAt = timestamp
    };
}
|
||||
|
||||
#endregion
|
||||
}
|
||||
Reference in New Issue
Block a user