Partially implemented or unimplemented features — now implemented
This commit is contained in:
@@ -0,0 +1,677 @@
|
||||
|
||||
using Microsoft.Extensions.Logging;
|
||||
using System.Collections.Concurrent;
|
||||
using System.Collections.Immutable;
|
||||
|
||||
namespace StellaOps.ReleaseOrchestrator.Performance.Baseline;
|
||||
|
||||
/// <summary>
|
||||
/// Records and compares performance baselines for regression detection.
|
||||
/// </summary>
|
||||
public sealed class BaselineTracker
|
||||
{
|
||||
private readonly IBaselineStore _store;
|
||||
private readonly TimeProvider _timeProvider;
|
||||
private readonly BaselineTrackerConfig _config;
|
||||
private readonly ILogger<BaselineTracker> _logger;
|
||||
private readonly ConcurrentDictionary<string, MetricWindow> _activeWindows = new();
|
||||
|
||||
public BaselineTracker(
|
||||
IBaselineStore store,
|
||||
TimeProvider timeProvider,
|
||||
BaselineTrackerConfig config,
|
||||
ILogger<BaselineTracker> logger)
|
||||
{
|
||||
_store = store;
|
||||
_timeProvider = timeProvider;
|
||||
_config = config;
|
||||
_logger = logger;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Records a performance metric observation.
|
||||
/// </summary>
|
||||
public async Task RecordMetricAsync(
|
||||
PerformanceMetric metric,
|
||||
CancellationToken ct = default)
|
||||
{
|
||||
ArgumentNullException.ThrowIfNull(metric);
|
||||
|
||||
metric = metric with
|
||||
{
|
||||
Id = metric.Id == Guid.Empty ? Guid.NewGuid() : metric.Id,
|
||||
RecordedAt = _timeProvider.GetUtcNow()
|
||||
};
|
||||
|
||||
// Add to active window
|
||||
var window = GetOrCreateWindow(metric.MetricName);
|
||||
window.AddObservation(metric);
|
||||
|
||||
// Persist if window is complete
|
||||
if (window.IsComplete)
|
||||
{
|
||||
await FlushWindowAsync(window, ct);
|
||||
}
|
||||
|
||||
_logger.LogTrace(
|
||||
"Recorded metric {MetricName}: {Value} {Unit}",
|
||||
metric.MetricName, metric.Value, metric.Unit);
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Compares current metrics against baseline and detects regressions.
|
||||
/// </summary>
|
||||
public async Task<RegressionAnalysis> AnalyzeAsync(
|
||||
string metricName,
|
||||
CancellationToken ct = default)
|
||||
{
|
||||
var baseline = await _store.GetBaselineAsync(metricName, ct);
|
||||
if (baseline is null)
|
||||
{
|
||||
_logger.LogInformation(
|
||||
"No baseline found for metric {MetricName}",
|
||||
metricName);
|
||||
|
||||
return new RegressionAnalysis
|
||||
{
|
||||
MetricName = metricName,
|
||||
HasBaseline = false,
|
||||
Status = RegressionStatus.NoBaseline,
|
||||
AnalyzedAt = _timeProvider.GetUtcNow()
|
||||
};
|
||||
}
|
||||
|
||||
var window = GetOrCreateWindow(metricName);
|
||||
var observations = window.GetObservations();
|
||||
|
||||
if (observations.Length < _config.MinObservationsForAnalysis)
|
||||
{
|
||||
return new RegressionAnalysis
|
||||
{
|
||||
MetricName = metricName,
|
||||
HasBaseline = true,
|
||||
Baseline = baseline,
|
||||
Status = RegressionStatus.InsufficientData,
|
||||
AnalyzedAt = _timeProvider.GetUtcNow(),
|
||||
ObservationCount = observations.Length
|
||||
};
|
||||
}
|
||||
|
||||
// Calculate current statistics
|
||||
var currentStats = CalculateStatistics(observations);
|
||||
|
||||
// Compare against baseline
|
||||
var comparison = CompareWithBaseline(currentStats, baseline);
|
||||
|
||||
var status = DetermineRegressionStatus(comparison);
|
||||
|
||||
_logger.LogInformation(
|
||||
"Regression analysis for {MetricName}: {Status} (delta: {Delta:P2})",
|
||||
metricName, status, comparison.PercentChange / 100);
|
||||
|
||||
return new RegressionAnalysis
|
||||
{
|
||||
MetricName = metricName,
|
||||
HasBaseline = true,
|
||||
Baseline = baseline,
|
||||
CurrentStats = currentStats,
|
||||
Comparison = comparison,
|
||||
Status = status,
|
||||
AnalyzedAt = _timeProvider.GetUtcNow(),
|
||||
ObservationCount = observations.Length
|
||||
};
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Establishes a new baseline from current observations.
|
||||
/// </summary>
|
||||
public async Task<PerformanceBaseline> EstablishBaselineAsync(
|
||||
string metricName,
|
||||
CancellationToken ct = default)
|
||||
{
|
||||
var window = GetOrCreateWindow(metricName);
|
||||
var observations = window.GetObservations();
|
||||
|
||||
if (observations.Length < _config.MinObservationsForBaseline)
|
||||
{
|
||||
throw new InvalidOperationException(
|
||||
$"Insufficient observations for baseline. Required: {_config.MinObservationsForBaseline}, " +
|
||||
$"Available: {observations.Length}");
|
||||
}
|
||||
|
||||
var stats = CalculateStatistics(observations);
|
||||
|
||||
var baseline = new PerformanceBaseline
|
||||
{
|
||||
Id = Guid.NewGuid(),
|
||||
MetricName = metricName,
|
||||
Mean = stats.Mean,
|
||||
Median = stats.Median,
|
||||
P90 = stats.P90,
|
||||
P95 = stats.P95,
|
||||
P99 = stats.P99,
|
||||
Min = stats.Min,
|
||||
Max = stats.Max,
|
||||
StandardDeviation = stats.StandardDeviation,
|
||||
SampleCount = observations.Length,
|
||||
EstablishedAt = _timeProvider.GetUtcNow(),
|
||||
ValidUntil = _timeProvider.GetUtcNow() + _config.BaselineValidity
|
||||
};
|
||||
|
||||
await _store.SaveBaselineAsync(baseline, ct);
|
||||
|
||||
_logger.LogInformation(
|
||||
"Established baseline for {MetricName}: mean={Mean}, p95={P95}",
|
||||
metricName, baseline.Mean, baseline.P95);
|
||||
|
||||
return baseline;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Updates an existing baseline with new observations using exponential smoothing.
|
||||
/// </summary>
|
||||
public async Task<PerformanceBaseline> UpdateBaselineAsync(
|
||||
string metricName,
|
||||
CancellationToken ct = default)
|
||||
{
|
||||
var existingBaseline = await _store.GetBaselineAsync(metricName, ct);
|
||||
if (existingBaseline is null)
|
||||
{
|
||||
return await EstablishBaselineAsync(metricName, ct);
|
||||
}
|
||||
|
||||
var window = GetOrCreateWindow(metricName);
|
||||
var observations = window.GetObservations();
|
||||
|
||||
if (observations.Length < _config.MinObservationsForAnalysis)
|
||||
{
|
||||
throw new InvalidOperationException(
|
||||
$"Insufficient new observations for update. Required: {_config.MinObservationsForAnalysis}");
|
||||
}
|
||||
|
||||
var newStats = CalculateStatistics(observations);
|
||||
var alpha = _config.SmoothingFactor;
|
||||
|
||||
var updatedBaseline = existingBaseline with
|
||||
{
|
||||
Mean = alpha * newStats.Mean + (1 - alpha) * existingBaseline.Mean,
|
||||
Median = alpha * newStats.Median + (1 - alpha) * existingBaseline.Median,
|
||||
P90 = alpha * newStats.P90 + (1 - alpha) * existingBaseline.P90,
|
||||
P95 = alpha * newStats.P95 + (1 - alpha) * existingBaseline.P95,
|
||||
P99 = alpha * newStats.P99 + (1 - alpha) * existingBaseline.P99,
|
||||
Min = Math.Min(newStats.Min, existingBaseline.Min),
|
||||
Max = Math.Max(newStats.Max, existingBaseline.Max),
|
||||
StandardDeviation = alpha * newStats.StandardDeviation +
|
||||
(1 - alpha) * existingBaseline.StandardDeviation,
|
||||
SampleCount = existingBaseline.SampleCount + observations.Length,
|
||||
EstablishedAt = _timeProvider.GetUtcNow(),
|
||||
ValidUntil = _timeProvider.GetUtcNow() + _config.BaselineValidity
|
||||
};
|
||||
|
||||
await _store.SaveBaselineAsync(updatedBaseline, ct);
|
||||
|
||||
_logger.LogInformation(
|
||||
"Updated baseline for {MetricName}: mean={Mean} (was {OldMean})",
|
||||
metricName, updatedBaseline.Mean, existingBaseline.Mean);
|
||||
|
||||
return updatedBaseline;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Gets the current regression status for all tracked metrics.
|
||||
/// </summary>
|
||||
public async Task<ImmutableArray<RegressionAnalysis>> AnalyzeAllAsync(
|
||||
CancellationToken ct = default)
|
||||
{
|
||||
var results = new List<RegressionAnalysis>();
|
||||
|
||||
foreach (var metricName in _activeWindows.Keys)
|
||||
{
|
||||
try
|
||||
{
|
||||
var analysis = await AnalyzeAsync(metricName, ct);
|
||||
results.Add(analysis);
|
||||
}
|
||||
catch (Exception ex)
|
||||
{
|
||||
_logger.LogWarning(ex,
|
||||
"Error analyzing metric {MetricName}",
|
||||
metricName);
|
||||
}
|
||||
}
|
||||
|
||||
return results.ToImmutableArray();
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Gets current tracker statistics.
|
||||
/// </summary>
|
||||
public BaselineTrackerStatistics GetStatistics()
|
||||
{
|
||||
return new BaselineTrackerStatistics
|
||||
{
|
||||
ActiveMetrics = _activeWindows.Count,
|
||||
TotalObservations = _activeWindows.Values.Sum(w => w.ObservationCount),
|
||||
OldestObservation = _activeWindows.Values
|
||||
.SelectMany(w => w.GetObservations())
|
||||
.OrderBy(o => o.RecordedAt)
|
||||
.FirstOrDefault()?.RecordedAt,
|
||||
Timestamp = _timeProvider.GetUtcNow()
|
||||
};
|
||||
}
|
||||
|
||||
private MetricWindow GetOrCreateWindow(string metricName)
|
||||
{
|
||||
return _activeWindows.GetOrAdd(metricName, _ => new MetricWindow(
|
||||
metricName,
|
||||
_config.WindowSize,
|
||||
_config.WindowDuration,
|
||||
_timeProvider));
|
||||
}
|
||||
|
||||
private async Task FlushWindowAsync(MetricWindow window, CancellationToken ct)
|
||||
{
|
||||
var observations = window.GetObservations();
|
||||
if (observations.Length == 0)
|
||||
{
|
||||
return;
|
||||
}
|
||||
|
||||
var aggregate = new MetricAggregate
|
||||
{
|
||||
Id = Guid.NewGuid(),
|
||||
MetricName = window.MetricName,
|
||||
Statistics = CalculateStatistics(observations),
|
||||
SampleCount = observations.Length,
|
||||
WindowStart = observations.Min(o => o.RecordedAt),
|
||||
WindowEnd = observations.Max(o => o.RecordedAt),
|
||||
AggregatedAt = _timeProvider.GetUtcNow()
|
||||
};
|
||||
|
||||
await _store.SaveAggregateAsync(aggregate, ct);
|
||||
window.Clear();
|
||||
|
||||
_logger.LogDebug(
|
||||
"Flushed metric window for {MetricName}: {Count} observations",
|
||||
window.MetricName, observations.Length);
|
||||
}
|
||||
|
||||
private MetricStatistics CalculateStatistics(ImmutableArray<PerformanceMetric> observations)
|
||||
{
|
||||
if (observations.Length == 0)
|
||||
{
|
||||
return new MetricStatistics();
|
||||
}
|
||||
|
||||
var values = observations.Select(o => o.Value).OrderBy(v => v).ToArray();
|
||||
var count = values.Length;
|
||||
|
||||
var mean = values.Average();
|
||||
var variance = values.Sum(v => Math.Pow(v - mean, 2)) / count;
|
||||
var stdDev = Math.Sqrt(variance);
|
||||
|
||||
return new MetricStatistics
|
||||
{
|
||||
Mean = mean,
|
||||
Median = Percentile(values, 50),
|
||||
P90 = Percentile(values, 90),
|
||||
P95 = Percentile(values, 95),
|
||||
P99 = Percentile(values, 99),
|
||||
Min = values[0],
|
||||
Max = values[^1],
|
||||
StandardDeviation = stdDev,
|
||||
SampleCount = count
|
||||
};
|
||||
}
|
||||
|
||||
private static double Percentile(double[] sortedValues, double percentile)
|
||||
{
|
||||
if (sortedValues.Length == 0)
|
||||
{
|
||||
return 0;
|
||||
}
|
||||
|
||||
var index = (percentile / 100) * (sortedValues.Length - 1);
|
||||
var lower = (int)Math.Floor(index);
|
||||
var upper = (int)Math.Ceiling(index);
|
||||
|
||||
if (lower == upper)
|
||||
{
|
||||
return sortedValues[lower];
|
||||
}
|
||||
|
||||
var weight = index - lower;
|
||||
return sortedValues[lower] * (1 - weight) + sortedValues[upper] * weight;
|
||||
}
|
||||
|
||||
private BaselineComparison CompareWithBaseline(
|
||||
MetricStatistics current,
|
||||
PerformanceBaseline baseline)
|
||||
{
|
||||
var meanDelta = current.Mean - baseline.Mean;
|
||||
var percentChange = baseline.Mean != 0
|
||||
? (meanDelta / baseline.Mean) * 100
|
||||
: 0;
|
||||
|
||||
var p95Delta = current.P95 - baseline.P95;
|
||||
var p95PercentChange = baseline.P95 != 0
|
||||
? (p95Delta / baseline.P95) * 100
|
||||
: 0;
|
||||
|
||||
// Calculate Z-score (how many standard deviations from baseline mean)
|
||||
var zScore = baseline.StandardDeviation != 0
|
||||
? meanDelta / baseline.StandardDeviation
|
||||
: 0;
|
||||
|
||||
return new BaselineComparison
|
||||
{
|
||||
MeanDelta = meanDelta,
|
||||
PercentChange = percentChange,
|
||||
P95Delta = p95Delta,
|
||||
P95PercentChange = p95PercentChange,
|
||||
ZScore = zScore,
|
||||
IsSignificant = Math.Abs(zScore) > _config.SignificanceThreshold
|
||||
};
|
||||
}
|
||||
|
||||
private RegressionStatus DetermineRegressionStatus(BaselineComparison comparison)
|
||||
{
|
||||
if (!comparison.IsSignificant)
|
||||
{
|
||||
return RegressionStatus.Normal;
|
||||
}
|
||||
|
||||
if (comparison.PercentChange > _config.RegressionThresholdPercent)
|
||||
{
|
||||
if (comparison.PercentChange > _config.SevereRegressionThresholdPercent)
|
||||
{
|
||||
return RegressionStatus.SevereRegression;
|
||||
}
|
||||
|
||||
return RegressionStatus.Regression;
|
||||
}
|
||||
|
||||
if (comparison.PercentChange < -_config.ImprovementThresholdPercent)
|
||||
{
|
||||
return RegressionStatus.Improvement;
|
||||
}
|
||||
|
||||
return RegressionStatus.Normal;
|
||||
}
|
||||
}
|
||||
|
||||
/// <summary>
/// Thread-safe sliding window of observations for a single metric.
/// The window reports itself complete once it reaches its size cap or once
/// its oldest observation exceeds the configured maximum age.
/// </summary>
internal sealed class MetricWindow
{
    private readonly List<PerformanceMetric> _observations = [];
    private readonly object _lock = new();
    private readonly int _maxSize;
    private readonly TimeSpan _maxDuration;
    private readonly TimeProvider _timeProvider;

    public MetricWindow(
        string metricName,
        int maxSize,
        TimeSpan maxDuration,
        TimeProvider timeProvider)
    {
        MetricName = metricName;
        _timeProvider = timeProvider;
        _maxDuration = maxDuration;
        _maxSize = maxSize;
    }

    /// <summary>Name of the metric this window buffers.</summary>
    public string MetricName { get; }

    /// <summary>Current number of buffered observations.</summary>
    public int ObservationCount => _observations.Count;

    /// <summary>
    /// True when the window holds at least the maximum number of
    /// observations, or when its oldest entry is at least as old as the
    /// maximum duration. An empty window is never complete.
    /// </summary>
    public bool IsComplete
    {
        get
        {
            lock (_lock)
            {
                if (_observations.Count >= _maxSize)
                {
                    return true;
                }

                if (_observations.Count == 0)
                {
                    return false;
                }

                var oldest = _observations.Min(o => o.RecordedAt);
                return _timeProvider.GetUtcNow() - oldest >= _maxDuration;
            }
        }
    }

    /// <summary>Appends one observation to the window.</summary>
    public void AddObservation(PerformanceMetric metric)
    {
        lock (_lock)
        {
            _observations.Add(metric);
        }
    }

    /// <summary>Returns an immutable snapshot of the buffered observations.</summary>
    public ImmutableArray<PerformanceMetric> GetObservations()
    {
        lock (_lock)
        {
            return _observations.ToImmutableArray();
        }
    }

    /// <summary>Discards all buffered observations.</summary>
    public void Clear()
    {
        lock (_lock)
        {
            _observations.Clear();
        }
    }
}
|
||||
|
||||
/// <summary>
/// Configuration for baseline tracking. All values have production-oriented
/// defaults; see individual members.
/// </summary>
public sealed record BaselineTrackerConfig
{
    /// <summary>
    /// Maximum observations per metric window.
    /// </summary>
    public int WindowSize { get; init; } = 1000;

    /// <summary>
    /// Maximum duration of a metric window.
    /// </summary>
    public TimeSpan WindowDuration { get; init; } = TimeSpan.FromHours(1);

    /// <summary>
    /// Minimum observations required to establish a baseline.
    /// </summary>
    public int MinObservationsForBaseline { get; init; } = 100;

    /// <summary>
    /// Minimum observations required for analysis.
    /// Also the minimum number of new observations for a baseline update.
    /// </summary>
    public int MinObservationsForAnalysis { get; init; } = 30;

    /// <summary>
    /// How long a baseline is valid before requiring refresh.
    /// </summary>
    public TimeSpan BaselineValidity { get; init; } = TimeSpan.FromDays(7);

    /// <summary>
    /// Smoothing factor for exponential moving average (0-1).
    /// Weight given to NEW statistics during a baseline update.
    /// </summary>
    public double SmoothingFactor { get; init; } = 0.3;

    /// <summary>
    /// Z-score threshold for statistical significance.
    /// </summary>
    public double SignificanceThreshold { get; init; } = 2.0;

    /// <summary>
    /// Percent increase to flag as regression.
    /// </summary>
    public double RegressionThresholdPercent { get; init; } = 10.0;

    /// <summary>
    /// Percent increase to flag as severe regression.
    /// </summary>
    public double SevereRegressionThresholdPercent { get; init; } = 25.0;

    /// <summary>
    /// Percent decrease to flag as improvement.
    /// </summary>
    public double ImprovementThresholdPercent { get; init; } = 10.0;
}
|
||||
|
||||
/// <summary>
/// A single performance metric observation.
/// </summary>
public sealed record PerformanceMetric
{
    // Assigned by the tracker when empty; callers may supply their own id.
    public Guid Id { get; init; }
    public required string MetricName { get; init; }
    public required double Value { get; init; }
    public required MetricUnit Unit { get; init; }
    // Stamped by BaselineTracker.RecordMetricAsync; caller values are overwritten.
    public DateTimeOffset RecordedAt { get; init; }
    // Optional dimensional tags; never null (defaults to empty).
    public ImmutableDictionary<string, string> Tags { get; init; } =
        ImmutableDictionary<string, string>.Empty;
}
|
||||
|
||||
/// <summary>
/// A stored performance baseline. Produced by
/// BaselineTracker.EstablishBaselineAsync and refined by UpdateBaselineAsync
/// (exponentially smoothed statistics, widening Min/Max, cumulative
/// SampleCount).
/// </summary>
public sealed record PerformanceBaseline
{
    public required Guid Id { get; init; }
    public required string MetricName { get; init; }
    public required double Mean { get; init; }
    public required double Median { get; init; }
    public required double P90 { get; init; }
    public required double P95 { get; init; }
    public required double P99 { get; init; }
    public required double Min { get; init; }
    public required double Max { get; init; }
    public required double StandardDeviation { get; init; }
    // Cumulative across establish + all subsequent updates.
    public required int SampleCount { get; init; }
    // Refreshed on every update, not only on initial establishment.
    public required DateTimeOffset EstablishedAt { get; init; }
    public required DateTimeOffset ValidUntil { get; init; }
}
|
||||
|
||||
/// <summary>
/// Aggregated metric statistics. All-zero defaults represent an empty sample
/// (see BaselineTracker.CalculateStatistics). Standard deviation is the
/// population form (divides by N).
/// </summary>
public sealed record MetricStatistics
{
    public double Mean { get; init; }
    public double Median { get; init; }
    public double P90 { get; init; }
    public double P95 { get; init; }
    public double P99 { get; init; }
    public double Min { get; init; }
    public double Max { get; init; }
    public double StandardDeviation { get; init; }
    public int SampleCount { get; init; }
}
|
||||
|
||||
/// <summary>
/// Stored metric aggregate: the statistics of one flushed metric window,
/// persisted via IBaselineStore.SaveAggregateAsync.
/// </summary>
public sealed record MetricAggregate
{
    public required Guid Id { get; init; }
    public required string MetricName { get; init; }
    public required MetricStatistics Statistics { get; init; }
    public required int SampleCount { get; init; }
    // Timestamps of the earliest/latest observation in the flushed window.
    public required DateTimeOffset WindowStart { get; init; }
    public required DateTimeOffset WindowEnd { get; init; }
    public required DateTimeOffset AggregatedAt { get; init; }
}
|
||||
|
||||
/// <summary>
/// Comparison between current metrics and baseline.
/// </summary>
public sealed record BaselineComparison
{
    // Current mean minus baseline mean.
    public required double MeanDelta { get; init; }
    // MeanDelta as a percentage of the baseline mean (0 when baseline mean is 0).
    public required double PercentChange { get; init; }
    public required double P95Delta { get; init; }
    public required double P95PercentChange { get; init; }
    // MeanDelta expressed in baseline standard deviations (0 when stddev is 0).
    public required double ZScore { get; init; }
    // True when |ZScore| exceeds the configured significance threshold.
    public required bool IsSignificant { get; init; }
}
|
||||
|
||||
/// <summary>
/// Result of regression analysis.
/// </summary>
public sealed record RegressionAnalysis
{
    public required string MetricName { get; init; }
    public required bool HasBaseline { get; init; }
    // Null when HasBaseline is false.
    public PerformanceBaseline? Baseline { get; init; }
    // Null when Status is NoBaseline or InsufficientData.
    public MetricStatistics? CurrentStats { get; init; }
    // Null when Status is NoBaseline or InsufficientData.
    public BaselineComparison? Comparison { get; init; }
    public required RegressionStatus Status { get; init; }
    public required DateTimeOffset AnalyzedAt { get; init; }
    // Window observations the analysis was based on; 0 when no baseline existed.
    public int ObservationCount { get; init; }
}
|
||||
|
||||
/// <summary>
/// Status of regression analysis.
/// </summary>
public enum RegressionStatus
{
    // No stored baseline exists for the metric.
    NoBaseline,
    // Fewer observations than MinObservationsForAnalysis.
    InsufficientData,
    // Delta not statistically significant, or within thresholds.
    Normal,
    // Significant decrease beyond ImprovementThresholdPercent.
    Improvement,
    // Significant increase beyond RegressionThresholdPercent.
    Regression,
    // Significant increase beyond SevereRegressionThresholdPercent.
    SevereRegression
}
|
||||
|
||||
/// <summary>
/// Units for performance metrics. Purely descriptive: the tracker compares
/// raw values and performs no unit conversion.
/// </summary>
public enum MetricUnit
{
    Milliseconds,
    Seconds,
    Bytes,
    Kilobytes,
    Megabytes,
    Count,
    Percent,
    RequestsPerSecond
}
|
||||
|
||||
/// <summary>
/// Statistics about the baseline tracker's in-memory state (windows only;
/// persisted aggregates are not counted).
/// </summary>
public sealed record BaselineTrackerStatistics
{
    public required int ActiveMetrics { get; init; }
    public required int TotalObservations { get; init; }
    // Null when no window currently holds observations.
    public DateTimeOffset? OldestObservation { get; init; }
    public required DateTimeOffset Timestamp { get; init; }
}
|
||||
|
||||
/// <summary>
/// Interface for storing baselines and aggregates.
/// </summary>
public interface IBaselineStore
{
    /// <summary>Returns the stored baseline for the metric, or null if none exists.</summary>
    Task<PerformanceBaseline?> GetBaselineAsync(string metricName, CancellationToken ct = default);
    /// <summary>Persists (creates or replaces) a baseline.</summary>
    Task SaveBaselineAsync(PerformanceBaseline baseline, CancellationToken ct = default);
    /// <summary>Persists one flushed-window aggregate.</summary>
    Task SaveAggregateAsync(MetricAggregate aggregate, CancellationToken ct = default);
    /// <summary>Returns aggregates for the metric within the given time range.</summary>
    Task<ImmutableArray<MetricAggregate>> GetAggregatesAsync(
        string metricName,
        DateTimeOffset from,
        DateTimeOffset to,
        CancellationToken ct = default);
}
|
||||
@@ -0,0 +1,685 @@
|
||||
|
||||
using Microsoft.Extensions.Hosting;
|
||||
using Microsoft.Extensions.Logging;
|
||||
using System.Collections.Concurrent;
|
||||
using System.Collections.Immutable;
|
||||
|
||||
namespace StellaOps.ReleaseOrchestrator.Performance.Pooling;
|
||||
|
||||
/// <summary>
/// Manages connection pools for registry and agent connections with configurable idle timeouts.
/// Runs as a hosted background service that periodically sweeps idle connections,
/// and enforces a process-wide connection cap via a global semaphore.
/// </summary>
public sealed class ConnectionPoolManager : BackgroundService, IAsyncDisposable
{
    private readonly IConnectionFactory _connectionFactory;
    private readonly TimeProvider _timeProvider;
    private readonly ConnectionPoolConfig _config;
    private readonly ILogger<ConnectionPoolManager> _logger;
    // One pool per "{type}:{endpoint}" key, created lazily on first acquire.
    private readonly ConcurrentDictionary<string, ConnectionPool> _pools = new();
    // Caps connections across ALL pools; each pool additionally caps itself.
    private readonly SemaphoreSlim _globalLimiter;
    private bool _disposed;

    public ConnectionPoolManager(
        IConnectionFactory connectionFactory,
        TimeProvider timeProvider,
        ConnectionPoolConfig config,
        ILogger<ConnectionPoolManager> logger)
    {
        _connectionFactory = connectionFactory;
        _timeProvider = timeProvider;
        _config = config;
        _logger = logger;
        _globalLimiter = new SemaphoreSlim(config.MaxTotalConnections);
    }

    /// <summary>
    /// Acquires a connection from the pool for the specified endpoint.
    /// Waits until global capacity is available; the returned lease is handed
    /// the global limiter so the permit can be released when the lease ends.
    /// </summary>
    /// <param name="endpoint">Target endpoint; must be non-empty.</param>
    /// <param name="connectionType">Kind of connection, used to select per-type limits.</param>
    /// <param name="ct">Cancels waiting for capacity or connection creation.</param>
    public async Task<IConnectionLease> AcquireAsync(
        string endpoint,
        ConnectionType connectionType,
        CancellationToken ct = default)
    {
        ObjectDisposedException.ThrowIf(_disposed, this);
        ArgumentException.ThrowIfNullOrWhiteSpace(endpoint);

        var poolKey = GetPoolKey(endpoint, connectionType);
        var pool = GetOrCreatePool(poolKey, endpoint, connectionType);

        _logger.LogDebug(
            "Acquiring connection for {Endpoint} ({Type})",
            endpoint, connectionType);

        // Wait for global capacity
        await _globalLimiter.WaitAsync(ct);

        try
        {
            var connection = await pool.AcquireAsync(ct);

            return new ConnectionLease(
                connection,
                pool,
                _globalLimiter,
                _timeProvider);
        }
        catch
        {
            // Give the global permit back if the pool-level acquire failed.
            _globalLimiter.Release();
            throw;
        }
    }

    /// <summary>
    /// Gets statistics for all connection pools.
    /// </summary>
    public ConnectionPoolStatistics GetStatistics()
    {
        var poolStats = _pools.Values
            .Select(p => p.GetStatistics())
            .ToImmutableArray();

        return new ConnectionPoolStatistics
        {
            TotalPools = _pools.Count,
            TotalConnections = poolStats.Sum(s => s.TotalConnections),
            ActiveConnections = poolStats.Sum(s => s.ActiveConnections),
            IdleConnections = poolStats.Sum(s => s.IdleConnections),
            PoolDetails = poolStats,
            GlobalCapacityUsed = _config.MaxTotalConnections - _globalLimiter.CurrentCount,
            GlobalCapacityAvailable = _globalLimiter.CurrentCount,
            Timestamp = _timeProvider.GetUtcNow()
        };
    }

    /// <summary>
    /// Forces cleanup of idle connections across all pools.
    /// </summary>
    public async Task CleanupIdleConnectionsAsync(CancellationToken ct = default)
    {
        _logger.LogInformation("Cleaning up idle connections across all pools");

        var tasks = _pools.Values.Select(p => p.CleanupIdleAsync(ct));
        await Task.WhenAll(tasks);
    }

    /// <summary>
    /// Removes a specific pool and closes all its connections.
    /// No-op if the pool does not exist.
    /// </summary>
    public async Task RemovePoolAsync(
        string endpoint,
        ConnectionType connectionType,
        CancellationToken ct = default)
    {
        var poolKey = GetPoolKey(endpoint, connectionType);

        if (_pools.TryRemove(poolKey, out var pool))
        {
            _logger.LogInformation(
                "Removing pool for {Endpoint} ({Type})",
                endpoint, connectionType);

            await pool.DisposeAsync();
        }
    }

    /// <summary>
    /// Background loop: sweeps idle connections on each cleanup interval
    /// until shutdown is requested. Transient failures are logged and the
    /// loop continues.
    /// </summary>
    protected override async Task ExecuteAsync(CancellationToken stoppingToken)
    {
        _logger.LogInformation(
            "Connection pool manager starting with idle timeout {IdleTimeout}",
            _config.IdleTimeout);

        // FIX: PeriodicTimer is IDisposable; dispose it when the loop exits
        // (previously it was leaked).
        using var timer = new PeriodicTimer(_config.CleanupInterval);

        while (!stoppingToken.IsCancellationRequested)
        {
            try
            {
                await timer.WaitForNextTickAsync(stoppingToken);
                await CleanupIdleConnectionsAsync(stoppingToken);
            }
            catch (OperationCanceledException) when (stoppingToken.IsCancellationRequested)
            {
                break;
            }
            catch (Exception ex)
            {
                _logger.LogError(ex, "Error during connection pool cleanup");
            }
        }
    }

    // Returns the existing pool for the key, or atomically creates one with
    // per-type connection limits.
    private ConnectionPool GetOrCreatePool(
        string poolKey,
        string endpoint,
        ConnectionType connectionType)
    {
        return _pools.GetOrAdd(poolKey, _ =>
        {
            _logger.LogDebug(
                "Creating new connection pool for {Endpoint} ({Type})",
                endpoint, connectionType);

            var poolConfig = new PoolConfig
            {
                Endpoint = endpoint,
                ConnectionType = connectionType,
                MaxConnections = GetMaxConnectionsForType(connectionType),
                MinConnections = _config.MinConnectionsPerPool,
                IdleTimeout = _config.IdleTimeout,
                ConnectionTimeout = _config.ConnectionTimeout,
                MaxLifetime = _config.MaxConnectionLifetime
            };

            return new ConnectionPool(
                _connectionFactory,
                _timeProvider,
                poolConfig,
                _logger);
        });
    }

    // Per-type connection cap; unknown types fall back to the generic limit.
    private int GetMaxConnectionsForType(ConnectionType type)
    {
        return type switch
        {
            ConnectionType.Registry => _config.MaxRegistryConnectionsPerEndpoint,
            ConnectionType.Agent => _config.MaxAgentConnectionsPerEndpoint,
            ConnectionType.Database => _config.MaxDatabaseConnectionsPerEndpoint,
            _ => _config.MaxConnectionsPerPool
        };
    }

    // Pool dictionary key: connection type + endpoint.
    private static string GetPoolKey(string endpoint, ConnectionType connectionType)
    {
        return $"{connectionType}:{endpoint}";
    }

    /// <summary>
    /// Stops the background sweep loop, then disposes every pool.
    /// </summary>
    public override async Task StopAsync(CancellationToken cancellationToken)
    {
        _logger.LogInformation("Connection pool manager stopping");

        await base.StopAsync(cancellationToken);

        // Dispose all pools
        var disposeTasks = _pools.Values.Select(p => p.DisposeAsync().AsTask());
        await Task.WhenAll(disposeTasks);

        _pools.Clear();
    }

    /// <summary>
    /// Disposes all pools, the global limiter, and the BackgroundService base.
    /// Safe to call more than once.
    /// </summary>
    public async ValueTask DisposeAsync()
    {
        if (_disposed)
        {
            return;
        }

        _disposed = true;

        var disposeTasks = _pools.Values.Select(p => p.DisposeAsync().AsTask());
        await Task.WhenAll(disposeTasks);

        _pools.Clear();
        _globalLimiter.Dispose();

        // FIX: release BackgroundService resources (its internal
        // CancellationTokenSource) — previously never disposed here.
        base.Dispose();

        GC.SuppressFinalize(this);
    }
}
|
||||
|
||||
/// <summary>
/// Manages a pool of connections to a single endpoint.
/// Idle connections wait in a queue; leased connections are tracked by id.
/// </summary>
internal sealed class ConnectionPool : IAsyncDisposable
{
    private readonly IConnectionFactory _connectionFactory;
    private readonly TimeProvider _timeProvider;
    private readonly PoolConfig _config;
    private readonly ILogger _logger;
    // Idle connections available for reuse (FIFO).
    private readonly ConcurrentQueue<PooledConnection> _availableConnections = new();
    // Connections currently leased out, keyed by connection id.
    private readonly ConcurrentDictionary<Guid, PooledConnection> _activeConnections = new();
    // Caps this pool at _config.MaxConnections.
    private readonly SemaphoreSlim _poolLimiter;
    private readonly object _statsLock = new();
    // Lifetime counters; exposed via GetStatistics.
    private int _totalCreated;
    private int _totalDestroyed;
    private bool _disposed;
|
||||
|
||||
public ConnectionPool(
    IConnectionFactory connectionFactory,
    TimeProvider timeProvider,
    PoolConfig config,
    ILogger logger)
{
    // Per-pool cap comes straight from the pool's configuration.
    _poolLimiter = new SemaphoreSlim(config.MaxConnections);
    _config = config;
    _logger = logger;
    _timeProvider = timeProvider;
    _connectionFactory = connectionFactory;
}
|
||||
|
||||
/// <summary>
/// Leases a connection: reuses a valid idle connection if one exists,
/// otherwise creates a new one, waiting for pool capacity first.
/// </summary>
/// <param name="ct">Cancels waiting for capacity or connection creation.</param>
public async Task<PooledConnection> AcquireAsync(CancellationToken ct)
{
    ObjectDisposedException.ThrowIf(_disposed, this);

    // FIX: reserve a pool permit up front. Return() releases exactly one
    // permit per returned connection, so every leased connection — whether
    // reused from the idle queue or newly created — must hold a permit.
    // Previously only newly created connections acquired the semaphore,
    // so each reuse/return cycle inflated the count past MaxConnections.
    await _poolLimiter.WaitAsync(ct);

    try
    {
        // Prefer an existing idle connection.
        while (_availableConnections.TryDequeue(out var pooled))
        {
            if (IsConnectionValid(pooled))
            {
                pooled.LastUsedAt = _timeProvider.GetUtcNow();
                _activeConnections[pooled.Id] = pooled;

                _logger.LogTrace(
                    "Reusing connection {ConnectionId} for {Endpoint}",
                    pooled.Id, _config.Endpoint);

                return pooled;
            }

            // Connection is stale, dispose it
            await DestroyConnectionAsync(pooled);
        }

        // No reusable connection: create a new one.
        var connection = await CreateConnectionAsync(ct);
        _activeConnections[connection.Id] = connection;
        return connection;
    }
    catch
    {
        // Nothing was leased; give the permit back.
        _poolLimiter.Release();
        throw;
    }
}
|
||||
|
||||
/// <summary>
/// Returns a leased connection to the pool. Valid connections are re-queued
/// for reuse; stale ones are destroyed. Releases one pool permit, except
/// when the pool is already disposed (the limiter is gone by then).
/// </summary>
public void Return(PooledConnection connection)
{
    if (_disposed)
    {
        // Pool is shutting down; the connection cannot be recycled.
        _ = DestroyConnectionAsync(connection);
        return;
    }

    _activeConnections.TryRemove(connection.Id, out _);

    if (!IsConnectionValid(connection))
    {
        // Expired or broken — drop it instead of re-queueing.
        _ = DestroyConnectionAsync(connection);
    }
    else
    {
        connection.LastUsedAt = _timeProvider.GetUtcNow();
        _availableConnections.Enqueue(connection);

        _logger.LogTrace(
            "Returned connection {ConnectionId} to pool for {Endpoint}",
            connection.Id, _config.Endpoint);
    }

    _poolLimiter.Release();
}
|
||||
|
||||
/// <summary>
/// Destroys idle connections that have exceeded the configured idle timeout.
/// </summary>
/// <remarks>
/// BUG FIX: the previous implementation took a snapshot, computed the valid
/// set, then drained the ENTIRE queue and re-enqueued only the snapshot's
/// valid entries — any connection returned to the pool between the snapshot
/// and the drain was silently dropped without being disposed (a leak). This
/// version rotates the queue in place: each entry is dequeued exactly once
/// and either destroyed (expired) or immediately re-enqueued (fresh), so
/// concurrently returned connections are never lost.
/// </remarks>
/// <param name="ct">Stops the sweep early; partially swept state is safe.</param>
public async Task CleanupIdleAsync(CancellationToken ct)
{
    var now = _timeProvider.GetUtcNow();
    var destroyed = 0;

    // Bound the rotation by the count observed at the start so entries this
    // loop re-enqueues are not examined a second time.
    var examineCount = _availableConnections.Count;
    for (var i = 0; i < examineCount && !ct.IsCancellationRequested; i++)
    {
        if (!_availableConnections.TryDequeue(out var connection))
        {
            break;
        }

        if (now - connection.LastUsedAt > _config.IdleTimeout)
        {
            await DestroyConnectionAsync(connection);
            destroyed++;
        }
        else
        {
            _availableConnections.Enqueue(connection);
        }
    }

    if (destroyed > 0)
    {
        _logger.LogDebug(
            "Cleaned up {Count} idle connections for {Endpoint}",
            destroyed, _config.Endpoint);
    }
}
|
||||
|
||||
/// <summary>
/// Produces a point-in-time snapshot of this pool's usage counters.
/// Counts are read without a lock, so concurrent churn can make the
/// snapshot slightly inconsistent — acceptable for monitoring.
/// </summary>
public SinglePoolStatistics GetStatistics()
{
    var idle = _availableConnections.Count;
    var active = _activeConnections.Count;

    return new SinglePoolStatistics
    {
        Endpoint = _config.Endpoint,
        ConnectionType = _config.ConnectionType,
        TotalConnections = idle + active,
        ActiveConnections = active,
        IdleConnections = idle,
        TotalCreated = _totalCreated,
        TotalDestroyed = _totalDestroyed,
        MaxConnections = _config.MaxConnections,
        AvailableCapacity = _poolLimiter.CurrentCount
    };
}
|
||||
|
||||
/// <summary>
/// Creates a brand-new connection through the factory, bounded by the
/// configured connection timeout, and wraps it with pool metadata.
/// </summary>
private async Task<PooledConnection> CreateConnectionAsync(CancellationToken ct)
{
    var startTime = _timeProvider.GetUtcNow();

    _logger.LogDebug(
        "Creating new connection for {Endpoint} ({Type})",
        _config.Endpoint, _config.ConnectionType);

    // Link the caller's token with the connection timeout so a hung factory
    // cannot stall the pool indefinitely.
    using var timeoutCts = CancellationTokenSource.CreateLinkedTokenSource(ct);
    timeoutCts.CancelAfter(_config.ConnectionTimeout);

    var raw = await _connectionFactory.CreateAsync(
        _config.Endpoint,
        _config.ConnectionType,
        timeoutCts.Token);

    var pooled = new PooledConnection
    {
        Id = Guid.NewGuid(),
        Connection = raw,
        Endpoint = _config.Endpoint,
        ConnectionType = _config.ConnectionType,
        CreatedAt = startTime,
        LastUsedAt = startTime
    };

    Interlocked.Increment(ref _totalCreated);

    _logger.LogDebug(
        "Created connection {ConnectionId} for {Endpoint} in {Duration}ms",
        pooled.Id, _config.Endpoint,
        (_timeProvider.GetUtcNow() - startTime).TotalMilliseconds);

    return pooled;
}
|
||||
|
||||
/// <summary>
/// Disposes a connection's underlying transport and bumps the destroyed
/// counter. Disposal errors are logged and swallowed so teardown paths
/// (cleanup sweeps, pool disposal, stale-connection eviction) never throw.
/// </summary>
private async Task DestroyConnectionAsync(PooledConnection connection)
{
    try
    {
        _logger.LogTrace(
            "Destroying connection {ConnectionId} for {Endpoint}",
            connection.Id, _config.Endpoint);

        await connection.Connection.DisposeAsync();
        // Only counted on successful disposal; a failed dispose leaves the
        // created/destroyed counters unbalanced by design (visible in stats).
        Interlocked.Increment(ref _totalDestroyed);
    }
    catch (Exception ex)
    {
        _logger.LogWarning(ex,
            "Error disposing connection {ConnectionId}",
            connection.Id);
    }
}
|
||||
|
||||
/// <summary>
/// Determines whether a pooled connection may still be handed out:
/// it must be within its maximum lifetime, within the idle timeout,
/// and the underlying transport must report itself connected.
/// </summary>
private bool IsConnectionValid(PooledConnection connection)
{
    var now = _timeProvider.GetUtcNow();

    var withinLifetime = now - connection.CreatedAt <= _config.MaxLifetime;
    var withinIdleWindow = now - connection.LastUsedAt <= _config.IdleTimeout;

    // IsConnected is only consulted when both time checks pass.
    return withinLifetime
        && withinIdleWindow
        && connection.Connection.IsConnected;
}
|
||||
|
||||
/// <summary>
/// Disposes the pool: destroys every checked-out and idle connection, then
/// disposes the limiter. Subsequent calls are no-ops (best-effort guard;
/// the flag check is not synchronized).
/// </summary>
public async ValueTask DisposeAsync()
{
    if (_disposed)
    {
        return;
    }

    _disposed = true;

    // Tear down connections currently checked out...
    foreach (var active in _activeConnections.Values)
    {
        await DestroyConnectionAsync(active);
    }

    // ...then drain and destroy everything still sitting idle.
    while (_availableConnections.TryDequeue(out var idle))
    {
        await DestroyConnectionAsync(idle);
    }

    _poolLimiter.Dispose();
}
|
||||
}
|
||||
|
||||
/// <summary>
/// Lease for a pooled connection. Disposing the lease returns the connection
/// to its pool and releases the global capacity permit exactly once.
/// </summary>
internal sealed class ConnectionLease : IConnectionLease
{
    private readonly PooledConnection _connection;
    private readonly ConnectionPool _pool;
    private readonly SemaphoreSlim _globalLimiter;
    private bool _disposed;

    public ConnectionLease(
        PooledConnection connection,
        ConnectionPool pool,
        SemaphoreSlim globalLimiter,
        TimeProvider timeProvider)
    {
        _connection = connection;
        _pool = pool;
        _globalLimiter = globalLimiter;

        // BUG FIX: AcquiredAt previously read TimeProvider.System.GetUtcNow(),
        // silently ignoring the injected timeProvider (which was stored in a
        // field but never used). Using the injected clock restores
        // deterministic time in tests and matches the rest of the codebase.
        AcquiredAt = timeProvider.GetUtcNow();
    }

    public IPooledConnection Connection => _connection.Connection;
    public Guid ConnectionId => _connection.Id;
    public string Endpoint => _connection.Endpoint;
    public ConnectionType ConnectionType => _connection.ConnectionType;
    public DateTimeOffset AcquiredAt { get; }

    public ValueTask DisposeAsync()
    {
        if (_disposed)
        {
            return ValueTask.CompletedTask;
        }

        _disposed = true;

        // Hand the connection back to its pool, then free global capacity.
        _pool.Return(_connection);
        _globalLimiter.Release();

        return ValueTask.CompletedTask;
    }
}
|
||||
|
||||
/// <summary>
/// Configuration for the connection pool manager. All limits default to
/// values suitable for a mid-sized deployment; override per environment.
/// </summary>
public sealed record ConnectionPoolConfig
{
    /// <summary>
    /// Maximum total connections across all pools. Default: 200.
    /// </summary>
    public int MaxTotalConnections { get; init; } = 200;

    /// <summary>
    /// Maximum connections per pool (default when no type-specific limit
    /// applies). Default: 20.
    /// </summary>
    public int MaxConnectionsPerPool { get; init; } = 20;

    /// <summary>
    /// Maximum registry connections per endpoint. Default: 10.
    /// </summary>
    public int MaxRegistryConnectionsPerEndpoint { get; init; } = 10;

    /// <summary>
    /// Maximum agent connections per endpoint. Default: 5.
    /// </summary>
    public int MaxAgentConnectionsPerEndpoint { get; init; } = 5;

    /// <summary>
    /// Maximum database connections per endpoint. Default: 25.
    /// </summary>
    public int MaxDatabaseConnectionsPerEndpoint { get; init; } = 25;

    /// <summary>
    /// Minimum connections to maintain per pool. Default: 1.
    /// </summary>
    public int MinConnectionsPerPool { get; init; } = 1;

    /// <summary>
    /// Idle timeout before a connection is closed. Default: 5 minutes.
    /// </summary>
    public TimeSpan IdleTimeout { get; init; } = TimeSpan.FromMinutes(5);

    /// <summary>
    /// Timeout for creating new connections. Default: 30 seconds.
    /// </summary>
    public TimeSpan ConnectionTimeout { get; init; } = TimeSpan.FromSeconds(30);

    /// <summary>
    /// Maximum lifetime of a connection before forced recycling.
    /// Default: 30 minutes.
    /// </summary>
    public TimeSpan MaxConnectionLifetime { get; init; } = TimeSpan.FromMinutes(30);

    /// <summary>
    /// Interval for cleanup of idle connections. Default: 1 minute.
    /// </summary>
    public TimeSpan CleanupInterval { get; init; } = TimeSpan.FromMinutes(1);
}
|
||||
|
||||
/// <summary>
/// Configuration for a single pool (one endpoint + connection type),
/// derived from <see cref="ConnectionPoolConfig"/> by the pool manager.
/// </summary>
internal sealed record PoolConfig
{
    /// <summary>Endpoint this pool connects to.</summary>
    public required string Endpoint { get; init; }
    /// <summary>Kind of connection the pool manages.</summary>
    public required ConnectionType ConnectionType { get; init; }
    /// <summary>Upper bound on concurrently acquired connections.</summary>
    public required int MaxConnections { get; init; }
    /// <summary>Lower bound of connections to keep warm.</summary>
    public required int MinConnections { get; init; }
    /// <summary>Idle time after which a connection is recycled.</summary>
    public required TimeSpan IdleTimeout { get; init; }
    /// <summary>Time budget for creating a new connection.</summary>
    public required TimeSpan ConnectionTimeout { get; init; }
    /// <summary>Age after which a connection is force-recycled.</summary>
    public required TimeSpan MaxLifetime { get; init; }
}
|
||||
|
||||
/// <summary>
/// A pooled connection with metadata. Immutable except for
/// <see cref="LastUsedAt"/>, which the pool stamps on acquire and return.
/// </summary>
internal sealed record PooledConnection
{
    /// <summary>Pool-assigned identifier, used as the active-connection key.</summary>
    public required Guid Id { get; init; }
    /// <summary>Underlying transport supplied by the factory.</summary>
    public required IPooledConnection Connection { get; init; }
    /// <summary>Endpoint this connection targets.</summary>
    public required string Endpoint { get; init; }
    /// <summary>Kind of connection (registry, agent, ...).</summary>
    public required ConnectionType ConnectionType { get; init; }
    /// <summary>Creation timestamp; drives max-lifetime recycling.</summary>
    public required DateTimeOffset CreatedAt { get; init; }
    /// <summary>Last acquire/return timestamp; drives idle-timeout recycling.
    /// Deliberately mutable (set by the pool, not at construction).</summary>
    public DateTimeOffset LastUsedAt { get; set; }
}
|
||||
|
||||
/// <summary>
/// Statistics aggregated across all connection pools at a point in time.
/// </summary>
public sealed record ConnectionPoolStatistics
{
    /// <summary>Number of per-endpoint pools currently managed.</summary>
    public required int TotalPools { get; init; }
    /// <summary>Sum of active and idle connections across all pools.</summary>
    public required int TotalConnections { get; init; }
    /// <summary>Connections currently checked out.</summary>
    public required int ActiveConnections { get; init; }
    /// <summary>Connections sitting idle awaiting reuse.</summary>
    public required int IdleConnections { get; init; }
    /// <summary>Per-pool breakdown of the aggregate numbers.</summary>
    public required ImmutableArray<SinglePoolStatistics> PoolDetails { get; init; }
    /// <summary>Permits consumed from the global capacity limiter.</summary>
    public required int GlobalCapacityUsed { get; init; }
    /// <summary>Permits remaining in the global capacity limiter.</summary>
    public required int GlobalCapacityAvailable { get; init; }
    /// <summary>When this snapshot was taken.</summary>
    public required DateTimeOffset Timestamp { get; init; }
}
|
||||
|
||||
/// <summary>
/// Point-in-time usage counters for a single endpoint pool.
/// </summary>
public sealed record SinglePoolStatistics
{
    /// <summary>Endpoint the pool serves.</summary>
    public required string Endpoint { get; init; }
    /// <summary>Kind of connection the pool manages.</summary>
    public required ConnectionType ConnectionType { get; init; }
    /// <summary>Active plus idle connections.</summary>
    public required int TotalConnections { get; init; }
    /// <summary>Connections currently checked out.</summary>
    public required int ActiveConnections { get; init; }
    /// <summary>Connections waiting in the reuse queue.</summary>
    public required int IdleConnections { get; init; }
    /// <summary>Lifetime count of connections ever created.</summary>
    public required int TotalCreated { get; init; }
    /// <summary>Lifetime count of connections successfully disposed.</summary>
    public required int TotalDestroyed { get; init; }
    /// <summary>Configured concurrency cap for the pool.</summary>
    public required int MaxConnections { get; init; }
    /// <summary>Remaining permits on the pool's limiter.</summary>
    public required int AvailableCapacity { get; init; }
}
|
||||
|
||||
/// <summary>
/// Types of connections that can be pooled. Each type carries its own
/// per-endpoint limit in <see cref="ConnectionPoolConfig"/>.
/// </summary>
public enum ConnectionType
{
    /// <summary>Container/artifact registry connection.</summary>
    Registry,
    /// <summary>Deployment agent connection.</summary>
    Agent,
    /// <summary>Database connection.</summary>
    Database,
    /// <summary>Object/blob storage connection.</summary>
    Storage,
    /// <summary>Generic HTTP API connection.</summary>
    Api
}
|
||||
|
||||
/// <summary>
/// Lease for a connection from the pool. Dispose the lease (rather than the
/// connection) to return the connection for reuse.
/// </summary>
public interface IConnectionLease : IAsyncDisposable
{
    /// <summary>The leased transport; do not dispose directly.</summary>
    IPooledConnection Connection { get; }
    /// <summary>Pool-assigned identifier of the leased connection.</summary>
    Guid ConnectionId { get; }
    /// <summary>Endpoint the connection targets.</summary>
    string Endpoint { get; }
    /// <summary>Kind of the leased connection.</summary>
    ConnectionType ConnectionType { get; }
    /// <summary>When the lease was granted.</summary>
    DateTimeOffset AcquiredAt { get; }
}
|
||||
|
||||
/// <summary>
/// A pooled connection that can be reused. Implementations report liveness
/// via <see cref="IsConnected"/> and support an explicit health probe.
/// </summary>
public interface IPooledConnection : IAsyncDisposable
{
    /// <summary>Cheap liveness flag consulted on every pool validity check.</summary>
    bool IsConnected { get; }
    /// <summary>Active health probe; may perform I/O.</summary>
    Task<bool> ValidateAsync(CancellationToken ct = default);
}
|
||||
|
||||
/// <summary>
/// Factory for creating connections. The pool calls this under a linked
/// token that enforces the configured connection timeout.
/// </summary>
public interface IConnectionFactory
{
    /// <summary>Creates a ready-to-use connection to the given endpoint.</summary>
    Task<IPooledConnection> CreateAsync(
        string endpoint,
        ConnectionType connectionType,
        CancellationToken ct = default);
}
|
||||
@@ -0,0 +1,852 @@
|
||||
|
||||
using Microsoft.Extensions.Hosting;
|
||||
using Microsoft.Extensions.Logging;
|
||||
using System.Collections.Concurrent;
|
||||
using System.Collections.Immutable;
|
||||
using System.Threading.Channels;
|
||||
|
||||
namespace StellaOps.ReleaseOrchestrator.Performance.Prefetch;
|
||||
|
||||
/// <summary>
|
||||
/// Predictive data prefetching service for gate inputs, scan results, and attestation data.
|
||||
/// Uses access pattern analysis to anticipate needed data before it is requested.
|
||||
/// </summary>
|
||||
public sealed class DataPrefetcher : BackgroundService
|
||||
{
|
||||
private readonly IPrefetchDataProvider _dataProvider;
|
||||
private readonly IPrefetchCache _cache;
|
||||
private readonly TimeProvider _timeProvider;
|
||||
private readonly DataPrefetcherConfig _config;
|
||||
private readonly ILogger<DataPrefetcher> _logger;
|
||||
private readonly Channel<PrefetchRequest> _requestChannel;
|
||||
private readonly ConcurrentDictionary<string, AccessPattern> _accessPatterns = new();
|
||||
private readonly ConcurrentDictionary<string, PrefetchJob> _activeJobs = new();
|
||||
|
||||
/// <summary>
/// Wires the prefetcher to its data source, cache, and clock, and creates a
/// bounded request channel (5000 entries) that sheds the oldest requests
/// under backpressure.
/// </summary>
public DataPrefetcher(
    IPrefetchDataProvider dataProvider,
    IPrefetchCache cache,
    TimeProvider timeProvider,
    DataPrefetcherConfig config,
    ILogger<DataPrefetcher> logger)
{
    _dataProvider = dataProvider;
    _cache = cache;
    _timeProvider = timeProvider;
    _config = config;
    _logger = logger;

    // Bounded so a flood of predictive requests cannot grow memory without
    // limit; DropOldest favors freshly predicted data over stale requests.
    var channelOptions = new BoundedChannelOptions(5000)
    {
        FullMode = BoundedChannelFullMode.DropOldest
    };
    _requestChannel = Channel.CreateBounded<PrefetchRequest>(channelOptions);
}
|
||||
|
||||
/// <summary>
/// Enqueues a predictive prefetch request for background processing and
/// returns a rough completion estimate.
/// </summary>
/// <param name="request">Request to queue; an empty Id is replaced with a new one.</param>
/// <param name="ct">Cancels the channel write if the queue is saturated.</param>
public async Task<PrefetchQueueResult> EnqueueAsync(
    PrefetchRequest request,
    CancellationToken ct = default)
{
    ArgumentNullException.ThrowIfNull(request);

    // Stamp an id and the enqueue time before the request enters the channel.
    var stamped = request with
    {
        Id = request.Id == Guid.Empty ? Guid.NewGuid() : request.Id,
        RequestedAt = _timeProvider.GetUtcNow()
    };

    await _requestChannel.Writer.WriteAsync(stamped, ct);

    _logger.LogDebug(
        "Enqueued prefetch request {RequestId} for {DataType}",
        stamped.Id, stamped.DataType);

    return new PrefetchQueueResult
    {
        RequestId = stamped.Id,
        Queued = true,
        EstimatedCompletionTime = EstimateCompletion(stamped)
    };
}
|
||||
|
||||
/// <summary>
/// Triggers prefetch for anticipated gate evaluation inputs: loads gate
/// configurations into the cache (concurrently, one task per gate), records
/// the access for future pattern-based prediction, and kicks off predicted
/// related prefetches.
/// </summary>
/// <param name="promotionId">Promotion whose gates are about to be evaluated.</param>
/// <param name="gateIds">Gates whose configurations should be cached.</param>
/// <param name="ct">Cancels cache and provider calls.</param>
/// <returns>Summary of what was fetched and what was already cached.</returns>
public async Task<PrefetchResult> PrefetchForGateEvaluationAsync(
    Guid promotionId,
    IReadOnlyList<Guid> gateIds,
    CancellationToken ct = default)
{
    var startTime = _timeProvider.GetUtcNow();
    var prefetchedItems = new List<PrefetchedItem>();

    _logger.LogInformation(
        "Prefetching data for promotion {PromotionId} with {GateCount} gates",
        promotionId, gateIds.Count);

    // Prefetch gate configurations. Each lambda checks the cache first; a
    // hit is reported with zero duration, a miss is fetched and cached.
    // NOTE(review): Exists-then-fetch is not atomic — a concurrent eviction
    // between the check and later use simply degrades to a provider call.
    var gateConfigTasks = gateIds.Select(async gateId =>
    {
        var key = $"gate-config:{gateId}";
        if (await _cache.ExistsAsync(key, ct))
        {
            return new PrefetchedItem
            {
                Key = key,
                DataType = PrefetchDataType.GateConfig,
                WasCached = true,
                Duration = TimeSpan.Zero
            };
        }

        var sw = System.Diagnostics.Stopwatch.StartNew();
        var data = await _dataProvider.GetGateConfigAsync(gateId, ct);
        sw.Stop();

        if (data is not null)
        {
            await _cache.SetAsync(key, data, _config.GateConfigTtl, ct);
        }

        return new PrefetchedItem
        {
            Key = key,
            DataType = PrefetchDataType.GateConfig,
            WasCached = false,
            Duration = sw.Elapsed,
            Success = data is not null
        };
    });

    var gateConfigs = await Task.WhenAll(gateConfigTasks);
    prefetchedItems.AddRange(gateConfigs);

    // Record access pattern for future predictions.
    RecordAccessPattern(promotionId, gateIds);

    // Predict and prefetch related data based on patterns (the predicted
    // fetches themselves run in the background; only their descriptors are
    // collected here).
    var predictedData = await PrefetchPredictedDataAsync(promotionId, gateIds, ct);
    prefetchedItems.AddRange(predictedData);

    var duration = _timeProvider.GetUtcNow() - startTime;

    _logger.LogInformation(
        "Prefetched {Count} items for promotion {PromotionId} in {Duration}ms",
        prefetchedItems.Count, promotionId, duration.TotalMilliseconds);

    return new PrefetchResult
    {
        PromotionId = promotionId,
        Items = prefetchedItems.ToImmutableArray(),
        Duration = duration,
        CacheHits = prefetchedItems.Count(i => i.WasCached),
        CacheMisses = prefetchedItems.Count(i => !i.WasCached)
    };
}
|
||||
|
||||
/// <summary>
/// Prefetches scan results for specified artifacts, fetching all digests
/// concurrently and caching any results the provider returns.
/// </summary>
/// <param name="artifactDigests">Digests whose scan results should be cached.</param>
/// <param name="ct">Cancels cache and provider calls.</param>
public async Task<PrefetchResult> PrefetchScanResultsAsync(
    IReadOnlyList<string> artifactDigests,
    CancellationToken ct = default)
{
    var startTime = _timeProvider.GetUtcNow();

    _logger.LogInformation(
        "Prefetching scan results for {Count} artifacts",
        artifactDigests.Count);

    // Fetch-and-cache one artifact's scan result, reporting whether it was
    // already cached and how long the provider call took.
    async Task<PrefetchedItem> FetchOneAsync(string digest)
    {
        var key = $"scan-result:{digest}";
        if (await _cache.ExistsAsync(key, ct))
        {
            return new PrefetchedItem
            {
                Key = key,
                DataType = PrefetchDataType.ScanResult,
                WasCached = true,
                Duration = TimeSpan.Zero
            };
        }

        var sw = System.Diagnostics.Stopwatch.StartNew();
        var data = await _dataProvider.GetScanResultAsync(digest, ct);
        sw.Stop();

        if (data is not null)
        {
            await _cache.SetAsync(key, data, _config.ScanResultTtl, ct);
        }

        return new PrefetchedItem
        {
            Key = key,
            DataType = PrefetchDataType.ScanResult,
            WasCached = false,
            Duration = sw.Elapsed,
            Success = data is not null
        };
    }

    var items = await Task.WhenAll(artifactDigests.Select(FetchOneAsync));

    var duration = _timeProvider.GetUtcNow() - startTime;

    return new PrefetchResult
    {
        Items = items.ToImmutableArray(),
        Duration = duration,
        CacheHits = items.Count(i => i.WasCached),
        CacheMisses = items.Count(i => !i.WasCached)
    };
}
|
||||
|
||||
/// <summary>
/// Prefetches attestation data for specified artifacts, fetching all digests
/// concurrently and caching any attestations the provider returns.
/// </summary>
/// <param name="artifactDigests">Digests whose attestations should be cached.</param>
/// <param name="ct">Cancels cache and provider calls.</param>
public async Task<PrefetchResult> PrefetchAttestationsAsync(
    IReadOnlyList<string> artifactDigests,
    CancellationToken ct = default)
{
    var startTime = _timeProvider.GetUtcNow();

    _logger.LogInformation(
        "Prefetching attestations for {Count} artifacts",
        artifactDigests.Count);

    // Fetch-and-cache a single artifact's attestation, noting cache hits.
    async Task<PrefetchedItem> FetchOneAsync(string digest)
    {
        var key = $"attestation:{digest}";
        if (await _cache.ExistsAsync(key, ct))
        {
            return new PrefetchedItem
            {
                Key = key,
                DataType = PrefetchDataType.Attestation,
                WasCached = true,
                Duration = TimeSpan.Zero
            };
        }

        var sw = System.Diagnostics.Stopwatch.StartNew();
        var data = await _dataProvider.GetAttestationAsync(digest, ct);
        sw.Stop();

        if (data is not null)
        {
            await _cache.SetAsync(key, data, _config.AttestationTtl, ct);
        }

        return new PrefetchedItem
        {
            Key = key,
            DataType = PrefetchDataType.Attestation,
            WasCached = false,
            Duration = sw.Elapsed,
            Success = data is not null
        };
    }

    var items = await Task.WhenAll(artifactDigests.Select(FetchOneAsync));

    var duration = _timeProvider.GetUtcNow() - startTime;

    return new PrefetchResult
    {
        Items = items.ToImmutableArray(),
        Duration = duration,
        CacheHits = items.Count(i => i.WasCached),
        CacheMisses = items.Count(i => !i.WasCached)
    };
}
|
||||
|
||||
/// <summary>
/// Warms the cache for a release pipeline: resolves the pipeline's metadata,
/// then prefetches its gate configurations and — when artifacts are known —
/// their scan results and attestations.
/// </summary>
/// <param name="pipelineId">Pipeline whose inputs should be cached.</param>
/// <param name="ct">Cancels provider and cache calls.</param>
/// <returns>
/// Aggregate of all sub-prefetch results; empty when the pipeline is unknown.
/// </returns>
public async Task<PrefetchResult> WarmCacheForPipelineAsync(
    Guid pipelineId,
    CancellationToken ct = default)
{
    var startTime = _timeProvider.GetUtcNow();
    var prefetchedItems = new List<PrefetchedItem>();

    _logger.LogInformation(
        "Warming cache for pipeline {PipelineId}",
        pipelineId);

    // Get pipeline metadata to determine what to prefetch.
    var pipelineData = await _dataProvider.GetPipelineMetadataAsync(pipelineId, ct);
    if (pipelineData is null)
    {
        // Unknown pipeline: report an empty (but well-formed) result rather
        // than throwing, since warming is best-effort.
        _logger.LogWarning("Pipeline {PipelineId} not found", pipelineId);
        return new PrefetchResult
        {
            PipelineId = pipelineId,
            Items = [],
            Duration = TimeSpan.Zero,
            CacheHits = 0,
            CacheMisses = 0
        };
    }

    // Prefetch gate configurations for all stages.
    // NOTE(review): the pipeline id is passed where the gate-evaluation
    // method expects a promotion id — confirm the access-pattern keyspace
    // intentionally mixes the two.
    var gateResults = await PrefetchForGateEvaluationAsync(
        pipelineId,
        pipelineData.GateIds,
        ct);
    prefetchedItems.AddRange(gateResults.Items);

    // Prefetch scan results and attestations if artifacts are known.
    if (pipelineData.ArtifactDigests.Length > 0)
    {
        var scanResults = await PrefetchScanResultsAsync(
            pipelineData.ArtifactDigests,
            ct);
        prefetchedItems.AddRange(scanResults.Items);

        var attestations = await PrefetchAttestationsAsync(
            pipelineData.ArtifactDigests,
            ct);
        prefetchedItems.AddRange(attestations.Items);
    }

    var duration = _timeProvider.GetUtcNow() - startTime;

    _logger.LogInformation(
        "Warmed cache for pipeline {PipelineId}: {Count} items in {Duration}ms",
        pipelineId, prefetchedItems.Count, duration.TotalMilliseconds);

    return new PrefetchResult
    {
        PipelineId = pipelineId,
        Items = prefetchedItems.ToImmutableArray(),
        Duration = duration,
        CacheHits = prefetchedItems.Count(i => i.WasCached),
        CacheMisses = prefetchedItems.Count(i => !i.WasCached)
    };
}
|
||||
|
||||
/// <summary>
/// Gets prefetch statistics for monitoring: in-flight jobs, queued requests,
/// and tracked access patterns, stamped with the current time.
/// </summary>
public PrefetchStatistics GetStatistics() => new()
{
    ActiveJobs = _activeJobs.Count,
    PendingRequests = _requestChannel.Reader.Count,
    AccessPatternsTracked = _accessPatterns.Count,
    Timestamp = _timeProvider.GetUtcNow()
};
|
||||
|
||||
/// <summary>
/// Background loop draining the prefetch request channel.
/// </summary>
/// <remarks>
/// BUG FIX: requests were previously processed strictly one at a time even
/// though <c>MaxConcurrentPrefetches</c> was configured (and logged) — the
/// limit was never implemented. Requests are now consumed with
/// <see cref="Parallel.ForEachAsync"/> bounded by that setting.
/// </remarks>
protected override async Task ExecuteAsync(CancellationToken stoppingToken)
{
    _logger.LogInformation(
        "Data prefetcher starting with concurrency {Concurrency}",
        _config.MaxConcurrentPrefetches);

    var parallelOptions = new ParallelOptions
    {
        MaxDegreeOfParallelism = _config.MaxConcurrentPrefetches,
        CancellationToken = stoppingToken
    };

    try
    {
        await Parallel.ForEachAsync(
            _requestChannel.Reader.ReadAllAsync(stoppingToken),
            parallelOptions,
            async (request, ct) =>
            {
                try
                {
                    await ProcessPrefetchRequestAsync(request, ct);
                }
                catch (OperationCanceledException) when (ct.IsCancellationRequested)
                {
                    // Shutdown in progress; let the outer loop unwind.
                }
                catch (Exception ex)
                {
                    // One bad request must not take the whole loop down.
                    _logger.LogError(ex,
                        "Error processing prefetch request {RequestId}",
                        request.Id);
                }
            });
    }
    catch (OperationCanceledException) when (stoppingToken.IsCancellationRequested)
    {
        // Normal shutdown path.
    }
}
|
||||
|
||||
/// <summary>
/// Executes a single prefetch request, tracking its lifecycle in
/// <c>_activeJobs</c> so in-flight and recently finished jobs are visible
/// to <see cref="GetStatistics"/>. Failures are recorded on the job record
/// rather than propagated.
/// </summary>
private async Task ProcessPrefetchRequestAsync(
    PrefetchRequest request,
    CancellationToken ct)
{
    var jobKey = request.Id.ToString();
    var job = new PrefetchJob
    {
        RequestId = request.Id,
        StartedAt = _timeProvider.GetUtcNow(),
        Status = PrefetchJobStatus.InProgress
    };

    _activeJobs[jobKey] = job;

    try
    {
        switch (request.DataType)
        {
            case PrefetchDataType.GateConfig:
                await PrefetchGateConfigAsync(request, ct);
                break;

            case PrefetchDataType.ScanResult:
                await PrefetchScanResultAsync(request, ct);
                break;

            case PrefetchDataType.Attestation:
                await PrefetchAttestationAsync(request, ct);
                break;

            case PrefetchDataType.Pipeline:
                await WarmCacheForPipelineAsync(
                    Guid.Parse(request.TargetId),
                    ct);
                break;

            default:
                _logger.LogWarning(
                    "Unknown prefetch data type: {DataType}",
                    request.DataType);
                break;
        }

        job = job with
        {
            Status = PrefetchJobStatus.Completed,
            CompletedAt = _timeProvider.GetUtcNow()
        };
    }
    catch (Exception ex)
    {
        // Deliberately swallowed: the failure is surfaced via the job record.
        job = job with
        {
            Status = PrefetchJobStatus.Failed,
            CompletedAt = _timeProvider.GetUtcNow(),
            Error = ex.Message
        };
    }
    finally
    {
        _activeJobs[jobKey] = job;

        // BUG FIX: the cleanup continuation previously used the request token
        // for both Task.Delay and ContinueWith, so a cancelled request (or
        // host shutdown) cancelled the delayed removal and finished jobs
        // accumulated in _activeJobs forever; the cancelled Delay task was
        // also unobserved. CancellationToken.None guarantees the entry is
        // always pruned after the five-minute retention window.
        _ = Task.Delay(TimeSpan.FromMinutes(5), CancellationToken.None)
            .ContinueWith(
                _ => _activeJobs.TryRemove(jobKey, out _),
                CancellationToken.None,
                TaskContinuationOptions.ExecuteSynchronously,
                TaskScheduler.Default);
    }
}
|
||||
|
||||
/// <summary>
/// Fetches and caches a single gate configuration. No-op when the target id
/// is not a valid GUID, the key is already cached, or the provider returns
/// nothing.
/// </summary>
private async Task PrefetchGateConfigAsync(PrefetchRequest request, CancellationToken ct)
{
    if (!Guid.TryParse(request.TargetId, out var gateId))
    {
        return;
    }

    var key = $"gate-config:{gateId}";
    if (!await _cache.ExistsAsync(key, ct))
    {
        var data = await _dataProvider.GetGateConfigAsync(gateId, ct);
        if (data is not null)
        {
            await _cache.SetAsync(key, data, _config.GateConfigTtl, ct);
        }
    }
}
|
||||
|
||||
/// <summary>
/// Fetches and caches a single artifact's scan result, skipping the provider
/// call when the key is already cached.
/// </summary>
private async Task PrefetchScanResultAsync(PrefetchRequest request, CancellationToken ct)
{
    var key = $"scan-result:{request.TargetId}";
    if (!await _cache.ExistsAsync(key, ct))
    {
        var data = await _dataProvider.GetScanResultAsync(request.TargetId, ct);
        if (data is not null)
        {
            await _cache.SetAsync(key, data, _config.ScanResultTtl, ct);
        }
    }
}
|
||||
|
||||
/// <summary>
/// Fetches and caches a single artifact's attestation, skipping the provider
/// call when the key is already cached.
/// </summary>
private async Task PrefetchAttestationAsync(PrefetchRequest request, CancellationToken ct)
{
    var key = $"attestation:{request.TargetId}";
    if (!await _cache.ExistsAsync(key, ct))
    {
        var data = await _dataProvider.GetAttestationAsync(request.TargetId, ct);
        if (data is not null)
        {
            await _cache.SetAsync(key, data, _config.AttestationTtl, ct);
        }
    }
}
|
||||
|
||||
/// <summary>
/// Queues background prefetches for data the access pattern predicts will be
/// needed, and returns descriptors of what was scheduled. Only predictions
/// at or above the configured confidence threshold are acted on.
/// </summary>
/// <remarks>
/// The returned items describe SCHEDULED work, not completed fetches:
/// WasCached is false and no Duration is recorded because the actual fetch
/// runs fire-and-forget through the request channel.
/// </remarks>
private async Task<IReadOnlyList<PrefetchedItem>> PrefetchPredictedDataAsync(
    Guid promotionId,
    IReadOnlyList<Guid> gateIds,
    CancellationToken ct)
{
    var items = new List<PrefetchedItem>();

    // Use access patterns to predict what data will be needed.
    var pattern = GetOrCreateAccessPattern($"promotion:{promotionId}");
    var predictions = PredictRequiredData(pattern, gateIds);

    foreach (var prediction in predictions)
    {
        if (prediction.Confidence >= _config.PredictionConfidenceThreshold)
        {
            var prefetchRequest = new PrefetchRequest
            {
                Id = Guid.NewGuid(),
                DataType = prediction.DataType,
                TargetId = prediction.TargetId,
                Priority = PrefetchPriority.Predicted
            };

            // Don't await - let predictions run in background.
            // NOTE(review): the discarded task also discards enqueue failures;
            // acceptable for speculative work, but errors are invisible here.
            _ = EnqueueAsync(prefetchRequest, ct);

            items.Add(new PrefetchedItem
            {
                Key = $"{prediction.DataType}:{prediction.TargetId}",
                DataType = prediction.DataType,
                WasCached = false,
                IsPredicted = true,
                PredictionConfidence = prediction.Confidence
            });
        }
    }

    return items;
}
|
||||
|
||||
/// <summary>
/// Records that a promotion accessed the given gates, feeding the history
/// used for predictive prefetching, and enforces the configured cap on the
/// number of tracked patterns.
/// </summary>
private void RecordAccessPattern(Guid promotionId, IReadOnlyList<Guid> gateIds)
{
    var patternKey = $"promotion:{promotionId}";
    var pattern = GetOrCreateAccessPattern(patternKey);

    pattern.RecordAccess(gateIds, _timeProvider.GetUtcNow());

    // NOTE: the previous `_accessPatterns[patternKey] = pattern` re-assignment
    // was redundant — GetOrAdd inside GetOrCreateAccessPattern already stores
    // the instance — and has been removed.

    // BUG FIX: MaxAccessPatterns was declared in config but never enforced,
    // so the dictionary grew without bound. Evict the oldest patterns (by
    // CreatedAt) once the cap is exceeded.
    var overflow = _accessPatterns.Count - _config.MaxAccessPatterns;
    if (overflow > 0)
    {
        foreach (var stale in _accessPatterns.Values
            .OrderBy(p => p.CreatedAt)
            .Take(overflow))
        {
            _accessPatterns.TryRemove(stale.PatternKey, out _);
        }
    }
}
|
||||
|
||||
/// <summary>
/// Looks up the access pattern for a key, registering a fresh one (stamped
/// with the current time) on first use.
/// </summary>
private AccessPattern GetOrCreateAccessPattern(string key)
{
    if (_accessPatterns.TryGetValue(key, out var existing))
    {
        return existing;
    }

    var fresh = new AccessPattern
    {
        PatternKey = key,
        CreatedAt = _timeProvider.GetUtcNow()
    };

    // Under a race the first stored instance wins and is returned.
    return _accessPatterns.GetOrAdd(key, fresh);
}
|
||||
|
||||
/// <summary>
/// Predicts which scan results are likely to be needed next, based on which
/// artifacts were historically fetched alongside the pattern's most frequent
/// gates. Bounded to the top 10 gates and top 5 artifacts per gate to keep
/// speculative work cheap.
/// </summary>
/// <remarks>
/// NOTE(review): relies on AccessPattern's FrequentGates ordering and
/// GateToArtifactCorrelation contents, which are defined elsewhere —
/// presumably most-frequent-first; confirm against AccessPattern.
/// The currentGates parameter is not yet consulted here.
/// </remarks>
private IReadOnlyList<DataPrediction> PredictRequiredData(
    AccessPattern pattern,
    IReadOnlyList<Guid> currentGates)
{
    var predictions = new List<DataPrediction>();

    // Analyze pattern to predict which scan results and attestations
    // are likely to be needed based on historical access.
    foreach (var frequentGate in pattern.FrequentGates.Take(10))
    {
        // If this gate was frequently accessed with scan results, predict them.
        if (pattern.GateToArtifactCorrelation.TryGetValue(
            frequentGate, out var artifacts))
        {
            foreach (var artifact in artifacts.Take(5))
            {
                predictions.Add(new DataPrediction
                {
                    DataType = PrefetchDataType.ScanResult,
                    TargetId = artifact,
                    Confidence = CalculateConfidence(pattern, frequentGate, artifact)
                });
            }
        }
    }

    return predictions;
}
|
||||
|
||||
/// <summary>
/// Estimates how likely an artifact is needed for a gate as the fraction of
/// all recorded accesses that paired them, scaled by 10 (so ~10% co-occurrence
/// already yields full confidence) and clamped to 1.0. Returns 0 when no
/// accesses have been recorded.
/// </summary>
private double CalculateConfidence(
    AccessPattern pattern,
    Guid gateId,
    string artifactId)
{
    var pairedCount = pattern.GetAccessCount(gateId, artifactId);
    var total = pattern.TotalAccesses;

    return total == 0
        ? 0.0
        : Math.Min(1.0, (double)pairedCount / total * 10);
}
|
||||
|
||||
/// <summary>
/// Rough ETA for a queued request: a per-data-type fetch cost plus roughly
/// 10ms of queue delay for each request already waiting in the channel.
/// </summary>
private DateTimeOffset EstimateCompletion(PrefetchRequest request)
{
    var fetchCost = request.DataType switch
    {
        PrefetchDataType.GateConfig => TimeSpan.FromMilliseconds(50),
        PrefetchDataType.ScanResult => TimeSpan.FromMilliseconds(200),
        PrefetchDataType.Attestation => TimeSpan.FromMilliseconds(150),
        PrefetchDataType.Pipeline => TimeSpan.FromSeconds(5),
        _ => TimeSpan.FromMilliseconds(100)
    };

    var queueDelay = TimeSpan.FromMilliseconds(_requestChannel.Reader.Count * 10);

    return _timeProvider.GetUtcNow() + queueDelay + fetchCost;
}
|
||||
}
|
||||
|
||||
/// <summary>
/// Configuration for the data prefetcher.
/// </summary>
public sealed record DataPrefetcherConfig
{
    /// <summary>
    /// Maximum concurrent prefetch operations. Default: 10.
    /// </summary>
    public int MaxConcurrentPrefetches { get; init; } = 10;

    /// <summary>
    /// Cache TTL for gate configurations. Default: 10 minutes.
    /// </summary>
    public TimeSpan GateConfigTtl { get; init; } = TimeSpan.FromMinutes(10);

    /// <summary>
    /// Cache TTL for scan results. Default: 15 minutes.
    /// </summary>
    public TimeSpan ScanResultTtl { get; init; } = TimeSpan.FromMinutes(15);

    /// <summary>
    /// Cache TTL for attestations. Default: 30 minutes.
    /// </summary>
    public TimeSpan AttestationTtl { get; init; } = TimeSpan.FromMinutes(30);

    /// <summary>
    /// Minimum confidence (0.0–1.0) required before a predicted item is
    /// speculatively prefetched. Default: 0.6.
    /// </summary>
    public double PredictionConfidenceThreshold { get; init; } = 0.6;

    /// <summary>
    /// Maximum access patterns to track. Default: 1000.
    /// </summary>
    public int MaxAccessPatterns { get; init; } = 1000;
}
|
||||
|
||||
/// <summary>
/// Request to prefetch data. Id and RequestedAt are stamped by
/// <c>EnqueueAsync</c> when left at their defaults.
/// </summary>
public sealed record PrefetchRequest
{
    /// <summary>Request identifier; Guid.Empty is replaced on enqueue.</summary>
    public Guid Id { get; init; }
    /// <summary>Kind of data to prefetch.</summary>
    public required PrefetchDataType DataType { get; init; }
    /// <summary>Target key: a GUID string for gates/pipelines, a digest otherwise.</summary>
    public required string TargetId { get; init; }
    /// <summary>Scheduling priority; Predicted marks speculative requests.</summary>
    public PrefetchPriority Priority { get; init; } = PrefetchPriority.Normal;
    /// <summary>Stamped with the enqueue time.</summary>
    public DateTimeOffset RequestedAt { get; init; }
}
|
||||
|
||||
/// <summary>
/// Result of enqueuing a prefetch request.
/// </summary>
public sealed record PrefetchQueueResult
{
    /// <summary>Identifier assigned to the enqueued request.</summary>
    public required Guid RequestId { get; init; }

    /// <summary>True when the request was accepted into the queue.</summary>
    public required bool Queued { get; init; }

    /// <summary>Best-effort estimate of when the prefetch will complete.</summary>
    public DateTimeOffset EstimatedCompletionTime { get; init; }
}
|
||||
|
||||
/// <summary>
/// Result of a prefetch operation.
/// </summary>
public sealed record PrefetchResult
{
    /// <summary>Promotion the prefetch was performed for, when applicable.</summary>
    public Guid? PromotionId { get; init; }

    /// <summary>Pipeline the prefetch was performed for, when applicable.</summary>
    public Guid? PipelineId { get; init; }

    /// <summary>Per-item outcomes of the operation.</summary>
    public required ImmutableArray<PrefetchedItem> Items { get; init; }

    /// <summary>Total wall time the operation took.</summary>
    public required TimeSpan Duration { get; init; }

    /// <summary>Number of items already present in the cache.</summary>
    public required int CacheHits { get; init; }

    /// <summary>Number of items that had to be fetched from the provider.</summary>
    public required int CacheMisses { get; init; }
}
|
||||
|
||||
/// <summary>
/// A single prefetched item.
/// </summary>
public sealed record PrefetchedItem
{
    /// <summary>Cache key the item was stored or looked up under.</summary>
    public required string Key { get; init; }

    /// <summary>Kind of data this item holds.</summary>
    public required PrefetchDataType DataType { get; init; }

    /// <summary>True when the item was served from the cache.</summary>
    public required bool WasCached { get; init; }

    /// <summary>Time spent obtaining this item.</summary>
    public TimeSpan Duration { get; init; }

    /// <summary>False when fetching the item failed.</summary>
    public bool Success { get; init; } = true;

    /// <summary>True when the item was fetched speculatively from a prediction.</summary>
    public bool IsPredicted { get; init; }

    /// <summary>Confidence (0..1) of the prediction that triggered the fetch.</summary>
    public double PredictionConfidence { get; init; }
}
|
||||
|
||||
/// <summary>
/// Types of data that can be prefetched.
/// </summary>
public enum PrefetchDataType
{
    /// <summary>Gate configuration.</summary>
    GateConfig,

    /// <summary>Artifact scan result.</summary>
    ScanResult,

    /// <summary>Artifact attestation.</summary>
    Attestation,

    /// <summary>Pipeline data.</summary>
    Pipeline,

    /// <summary>Environment data.</summary>
    Environment,

    /// <summary>Policy data.</summary>
    Policy
}
|
||||
|
||||
/// <summary>
/// Priority levels for prefetch requests.
/// </summary>
public enum PrefetchPriority
{
    /// <summary>Background, lowest urgency.</summary>
    Low,

    /// <summary>Default priority.</summary>
    Normal,

    /// <summary>Urgent; processed ahead of normal requests.</summary>
    High,

    /// <summary>Speculative request generated by the prediction engine.</summary>
    Predicted
}
|
||||
|
||||
/// <summary>
/// Status of a prefetch job.
/// </summary>
public enum PrefetchJobStatus
{
    /// <summary>Queued but not yet started.</summary>
    Pending,

    /// <summary>Currently executing.</summary>
    InProgress,

    /// <summary>Finished successfully.</summary>
    Completed,

    /// <summary>Finished with an error.</summary>
    Failed
}
|
||||
|
||||
/// <summary>
/// Active prefetch job tracking.
/// </summary>
public sealed record PrefetchJob
{
    /// <summary>Request this job is executing.</summary>
    public required Guid RequestId { get; init; }

    /// <summary>When execution began.</summary>
    public required DateTimeOffset StartedAt { get; init; }

    /// <summary>When execution finished; null while still running.</summary>
    public DateTimeOffset? CompletedAt { get; init; }

    /// <summary>Current lifecycle state.</summary>
    public required PrefetchJobStatus Status { get; init; }

    /// <summary>Failure description when <see cref="Status"/> is Failed; otherwise null.</summary>
    public string? Error { get; init; }
}
|
||||
|
||||
/// <summary>
/// Statistics about prefetch operations.
/// </summary>
public sealed record PrefetchStatistics
{
    /// <summary>Jobs currently executing.</summary>
    public required int ActiveJobs { get; init; }

    /// <summary>Requests waiting in the queue.</summary>
    public required int PendingRequests { get; init; }

    /// <summary>Access patterns currently tracked for prediction.</summary>
    public required int AccessPatternsTracked { get; init; }

    /// <summary>When this snapshot was taken.</summary>
    public required DateTimeOffset Timestamp { get; init; }
}
|
||||
|
||||
/// <summary>
/// Prediction for data that might be needed.
/// </summary>
public sealed record DataPrediction
{
    /// <summary>Kind of data predicted to be needed.</summary>
    public required PrefetchDataType DataType { get; init; }

    /// <summary>Identifier of the entity the data belongs to.</summary>
    public required string TargetId { get; init; }

    /// <summary>Confidence of the prediction (0..1).</summary>
    public required double Confidence { get; init; }
}
|
||||
|
||||
/// <summary>
/// Accumulates gate-access and gate/artifact correlation counts for one
/// pattern key, feeding the predictive prefetching heuristics.
/// Recording is safe for concurrent callers (ConcurrentDictionary + Interlocked).
/// </summary>
public sealed class AccessPattern
{
    private readonly ConcurrentDictionary<Guid, int> _gateHits = new();
    private readonly ConcurrentDictionary<(Guid, string), int> _pairHits = new();
    private int _totalAccesses;

    /// <summary>Key identifying this pattern.</summary>
    public required string PatternKey { get; init; }

    /// <summary>When tracking for this pattern began.</summary>
    public required DateTimeOffset CreatedAt { get; init; }

    /// <summary>Timestamp of the most recent recorded access.</summary>
    public DateTimeOffset LastAccessedAt { get; private set; }

    /// <summary>Total number of <see cref="RecordAccess"/> calls observed.</summary>
    public int TotalAccesses => _totalAccesses;

    /// <summary>Gate ids ordered from most to least frequently accessed.</summary>
    public IEnumerable<Guid> FrequentGates
        => _gateHits.OrderByDescending(static entry => entry.Value).Select(static entry => entry.Key);

    /// <summary>Distinct artifact ids observed together with each gate.</summary>
    public ConcurrentDictionary<Guid, ImmutableArray<string>> GateToArtifactCorrelation { get; } = new();

    /// <summary>Records one access that touched the given gates.</summary>
    public void RecordAccess(IReadOnlyList<Guid> gateIds, DateTimeOffset timestamp)
    {
        LastAccessedAt = timestamp;
        Interlocked.Increment(ref _totalAccesses);

        for (var i = 0; i < gateIds.Count; i++)
        {
            _gateHits.AddOrUpdate(gateIds[i], 1, static (_, hits) => hits + 1);
        }
    }

    /// <summary>Records that <paramref name="artifactId"/> was observed with <paramref name="gateId"/>.</summary>
    public void RecordCorrelation(Guid gateId, string artifactId)
    {
        _pairHits.AddOrUpdate((gateId, artifactId), 1, static (_, hits) => hits + 1);

        GateToArtifactCorrelation.AddOrUpdate(
            gateId,
            _ => [artifactId],
            (_, known) => known.Contains(artifactId) ? known : known.Add(artifactId));
    }

    /// <summary>Times the gate/artifact pair has been correlated; 0 when never seen.</summary>
    public int GetAccessCount(Guid gateId, string artifactId)
        => _pairHits.TryGetValue((gateId, artifactId), out var hits) ? hits : 0;
}
|
||||
|
||||
/// <summary>
/// Pipeline metadata for cache warming.
/// </summary>
public sealed record PipelineMetadata
{
    /// <summary>Pipeline this metadata describes.</summary>
    public required Guid PipelineId { get; init; }

    /// <summary>Gates evaluated by the pipeline.</summary>
    public required ImmutableArray<Guid> GateIds { get; init; }

    /// <summary>Digests of the artifacts flowing through the pipeline.</summary>
    public required ImmutableArray<string> ArtifactDigests { get; init; }

    /// <summary>Environments the pipeline deploys to.</summary>
    public required ImmutableArray<Guid> EnvironmentIds { get; init; }
}
|
||||
|
||||
/// <summary>
/// Interface for providing data to prefetch.
/// </summary>
public interface IPrefetchDataProvider
{
    /// <summary>Fetches the configuration for a gate; the task result may be null when unavailable.</summary>
    Task<object?> GetGateConfigAsync(Guid gateId, CancellationToken ct = default);

    /// <summary>Fetches the scan result for an artifact digest; may be null when unavailable.</summary>
    Task<object?> GetScanResultAsync(string digest, CancellationToken ct = default);

    /// <summary>Fetches the attestation for an artifact digest; may be null when unavailable.</summary>
    Task<object?> GetAttestationAsync(string digest, CancellationToken ct = default);

    /// <summary>Fetches metadata for a pipeline; may be null when the pipeline is unknown.</summary>
    Task<PipelineMetadata?> GetPipelineMetadataAsync(Guid pipelineId, CancellationToken ct = default);
}
|
||||
|
||||
/// <summary>
/// Interface for the prefetch cache.
/// </summary>
public interface IPrefetchCache
{
    /// <summary>Checks whether an entry exists for <paramref name="key"/>.</summary>
    Task<bool> ExistsAsync(string key, CancellationToken ct = default);

    /// <summary>Retrieves the cached value for <paramref name="key"/>; the task result may be null (e.g. on a miss).</summary>
    Task<T?> GetAsync<T>(string key, CancellationToken ct = default) where T : class;

    /// <summary>Stores <paramref name="value"/> under <paramref name="key"/> with the given time-to-live.</summary>
    Task SetAsync(string key, object value, TimeSpan ttl, CancellationToken ct = default);

    /// <summary>Removes the entry for <paramref name="key"/>, if present.</summary>
    Task RemoveAsync(string key, CancellationToken ct = default);
}
|
||||
@@ -0,0 +1,481 @@
|
||||
using FluentAssertions;
|
||||
using Microsoft.Extensions.Logging.Abstractions;
|
||||
using Microsoft.Extensions.Time.Testing;
|
||||
using StellaOps.ReleaseOrchestrator.Performance.Baseline;
|
||||
using System.Collections.Immutable;
|
||||
|
||||
namespace StellaOps.ReleaseOrchestrator.Performance.Tests;
|
||||
|
||||
/// <summary>
/// Unit tests for <see cref="BaselineTracker"/>: metric recording, baseline
/// establishment and smoothing, and regression analysis against a stored baseline.
/// The canonical baseline literal and the record-N-metrics loop, previously
/// duplicated across tests, are factored into <see cref="CreateBaseline"/> and
/// <see cref="RecordManyAsync"/>.
/// </summary>
public sealed class BaselineTrackerTests
{
    private readonly FakeTimeProvider _timeProvider;
    private readonly InMemoryBaselineStore _store;
    private readonly BaselineTrackerConfig _config;

    public BaselineTrackerTests()
    {
        _timeProvider = new FakeTimeProvider(DateTimeOffset.UtcNow);
        _store = new InMemoryBaselineStore();
        _config = new BaselineTrackerConfig
        {
            WindowSize = 100,
            WindowDuration = TimeSpan.FromMinutes(10),
            MinObservationsForBaseline = 10,
            MinObservationsForAnalysis = 5,
            BaselineValidity = TimeSpan.FromDays(7),
            SmoothingFactor = 0.3,
            SignificanceThreshold = 2.0,
            RegressionThresholdPercent = 10.0,
            SevereRegressionThresholdPercent = 25.0,
            ImprovementThresholdPercent = 10.0
        };
    }

    [Fact]
    public async Task RecordMetricAsync_ShouldAddObservation()
    {
        // Arrange
        var tracker = CreateTracker();
        var metric = new PerformanceMetric
        {
            MetricName = "gate.evaluation.duration",
            Value = 150.0,
            Unit = MetricUnit.Milliseconds
        };

        // Act
        await tracker.RecordMetricAsync(metric);

        // Assert
        var stats = tracker.GetStatistics();
        stats.TotalObservations.Should().Be(1);
        stats.ActiveMetrics.Should().Be(1);
    }

    [Fact]
    public async Task EstablishBaselineAsync_WithSufficientObservations_ShouldCreateBaseline()
    {
        // Arrange
        var tracker = CreateTracker();
        var metricName = "gate.evaluation.duration";

        // Record enough observations (values 100..114; MinObservationsForBaseline is 10)
        await RecordManyAsync(tracker, metricName, 15, i => 100 + i);

        // Act
        var baseline = await tracker.EstablishBaselineAsync(metricName);

        // Assert
        baseline.Should().NotBeNull();
        baseline.MetricName.Should().Be(metricName);
        baseline.SampleCount.Should().Be(15);
        baseline.Mean.Should().BeApproximately(107, 0.1);
        baseline.Min.Should().Be(100);
        baseline.Max.Should().Be(114);
    }

    [Fact]
    public async Task EstablishBaselineAsync_WithInsufficientObservations_ShouldThrow()
    {
        // Arrange
        var tracker = CreateTracker();
        var metricName = "gate.evaluation.duration";

        // Record fewer observations than MinObservationsForBaseline (10)
        await RecordManyAsync(tracker, metricName, 5, i => 100 + i);

        // Act & Assert
        await Assert.ThrowsAsync<InvalidOperationException>(
            () => tracker.EstablishBaselineAsync(metricName));
    }

    [Fact]
    public async Task AnalyzeAsync_WithNoBaseline_ShouldReturnNoBaselineStatus()
    {
        // Arrange
        var tracker = CreateTracker();
        var metricName = "gate.evaluation.duration";

        // Act
        var analysis = await tracker.AnalyzeAsync(metricName);

        // Assert
        analysis.HasBaseline.Should().BeFalse();
        analysis.Status.Should().Be(RegressionStatus.NoBaseline);
    }

    [Fact]
    public async Task AnalyzeAsync_WithInsufficientData_ShouldReturnInsufficientDataStatus()
    {
        // Arrange
        var tracker = CreateTracker();
        var metricName = "gate.evaluation.duration";

        await _store.SaveBaselineAsync(CreateBaseline(metricName, standardDeviation: 15));

        // Record fewer new observations than MinObservationsForAnalysis (5)
        await RecordManyAsync(tracker, metricName, 3, _ => 100);

        // Act
        var analysis = await tracker.AnalyzeAsync(metricName);

        // Assert
        analysis.HasBaseline.Should().BeTrue();
        analysis.Status.Should().Be(RegressionStatus.InsufficientData);
    }

    [Fact]
    public async Task AnalyzeAsync_WithNormalMetrics_ShouldReturnNormalStatus()
    {
        // Arrange
        var tracker = CreateTracker();
        var metricName = "gate.evaluation.duration";

        await _store.SaveBaselineAsync(CreateBaseline(metricName, standardDeviation: 15));

        // Record observations close to the baseline mean (slight variation only)
        await RecordManyAsync(tracker, metricName, 10, i => 100 + (i % 3));

        // Act
        var analysis = await tracker.AnalyzeAsync(metricName);

        // Assert
        analysis.Status.Should().Be(RegressionStatus.Normal);
    }

    [Fact]
    public async Task AnalyzeAsync_WithRegressionMetrics_ShouldReturnRegressionStatus()
    {
        // Arrange
        var tracker = CreateTracker();
        var metricName = "gate.evaluation.duration";

        await _store.SaveBaselineAsync(CreateBaseline(metricName, standardDeviation: 10));

        // Record values 20% above the baseline mean of 100
        await RecordManyAsync(tracker, metricName, 10, _ => 120);

        // Act
        var analysis = await tracker.AnalyzeAsync(metricName);

        // Assert
        analysis.Status.Should().BeOneOf(RegressionStatus.Regression, RegressionStatus.SevereRegression);
        analysis.Comparison!.PercentChange.Should().BeGreaterThan(0);
    }

    [Fact]
    public async Task AnalyzeAsync_WithImprovedMetrics_ShouldReturnImprovementStatus()
    {
        // Arrange
        var tracker = CreateTracker();
        var metricName = "gate.evaluation.duration";

        await _store.SaveBaselineAsync(CreateBaseline(metricName, standardDeviation: 10));

        // Record values 20% below the baseline mean of 100
        await RecordManyAsync(tracker, metricName, 10, _ => 80);

        // Act
        var analysis = await tracker.AnalyzeAsync(metricName);

        // Assert
        analysis.Status.Should().Be(RegressionStatus.Improvement);
        analysis.Comparison!.PercentChange.Should().BeLessThan(0);
    }

    [Fact]
    public async Task UpdateBaselineAsync_ShouldApplySmoothingFactor()
    {
        // Arrange
        var tracker = CreateTracker();
        var metricName = "gate.evaluation.duration";

        await _store.SaveBaselineAsync(CreateBaseline(metricName, standardDeviation: 15));

        // Record observations below the original baseline mean
        await RecordManyAsync(tracker, metricName, 10, _ => 90);

        // Act
        var updatedBaseline = await tracker.UpdateBaselineAsync(metricName);

        // Assert
        updatedBaseline.Should().NotBeNull();
        updatedBaseline.SampleCount.Should().Be(110); // Original 100 + 10 new
        // With smoothing factor 0.3: new_mean = 0.3 * 90 + 0.7 * 100 = 97
        updatedBaseline.Mean.Should().BeLessThan(100);
        updatedBaseline.Mean.Should().BeGreaterThan(90);
    }

    [Fact]
    public async Task AnalyzeAllAsync_ShouldAnalyzeAllTrackedMetrics()
    {
        // Arrange
        var tracker = CreateTracker();
        var metricNames = new[] { "metric1", "metric2", "metric3" };

        foreach (var metricName in metricNames)
        {
            await RecordManyAsync(tracker, metricName, 10, _ => 100);
        }

        // Act
        var analyses = await tracker.AnalyzeAllAsync();

        // Assert
        analyses.Should().HaveCount(3);
    }

    [Fact]
    public async Task GetStatistics_ShouldReturnCorrectCounts()
    {
        // Arrange
        var tracker = CreateTracker();

        // Two observations for metric1 (100, 110) and one for metric2 (50)
        await RecordManyAsync(tracker, "metric1", 2, i => 100 + i * 10);
        await RecordManyAsync(tracker, "metric2", 1, _ => 50);

        // Act
        var stats = tracker.GetStatistics();

        // Assert
        stats.ActiveMetrics.Should().Be(2);
        stats.TotalObservations.Should().Be(3);
    }

    [Fact]
    public async Task RecordMetricAsync_ShouldSetTimestamp()
    {
        // Arrange
        var tracker = CreateTracker();
        var metric = new PerformanceMetric
        {
            MetricName = "test.metric",
            Value = 100,
            Unit = MetricUnit.Milliseconds
        };

        // Act
        await tracker.RecordMetricAsync(metric);

        // Assert
        var stats = tracker.GetStatistics();
        stats.OldestObservation.Should().BeCloseTo(_timeProvider.GetUtcNow(), TimeSpan.FromSeconds(1));
    }

    /// <summary>Builds a tracker wired to the shared fake clock, store, and config.</summary>
    private BaselineTracker CreateTracker()
    {
        return new BaselineTracker(
            _store,
            _timeProvider,
            _config,
            NullLogger<BaselineTracker>.Instance);
    }

    /// <summary>
    /// Creates the canonical test baseline (mean 100, percentiles 120/130/150,
    /// range 80-160, 100 samples, valid 7 days) with the given standard deviation.
    /// </summary>
    private PerformanceBaseline CreateBaseline(string metricName, double standardDeviation) => new()
    {
        Id = Guid.NewGuid(),
        MetricName = metricName,
        Mean = 100,
        Median = 100,
        P90 = 120,
        P95 = 130,
        P99 = 150,
        Min = 80,
        Max = 160,
        StandardDeviation = standardDeviation,
        SampleCount = 100,
        EstablishedAt = _timeProvider.GetUtcNow(),
        ValidUntil = _timeProvider.GetUtcNow() + TimeSpan.FromDays(7)
    };

    /// <summary>Records <paramref name="count"/> millisecond observations, one per index.</summary>
    private static async Task RecordManyAsync(
        BaselineTracker tracker,
        string metricName,
        int count,
        Func<int, double> valueSelector)
    {
        for (var i = 0; i < count; i++)
        {
            await tracker.RecordMetricAsync(new PerformanceMetric
            {
                MetricName = metricName,
                Value = valueSelector(i),
                Unit = MetricUnit.Milliseconds
            });
        }
    }
}
|
||||
|
||||
/// <summary>
/// In-memory implementation of IBaselineStore for testing.
/// Not thread-safe; intended for single-threaded test use only.
/// </summary>
public sealed class InMemoryBaselineStore : IBaselineStore
{
    private readonly Dictionary<string, PerformanceBaseline> _baselines = new();
    private readonly List<MetricAggregate> _aggregates = new();

    /// <summary>Looks up the stored baseline for a metric; null when none exists.</summary>
    public Task<PerformanceBaseline?> GetBaselineAsync(string metricName, CancellationToken ct = default)
        => Task.FromResult(_baselines.GetValueOrDefault(metricName));

    /// <summary>Stores (or overwrites) the baseline, keyed by its metric name.</summary>
    public Task SaveBaselineAsync(PerformanceBaseline baseline, CancellationToken ct = default)
    {
        _baselines[baseline.MetricName] = baseline;
        return Task.CompletedTask;
    }

    /// <summary>Appends an aggregate to the in-memory list.</summary>
    public Task SaveAggregateAsync(MetricAggregate aggregate, CancellationToken ct = default)
    {
        _aggregates.Add(aggregate);
        return Task.CompletedTask;
    }

    /// <summary>
    /// Returns the metric's aggregates whose windows fall entirely within
    /// [<paramref name="from"/>, <paramref name="to"/>].
    /// </summary>
    public Task<ImmutableArray<MetricAggregate>> GetAggregatesAsync(
        string metricName,
        DateTimeOffset from,
        DateTimeOffset to,
        CancellationToken ct = default)
    {
        var builder = ImmutableArray.CreateBuilder<MetricAggregate>();
        foreach (var aggregate in _aggregates)
        {
            if (aggregate.MetricName == metricName &&
                aggregate.WindowStart >= from &&
                aggregate.WindowEnd <= to)
            {
                builder.Add(aggregate);
            }
        }

        return Task.FromResult(builder.ToImmutable());
    }
}
|
||||
@@ -0,0 +1,298 @@
|
||||
using FluentAssertions;
|
||||
using Microsoft.Extensions.Logging.Abstractions;
|
||||
using Microsoft.Extensions.Time.Testing;
|
||||
using StellaOps.ReleaseOrchestrator.Performance.Pooling;
|
||||
|
||||
namespace StellaOps.ReleaseOrchestrator.Performance.Tests;
|
||||
|
||||
/// <summary>
/// Unit tests for <see cref="ConnectionPoolManager"/>: acquisition, connection
/// reuse, per-endpoint/per-type pool partitioning, statistics, idle cleanup,
/// and cancellation. All time-dependent behavior is driven by a FakeTimeProvider.
/// </summary>
public sealed class ConnectionPoolManagerTests
{
    private readonly FakeTimeProvider _timeProvider;
    private readonly InMemoryConnectionFactory _connectionFactory;
    private readonly ConnectionPoolConfig _config;

    public ConnectionPoolManagerTests()
    {
        _timeProvider = new FakeTimeProvider(DateTimeOffset.UtcNow);
        _connectionFactory = new InMemoryConnectionFactory();
        _config = new ConnectionPoolConfig
        {
            MaxTotalConnections = 50,
            MaxConnectionsPerPool = 10,
            MaxRegistryConnectionsPerEndpoint = 5,
            MaxAgentConnectionsPerEndpoint = 3,
            IdleTimeout = TimeSpan.FromMinutes(5),
            ConnectionTimeout = TimeSpan.FromSeconds(10),
            MaxConnectionLifetime = TimeSpan.FromMinutes(30),
            CleanupInterval = TimeSpan.FromMinutes(1)
        };
    }

    [Fact]
    public async Task AcquireAsync_ShouldReturnConnection()
    {
        // Arrange
        await using var manager = CreateManager();
        var endpoint = "registry.example.com";

        // Act
        await using var lease = await manager.AcquireAsync(endpoint, ConnectionType.Registry);

        // Assert
        lease.Should().NotBeNull();
        lease.Connection.Should().NotBeNull();
        lease.Endpoint.Should().Be(endpoint);
        lease.ConnectionType.Should().Be(ConnectionType.Registry);
    }

    [Fact]
    public async Task AcquireAsync_ShouldCreateNewConnection()
    {
        // Arrange
        await using var manager = CreateManager();
        var endpoint = "registry.example.com";

        // Act
        await using var lease = await manager.AcquireAsync(endpoint, ConnectionType.Registry);

        // Assert
        _connectionFactory.ConnectionsCreated.Should().Be(1);
    }

    [Fact]
    public async Task AcquireAsync_ShouldReuseReturnedConnection()
    {
        // Arrange
        await using var manager = CreateManager();
        var endpoint = "registry.example.com";

        // Act - acquire and release
        var lease1 = await manager.AcquireAsync(endpoint, ConnectionType.Registry);
        var connectionId1 = lease1.ConnectionId;
        await lease1.DisposeAsync();

        // Acquire again
        var lease2 = await manager.AcquireAsync(endpoint, ConnectionType.Registry);
        var connectionId2 = lease2.ConnectionId;
        await lease2.DisposeAsync();

        // Assert - the pooled connection was handed out again, not recreated
        _connectionFactory.ConnectionsCreated.Should().Be(1);
        connectionId1.Should().Be(connectionId2);
    }

    [Fact]
    public async Task AcquireAsync_WithDifferentEndpoints_ShouldCreateSeparatePools()
    {
        // Arrange
        await using var manager = CreateManager();
        var endpoint1 = "registry1.example.com";
        var endpoint2 = "registry2.example.com";

        // Act
        await using var lease1 = await manager.AcquireAsync(endpoint1, ConnectionType.Registry);
        await using var lease2 = await manager.AcquireAsync(endpoint2, ConnectionType.Registry);

        // Assert
        _connectionFactory.ConnectionsCreated.Should().Be(2);
        lease1.Endpoint.Should().NotBe(lease2.Endpoint);
    }

    [Fact]
    public async Task AcquireAsync_WithDifferentConnectionTypes_ShouldCreateSeparatePools()
    {
        // Arrange
        await using var manager = CreateManager();
        var endpoint = "example.com";

        // Act
        await using var lease1 = await manager.AcquireAsync(endpoint, ConnectionType.Registry);
        await using var lease2 = await manager.AcquireAsync(endpoint, ConnectionType.Agent);

        // Assert
        _connectionFactory.ConnectionsCreated.Should().Be(2);
        lease1.ConnectionType.Should().NotBe(lease2.ConnectionType);
    }

    [Fact]
    public async Task GetStatistics_ShouldReturnCorrectCounts()
    {
        // Arrange
        await using var manager = CreateManager();
        var endpoint = "registry.example.com";

        // Act - acquire connections
        var lease1 = await manager.AcquireAsync(endpoint, ConnectionType.Registry);
        var lease2 = await manager.AcquireAsync(endpoint, ConnectionType.Registry);

        var stats = manager.GetStatistics();

        // Assert
        stats.TotalPools.Should().Be(1);
        stats.TotalConnections.Should().Be(2);
        stats.ActiveConnections.Should().Be(2);
        stats.IdleConnections.Should().Be(0);

        // Release one connection - it should move from active to idle
        await lease1.DisposeAsync();

        stats = manager.GetStatistics();
        stats.ActiveConnections.Should().Be(1);
        stats.IdleConnections.Should().Be(1);

        await lease2.DisposeAsync();
    }

    [Fact]
    public async Task CleanupIdleConnectionsAsync_ShouldRemoveExpiredConnections()
    {
        // Arrange
        await using var manager = CreateManager();
        var endpoint = "registry.example.com";

        // Acquire and release a connection
        var lease = await manager.AcquireAsync(endpoint, ConnectionType.Registry);
        await lease.DisposeAsync();

        // Verify it's in the pool
        var statsBeforeCleanup = manager.GetStatistics();
        statsBeforeCleanup.IdleConnections.Should().Be(1);

        // Advance the fake clock past the idle timeout
        _timeProvider.Advance(_config.IdleTimeout + TimeSpan.FromSeconds(1));

        // Act
        await manager.CleanupIdleConnectionsAsync();

        // Assert
        var statsAfterCleanup = manager.GetStatistics();
        statsAfterCleanup.IdleConnections.Should().Be(0);
    }

    [Fact]
    public async Task RemovePoolAsync_ShouldDisposeAllConnections()
    {
        // Arrange
        await using var manager = CreateManager();
        var endpoint = "registry.example.com";

        // Acquire and release connections
        var lease = await manager.AcquireAsync(endpoint, ConnectionType.Registry);
        await lease.DisposeAsync();

        // Verify pool exists
        var statsBefore = manager.GetStatistics();
        statsBefore.TotalPools.Should().Be(1);

        // Act
        await manager.RemovePoolAsync(endpoint, ConnectionType.Registry);

        // Assert
        var statsAfter = manager.GetStatistics();
        statsAfter.TotalPools.Should().Be(0);
    }

    [Fact]
    public async Task AcquireAsync_ShouldSetAcquiredAtTime()
    {
        // Arrange
        await using var manager = CreateManager();
        var endpoint = "registry.example.com";

        // Act
        await using var lease = await manager.AcquireAsync(endpoint, ConnectionType.Registry);

        // Assert
        // Compare against the injected fake clock, not DateTimeOffset.UtcNow:
        // the manager stamps leases via TimeProvider, so comparing to the wall
        // clock only passed by coincidence and would break if fake time is advanced.
        lease.AcquiredAt.Should().BeCloseTo(_timeProvider.GetUtcNow(), TimeSpan.FromSeconds(1));
    }

    [Fact]
    public async Task Connection_IsConnected_ShouldReturnTrue()
    {
        // Arrange
        await using var manager = CreateManager();
        var endpoint = "registry.example.com";

        // Act
        await using var lease = await manager.AcquireAsync(endpoint, ConnectionType.Registry);

        // Assert
        lease.Connection.IsConnected.Should().BeTrue();
    }

    [Fact]
    public async Task AcquireAsync_WithCancellation_ShouldRespectToken()
    {
        // Arrange
        await using var manager = CreateManager();
        var endpoint = "registry.example.com";
        using var cts = new CancellationTokenSource(); // dispose the CTS (was leaked before)
        cts.Cancel();

        // Act & Assert
        await Assert.ThrowsAsync<OperationCanceledException>(async () =>
        {
            await manager.AcquireAsync(endpoint, ConnectionType.Registry, cts.Token);
        });
    }

    /// <summary>Builds a manager wired to the shared fake factory, clock, and config.</summary>
    private ConnectionPoolManager CreateManager()
    {
        return new ConnectionPoolManager(
            _connectionFactory,
            _timeProvider,
            _config,
            NullLogger<ConnectionPoolManager>.Instance);
    }
}
|
||||
|
||||
/// <summary>
/// In-memory implementation of IConnectionFactory for testing.
/// Counts every connection it hands out so tests can assert on reuse.
/// </summary>
public sealed class InMemoryConnectionFactory : IConnectionFactory
{
    private int _connectionsCreated;

    /// <summary>Total number of connections this factory has produced.</summary>
    public int ConnectionsCreated => _connectionsCreated;

    /// <summary>Creates a fake pooled connection, honouring cancellation before counting.</summary>
    public Task<IPooledConnection> CreateAsync(
        string endpoint,
        ConnectionType connectionType,
        CancellationToken ct = default)
    {
        ct.ThrowIfCancellationRequested();

        Interlocked.Increment(ref _connectionsCreated);

        IPooledConnection connection = new InMemoryPooledConnection(endpoint, connectionType);
        return Task.FromResult(connection);
    }
}
|
||||
|
||||
/// <summary>
/// In-memory implementation of IPooledConnection for testing.
/// Starts connected and flips to disconnected/disposed on DisposeAsync.
/// </summary>
public sealed class InMemoryPooledConnection : IPooledConnection
{
    public InMemoryPooledConnection(string endpoint, ConnectionType type)
    {
        Endpoint = endpoint;
        Type = type;
    }

    /// <summary>Endpoint this connection targets.</summary>
    public string Endpoint { get; }

    /// <summary>Pool category of the connection.</summary>
    public ConnectionType Type { get; }

    /// <summary>True until the connection is disposed.</summary>
    public bool IsConnected { get; private set; } = true;

    /// <summary>True once <see cref="DisposeAsync"/> has run.</summary>
    public bool IsDisposed { get; private set; }

    /// <summary>Reports whether the connection is still usable.</summary>
    public Task<bool> ValidateAsync(CancellationToken ct = default)
        => Task.FromResult(IsConnected && !IsDisposed);

    /// <summary>Marks the connection closed and disposed; safe to call repeatedly.</summary>
    public ValueTask DisposeAsync()
    {
        IsConnected = false;
        IsDisposed = true;
        return ValueTask.CompletedTask;
    }
}
|
||||
@@ -0,0 +1,293 @@
|
||||
using FluentAssertions;
|
||||
using Microsoft.Extensions.Logging.Abstractions;
|
||||
using Microsoft.Extensions.Time.Testing;
|
||||
using StellaOps.ReleaseOrchestrator.Performance.Prefetch;
|
||||
using System.Collections.Immutable;
|
||||
|
||||
namespace StellaOps.ReleaseOrchestrator.Performance.Tests;
|
||||
|
||||
/// <summary>
/// Unit tests for the DataPrefetcher, driving it through in-memory fakes
/// and a deterministic fake clock.
/// </summary>
public sealed class DataPrefetcherTests
{
    // Shared fakes; xUnit creates a fresh instance of this class per test,
    // so state does not leak between tests.
    private readonly FakeTimeProvider _timeProvider = new(DateTimeOffset.UtcNow);
    private readonly InMemoryPrefetchDataProvider _dataProvider = new();
    private readonly InMemoryPrefetchCache _cache = new();
    private readonly DataPrefetcherConfig _config = new()
    {
        MaxConcurrentPrefetches = 5,
        GateConfigTtl = TimeSpan.FromMinutes(10),
        ScanResultTtl = TimeSpan.FromMinutes(15),
        AttestationTtl = TimeSpan.FromMinutes(30),
        PredictionConfidenceThreshold = 0.6
    };

    [Fact]
    public async Task EnqueueAsync_ShouldQueueRequest()
    {
        // Arrange
        var sut = CreateSut();
        var request = new PrefetchRequest
        {
            DataType = PrefetchDataType.GateConfig,
            TargetId = Guid.NewGuid().ToString()
        };

        // Act
        var outcome = await sut.EnqueueAsync(request);

        // Assert
        outcome.Queued.Should().BeTrue();
        outcome.RequestId.Should().NotBeEmpty();
    }

    [Fact]
    public async Task PrefetchForGateEvaluationAsync_ShouldPrefetchGateConfigs()
    {
        // Arrange
        var sut = CreateSut();
        var promotionId = Guid.NewGuid();
        List<Guid> gateIds = [Guid.NewGuid(), Guid.NewGuid()];

        // Make each gate's config available from the backing provider.
        foreach (var gateId in gateIds)
        {
            _dataProvider.SetGateConfig(gateId, new { GateId = gateId, Name = $"Gate-{gateId}" });
        }

        // Act
        var outcome = await sut.PrefetchForGateEvaluationAsync(promotionId, gateIds);

        // Assert
        outcome.Items.Should().HaveCount(2);
        outcome.CacheMisses.Should().Be(2);
        outcome.Duration.Should().BeGreaterThan(TimeSpan.Zero);
    }

    [Fact]
    public async Task PrefetchForGateEvaluationAsync_ShouldUseCacheForExistingItems()
    {
        // Arrange
        var sut = CreateSut();
        var promotionId = Guid.NewGuid();
        List<Guid> gateIds = [Guid.NewGuid()];

        // Pre-populate the cache so the prefetcher finds the entry already present.
        var cacheKey = $"gate-config:{gateIds[0]}";
        await _cache.SetAsync(cacheKey, new { Cached = true }, TimeSpan.FromMinutes(10));

        // Act
        var outcome = await sut.PrefetchForGateEvaluationAsync(promotionId, gateIds);

        // Assert
        outcome.Items.Should().HaveCount(1);
        outcome.CacheHits.Should().Be(1);
        outcome.CacheMisses.Should().Be(0);
    }

    [Fact]
    public async Task PrefetchScanResultsAsync_ShouldPrefetchAllDigests()
    {
        // Arrange
        var sut = CreateSut();
        List<string> digests = ["sha256:abc123", "sha256:def456"];

        foreach (var digest in digests)
        {
            _dataProvider.SetScanResult(digest, new { Digest = digest, Vulnerabilities = 0 });
        }

        // Act
        var outcome = await sut.PrefetchScanResultsAsync(digests);

        // Assert
        outcome.Items.Should().HaveCount(2);
        outcome.Items.All(i => i.DataType == PrefetchDataType.ScanResult).Should().BeTrue();
    }

    [Fact]
    public async Task PrefetchAttestationsAsync_ShouldPrefetchAllDigests()
    {
        // Arrange
        var sut = CreateSut();
        List<string> digests = ["sha256:attestation1", "sha256:attestation2"];

        foreach (var digest in digests)
        {
            _dataProvider.SetAttestation(digest, new { Digest = digest, Type = "slsa-provenance" });
        }

        // Act
        var outcome = await sut.PrefetchAttestationsAsync(digests);

        // Assert
        outcome.Items.Should().HaveCount(2);
        outcome.Items.All(i => i.DataType == PrefetchDataType.Attestation).Should().BeTrue();
    }

    [Fact]
    public async Task WarmCacheForPipelineAsync_ShouldPrefetchAllData()
    {
        // Arrange
        var sut = CreateSut();
        var pipelineId = Guid.NewGuid();
        ImmutableArray<Guid> gateIds = [Guid.NewGuid(), Guid.NewGuid()];
        ImmutableArray<string> artifacts = ["sha256:artifact1", "sha256:artifact2"];

        _dataProvider.SetPipelineMetadata(pipelineId, new PipelineMetadata
        {
            PipelineId = pipelineId,
            GateIds = gateIds,
            ArtifactDigests = artifacts,
            EnvironmentIds = []
        });

        // Seed every piece of data the warm-up is expected to pull.
        foreach (var gateId in gateIds)
        {
            _dataProvider.SetGateConfig(gateId, new { GateId = gateId });
        }

        foreach (var artifact in artifacts)
        {
            _dataProvider.SetScanResult(artifact, new { Digest = artifact });
            _dataProvider.SetAttestation(artifact, new { Digest = artifact });
        }

        // Act
        var outcome = await sut.WarmCacheForPipelineAsync(pipelineId);

        // Assert
        outcome.PipelineId.Should().Be(pipelineId);
        outcome.Items.Should().HaveCountGreaterThan(0);
    }

    [Fact]
    public void GetStatistics_ShouldReturnCurrentStats()
    {
        // Arrange
        var sut = CreateSut();

        // Act
        var stats = sut.GetStatistics();

        // Assert
        stats.Timestamp.Should().BeCloseTo(_timeProvider.GetUtcNow(), TimeSpan.FromSeconds(1));
        stats.ActiveJobs.Should().Be(0);
    }

    [Fact]
    public async Task PrefetchForGateEvaluationAsync_WithEmptyGateList_ShouldReturnEmptyResult()
    {
        // Arrange
        var sut = CreateSut();
        var promotionId = Guid.NewGuid();
        List<Guid> gateIds = [];

        // Act
        var outcome = await sut.PrefetchForGateEvaluationAsync(promotionId, gateIds);

        // Assert
        outcome.Items.Should().BeEmpty();
        outcome.CacheHits.Should().Be(0);
        outcome.CacheMisses.Should().Be(0);
    }

    [Fact]
    public async Task PrefetchScanResultsAsync_WithMissingData_ShouldHandleGracefully()
    {
        // Arrange: no scan result is seeded for this digest on purpose.
        var sut = CreateSut();
        List<string> digests = ["sha256:missing"];

        // Act
        var outcome = await sut.PrefetchScanResultsAsync(digests);

        // Assert: the miss is reported as an unsuccessful item, not an exception.
        outcome.Items.Should().HaveCount(1);
        outcome.Items[0].Success.Should().BeFalse();
    }

    /// <summary>Builds the system under test wired to the in-memory fakes and fake clock.</summary>
    private DataPrefetcher CreateSut() => new(
        _dataProvider,
        _cache,
        _timeProvider,
        _config,
        NullLogger<DataPrefetcher>.Instance);
}
|
||||
|
||||
/// <summary>
/// In-memory implementation of IPrefetchDataProvider for testing.
/// Values are seeded through the Set* methods and returned verbatim by the
/// corresponding Get*Async lookups; unseeded keys yield null.
/// </summary>
public sealed class InMemoryPrefetchDataProvider : IPrefetchDataProvider
{
    private readonly Dictionary<Guid, object> _gateConfigs = new();
    private readonly Dictionary<string, object> _scanResults = new();
    private readonly Dictionary<string, object> _attestations = new();
    private readonly Dictionary<Guid, PipelineMetadata> _pipelineMetadata = new();

    /// <summary>Seeds the config returned for <paramref name="gateId"/>.</summary>
    public void SetGateConfig(Guid gateId, object config) => _gateConfigs[gateId] = config;

    /// <summary>Seeds the scan result returned for <paramref name="digest"/>.</summary>
    public void SetScanResult(string digest, object result) => _scanResults[digest] = result;

    /// <summary>Seeds the attestation returned for <paramref name="digest"/>.</summary>
    public void SetAttestation(string digest, object attestation) => _attestations[digest] = attestation;

    /// <summary>Seeds the metadata returned for <paramref name="pipelineId"/>.</summary>
    public void SetPipelineMetadata(Guid pipelineId, PipelineMetadata metadata) => _pipelineMetadata[pipelineId] = metadata;

    public Task<object?> GetGateConfigAsync(Guid gateId, CancellationToken ct = default)
        => Task.FromResult(_gateConfigs.GetValueOrDefault(gateId));

    public Task<object?> GetScanResultAsync(string digest, CancellationToken ct = default)
        => Task.FromResult(_scanResults.GetValueOrDefault(digest));

    public Task<object?> GetAttestationAsync(string digest, CancellationToken ct = default)
        => Task.FromResult(_attestations.GetValueOrDefault(digest));

    public Task<PipelineMetadata?> GetPipelineMetadataAsync(Guid pipelineId, CancellationToken ct = default)
        => Task.FromResult(_pipelineMetadata.GetValueOrDefault(pipelineId));
}
|
||||
|
||||
/// <summary>
/// In-memory implementation of IPrefetchCache for testing.
/// Unlike a plain dictionary fake, this honors the TTL passed to
/// <see cref="SetAsync"/> against an injectable <see cref="TimeProvider"/>,
/// so expiry behavior can be exercised deterministically (e.g. with
/// FakeTimeProvider). The previous version silently ignored the TTL, which
/// made <see cref="ExistsAsync"/> report stale entries as live forever.
/// </summary>
public sealed class InMemoryPrefetchCache : IPrefetchCache
{
    // Each entry stores the cached value together with its absolute expiry instant.
    private readonly Dictionary<string, (object Value, DateTimeOffset ExpiresAt)> _cache = new();
    private readonly TimeProvider _timeProvider;

    /// <summary>
    /// Creates the cache. <paramref name="timeProvider"/> defaults to
    /// <see cref="TimeProvider.System"/>, preserving the original parameterless
    /// construction used by existing tests.
    /// </summary>
    public InMemoryPrefetchCache(TimeProvider? timeProvider = null)
    {
        _timeProvider = timeProvider ?? TimeProvider.System;
    }

    /// <summary>Returns true when <paramref name="key"/> exists and has not expired.</summary>
    public Task<bool> ExistsAsync(string key, CancellationToken ct = default)
        => Task.FromResult(TryGetLive(key, out _));

    /// <summary>Returns the live entry for <paramref name="key"/> as <typeparamref name="T"/>, or null.</summary>
    public Task<T?> GetAsync<T>(string key, CancellationToken ct = default) where T : class
        => Task.FromResult(TryGetLive(key, out var value) ? value as T : null);

    /// <summary>Stores <paramref name="value"/> under <paramref name="key"/> for <paramref name="ttl"/>.</summary>
    public Task SetAsync(string key, object value, TimeSpan ttl, CancellationToken ct = default)
    {
        _cache[key] = (value, _timeProvider.GetUtcNow() + ttl);
        return Task.CompletedTask;
    }

    /// <summary>Removes <paramref name="key"/>; a no-op when the key is absent.</summary>
    public Task RemoveAsync(string key, CancellationToken ct = default)
    {
        _cache.Remove(key);
        return Task.CompletedTask;
    }

    // Yields the value only when the entry exists and is still within its TTL;
    // expired entries are evicted lazily on access.
    private bool TryGetLive(string key, out object value)
    {
        if (_cache.TryGetValue(key, out var entry))
        {
            if (entry.ExpiresAt > _timeProvider.GetUtcNow())
            {
                value = entry.Value;
                return true;
            }

            _cache.Remove(key);
        }

        value = null!;
        return false;
    }
}
|
||||
@@ -0,0 +1,25 @@
|
||||
<Project Sdk="Microsoft.NET.Sdk">

  <PropertyGroup>
    <TargetFramework>net10.0</TargetFramework>
    <ImplicitUsings>enable</ImplicitUsings>
    <Nullable>enable</Nullable>
    <LangVersion>preview</LangVersion>
    <TreatWarningsAsErrors>true</TreatWarningsAsErrors>
    <IsPackable>false</IsPackable>
    <IsTestProject>true</IsTestProject>
    <UseConcelierTestInfra>false</UseConcelierTestInfra>
  </PropertyGroup>

  <ItemGroup>
    <ProjectReference Include="..\..\__Libraries\StellaOps.ReleaseOrchestrator.Performance\StellaOps.ReleaseOrchestrator.Performance.csproj" />
  </ItemGroup>

  <ItemGroup>
    <PackageReference Include="coverlet.collector" />
    <PackageReference Include="FluentAssertions" />
    <PackageReference Include="Microsoft.Extensions.TimeProvider.Testing" />
    <PackageReference Include="Moq" />
  </ItemGroup>

</Project>
|
||||
Reference in New Issue
Block a user