sprints work
This commit is contained in:
325
src/__Libraries/StellaOps.Provcache/DecisionDigestBuilder.cs
Normal file
325
src/__Libraries/StellaOps.Provcache/DecisionDigestBuilder.cs
Normal file
@@ -0,0 +1,325 @@
|
||||
using System.Security.Cryptography;
|
||||
using System.Text;
|
||||
using System.Text.Json;
|
||||
|
||||
namespace StellaOps.Provcache;
|
||||
|
||||
/// <summary>
/// Builder for constructing <see cref="DecisionDigest"/> from evaluation results.
/// Ensures deterministic digest computation for cache consistency: dispositions are
/// hashed in ordinal key order and evidence is folded into a binary Merkle root,
/// so identical inputs always produce identical digests.
/// </summary>
public sealed class DecisionDigestBuilder
{
    private string? _veriKey;
    private string? _verdictHash;
    private string? _proofRoot;
    private ReplaySeed? _replaySeed;
    private DateTimeOffset? _createdAt;
    private DateTimeOffset? _expiresAt;
    private int? _trustScore;
    private readonly ProvcacheOptions _options;
    private readonly TimeProvider _timeProvider;

    /// <summary>
    /// Creates a new DecisionDigestBuilder with default options and the system clock.
    /// </summary>
    public DecisionDigestBuilder() : this(new ProvcacheOptions(), TimeProvider.System)
    {
    }

    /// <summary>
    /// Creates a new DecisionDigestBuilder with the specified options.
    /// </summary>
    /// <param name="options">Provcache configuration options.</param>
    /// <param name="timeProvider">Time provider for timestamps; defaults to <see cref="TimeProvider.System"/> when null.</param>
    /// <exception cref="ArgumentNullException">If <paramref name="options"/> is null.</exception>
    public DecisionDigestBuilder(ProvcacheOptions options, TimeProvider? timeProvider = null)
    {
        ArgumentNullException.ThrowIfNull(options);
        _options = options;
        _timeProvider = timeProvider ?? TimeProvider.System;
    }

    /// <summary>
    /// Sets the VeriKey for this digest.
    /// </summary>
    /// <param name="veriKey">The composite cache key.</param>
    /// <exception cref="ArgumentNullException">If <paramref name="veriKey"/> is null.</exception>
    public DecisionDigestBuilder WithVeriKey(string veriKey)
    {
        ArgumentNullException.ThrowIfNull(veriKey);
        _veriKey = veriKey;
        return this;
    }

    /// <summary>
    /// Sets the VeriKey from a builder.
    /// </summary>
    /// <param name="builder">Builder whose <c>Build()</c> result becomes the VeriKey.</param>
    /// <exception cref="ArgumentNullException">If <paramref name="builder"/> is null.</exception>
    public DecisionDigestBuilder WithVeriKey(VeriKeyBuilder builder)
    {
        ArgumentNullException.ThrowIfNull(builder);
        _veriKey = builder.Build();
        return this;
    }

    /// <summary>
    /// Sets the verdict hash directly.
    /// </summary>
    /// <param name="verdictHash">Precomputed verdict hash (expected format "sha256:...").</param>
    /// <exception cref="ArgumentNullException">If <paramref name="verdictHash"/> is null.</exception>
    public DecisionDigestBuilder WithVerdictHash(string verdictHash)
    {
        ArgumentNullException.ThrowIfNull(verdictHash);
        _verdictHash = verdictHash;
        return this;
    }

    /// <summary>
    /// Computes the verdict hash from sorted dispositions.
    /// Dispositions are sorted by key (ordinal) for deterministic hashing.
    /// </summary>
    /// <param name="dispositions">Dictionary of finding ID to disposition.</param>
    /// <exception cref="ArgumentNullException">If <paramref name="dispositions"/> is null.</exception>
    public DecisionDigestBuilder WithDispositions(IReadOnlyDictionary<string, string> dispositions)
    {
        ArgumentNullException.ThrowIfNull(dispositions);

        // Sort by key (ordinal) so equal disposition sets always hash identically,
        // regardless of the dictionary's enumeration order.
        var sorted = dispositions
            .OrderBy(kvp => kvp.Key, StringComparer.Ordinal)
            .ToList();

        if (sorted.Count == 0)
        {
            // Sentinel hash so "no findings" is distinguishable from an unset verdict.
            _verdictHash = ComputeHash(Encoding.UTF8.GetBytes("empty-verdict"));
            return this;
        }

        // Canonical form: key1=value1|key2=value2|...
        var canonical = string.Join('|', sorted.Select(kvp => $"{kvp.Key}={kvp.Value}"));
        _verdictHash = ComputeHash(Encoding.UTF8.GetBytes(canonical));
        return this;
    }

    /// <summary>
    /// Sets the proof root (Merkle root of evidence) directly.
    /// </summary>
    /// <param name="proofRoot">Precomputed proof root (expected format "sha256:...").</param>
    /// <exception cref="ArgumentNullException">If <paramref name="proofRoot"/> is null.</exception>
    public DecisionDigestBuilder WithProofRoot(string proofRoot)
    {
        ArgumentNullException.ThrowIfNull(proofRoot);
        _proofRoot = proofRoot;
        return this;
    }

    /// <summary>
    /// Computes the proof root from a list of evidence chunk hashes.
    /// Builds a simple binary Merkle tree for verification; an odd node at any
    /// level is promoted unchanged to the next level.
    /// </summary>
    /// <param name="evidenceChunkHashes">Ordered list of evidence chunk hashes ("sha256:" prefix optional).</param>
    /// <exception cref="ArgumentNullException">If <paramref name="evidenceChunkHashes"/> is null.</exception>
    /// <exception cref="FormatException">If a hash is not valid hexadecimal.</exception>
    public DecisionDigestBuilder WithEvidenceChunks(IReadOnlyList<string> evidenceChunkHashes)
    {
        ArgumentNullException.ThrowIfNull(evidenceChunkHashes);

        if (evidenceChunkHashes.Count == 0)
        {
            // Sentinel hash so "no evidence" is distinguishable from an unset proof root.
            _proofRoot = ComputeHash(Encoding.UTF8.GetBytes("empty-proof"));
            return this;
        }

        // Simple Merkle tree: recursively pair and hash until a single root remains.
        var currentLevel = evidenceChunkHashes
            .Select(h => Convert.FromHexString(StripPrefix(h)))
            .ToList();

        while (currentLevel.Count > 1)
        {
            var nextLevel = new List<byte[]>();

            for (int i = 0; i < currentLevel.Count; i += 2)
            {
                if (i + 1 < currentLevel.Count)
                {
                    // Hash the concatenation of the pair.
                    var combined = new byte[currentLevel[i].Length + currentLevel[i + 1].Length];
                    currentLevel[i].CopyTo(combined, 0);
                    currentLevel[i + 1].CopyTo(combined, currentLevel[i].Length);
                    nextLevel.Add(SHA256.HashData(combined));
                }
                else
                {
                    // Odd element: promote to next level unchanged.
                    nextLevel.Add(currentLevel[i]);
                }
            }

            currentLevel = nextLevel;
        }

        _proofRoot = $"sha256:{Convert.ToHexStringLower(currentLevel[0])}";
        return this;
    }

    /// <summary>
    /// Sets the replay seed directly.
    /// </summary>
    /// <param name="replaySeed">The replay seed.</param>
    /// <exception cref="ArgumentNullException">If <paramref name="replaySeed"/> is null.</exception>
    public DecisionDigestBuilder WithReplaySeed(ReplaySeed replaySeed)
    {
        ArgumentNullException.ThrowIfNull(replaySeed);
        _replaySeed = replaySeed;
        return this;
    }

    /// <summary>
    /// Builds a replay seed from feed and rule identifiers.
    /// Null enumerables are treated as empty lists.
    /// </summary>
    /// <param name="feedIds">Advisory feed identifiers.</param>
    /// <param name="ruleIds">Policy rule identifiers.</param>
    /// <param name="frozenEpoch">Optional frozen epoch timestamp.</param>
    public DecisionDigestBuilder WithReplaySeed(
        IEnumerable<string> feedIds,
        IEnumerable<string> ruleIds,
        DateTimeOffset? frozenEpoch = null)
    {
        _replaySeed = new ReplaySeed
        {
            FeedIds = feedIds?.ToList() ?? [],
            RuleIds = ruleIds?.ToList() ?? [],
            FrozenEpoch = frozenEpoch
        };
        return this;
    }

    /// <summary>
    /// Sets explicit timestamps for created and expires.
    /// </summary>
    /// <param name="createdAt">Creation timestamp.</param>
    /// <param name="expiresAt">Expiry timestamp.</param>
    public DecisionDigestBuilder WithTimestamps(DateTimeOffset createdAt, DateTimeOffset expiresAt)
    {
        _createdAt = createdAt;
        _expiresAt = expiresAt;
        return this;
    }

    /// <summary>
    /// Sets timestamps using the injected clock and the default TTL from options.
    /// </summary>
    public DecisionDigestBuilder WithDefaultTimestamps()
    {
        _createdAt = _timeProvider.GetUtcNow();
        _expiresAt = _createdAt.Value.Add(_options.DefaultTtl);
        return this;
    }

    /// <summary>
    /// Sets the trust score directly.
    /// </summary>
    /// <param name="trustScore">Trust score (0-100).</param>
    /// <exception cref="ArgumentOutOfRangeException">If the score is outside 0-100.</exception>
    public DecisionDigestBuilder WithTrustScore(int trustScore)
    {
        if (trustScore < 0 || trustScore > 100)
            throw new ArgumentOutOfRangeException(nameof(trustScore), "Trust score must be between 0 and 100.");

        _trustScore = trustScore;
        return this;
    }

    /// <summary>
    /// Computes the trust score from component scores using a weighted formula.
    /// The weighted sum is rounded and then clamped to 0-100, so out-of-range
    /// component values cannot produce an invalid score.
    /// </summary>
    /// <param name="reachabilityScore">Reachability analysis coverage (0-100).</param>
    /// <param name="sbomCompletenessScore">SBOM completeness (0-100).</param>
    /// <param name="vexCoverageScore">VEX statement coverage (0-100).</param>
    /// <param name="policyFreshnessScore">Policy freshness (0-100).</param>
    /// <param name="signerTrustScore">Signer trust level (0-100).</param>
    public DecisionDigestBuilder WithTrustScore(
        int reachabilityScore,
        int sbomCompletenessScore,
        int vexCoverageScore,
        int policyFreshnessScore,
        int signerTrustScore)
    {
        // Weights from documentation:
        // Reachability: 25%, SBOM: 20%, VEX: 20%, Policy: 15%, Signer: 20%
        _trustScore = (int)Math.Round(
            reachabilityScore * 0.25 +
            sbomCompletenessScore * 0.20 +
            vexCoverageScore * 0.20 +
            policyFreshnessScore * 0.15 +
            signerTrustScore * 0.20);

        // Clamp to valid range
        _trustScore = Math.Clamp(_trustScore.Value, 0, 100);
        return this;
    }

    /// <summary>
    /// Builds the final DecisionDigest.
    /// </summary>
    /// <returns>The constructed DecisionDigest.</returns>
    /// <exception cref="InvalidOperationException">If required components are missing.</exception>
    public DecisionDigest Build()
    {
        ValidateRequiredComponents();

        return new DecisionDigest
        {
            DigestVersion = _options.DigestVersion,
            VeriKey = _veriKey!,
            VerdictHash = _verdictHash!,
            ProofRoot = _proofRoot!,
            ReplaySeed = _replaySeed!,
            CreatedAt = _createdAt!.Value,
            ExpiresAt = _expiresAt!.Value,
            TrustScore = _trustScore!.Value
        };
    }

    /// <summary>
    /// Resets the builder to its initial state so it can be reused.
    /// </summary>
    public DecisionDigestBuilder Reset()
    {
        _veriKey = null;
        _verdictHash = null;
        _proofRoot = null;
        _replaySeed = null;
        _createdAt = null;
        _expiresAt = null;
        _trustScore = null;
        return this;
    }

    // Throws with a list of every missing component (not just the first) so the
    // caller can fix all omissions in one pass.
    private void ValidateRequiredComponents()
    {
        var missing = new List<string>();

        if (string.IsNullOrWhiteSpace(_veriKey))
            missing.Add("VeriKey");
        if (string.IsNullOrWhiteSpace(_verdictHash))
            missing.Add("VerdictHash");
        if (string.IsNullOrWhiteSpace(_proofRoot))
            missing.Add("ProofRoot");
        if (_replaySeed is null)
            missing.Add("ReplaySeed");
        if (!_createdAt.HasValue)
            missing.Add("CreatedAt");
        if (!_expiresAt.HasValue)
            missing.Add("ExpiresAt");
        if (!_trustScore.HasValue)
            missing.Add("TrustScore");

        if (missing.Count > 0)
        {
            throw new InvalidOperationException(
                $"Cannot build DecisionDigest: missing required components: {string.Join(", ", missing)}. " +
                "Use the With* methods to set all required components before calling Build().");
        }
    }

    // SHA-256 over the input, formatted as "sha256:<lowercase hex>".
    // stackalloc keeps the 32-byte digest off the heap.
    private static string ComputeHash(ReadOnlySpan<byte> data)
    {
        Span<byte> hash = stackalloc byte[32];
        SHA256.HashData(data, hash);
        return $"sha256:{Convert.ToHexStringLower(hash)}";
    }

    // Removes an optional "sha256:" prefix so hashes can be given with or without it.
    private static string StripPrefix(string hash)
    {
        if (hash.StartsWith("sha256:", StringComparison.OrdinalIgnoreCase))
            return hash[7..];
        return hash;
    }
}
|
||||
@@ -0,0 +1,217 @@
|
||||
using System.ComponentModel.DataAnnotations;
|
||||
using System.ComponentModel.DataAnnotations.Schema;
|
||||
|
||||
namespace StellaOps.Provcache.Entities;
|
||||
|
||||
/// <summary>
/// EF Core entity for the provcache.provcache_items table.
/// One row per cached decision, keyed by VeriKey.
/// </summary>
[Table("provcache_items", Schema = "provcache")]
public sealed class ProvcacheItemEntity
{
    /// <summary>
    /// Composite cache key (VeriKey). Primary key.
    /// </summary>
    [Key]
    [Column("verikey")]
    [MaxLength(512)]
    public required string VeriKey { get; set; }

    /// <summary>
    /// Schema version of the digest format.
    /// </summary>
    [Column("digest_version")]
    [MaxLength(16)]
    public required string DigestVersion { get; set; }

    /// <summary>
    /// Hash of sorted dispositions.
    /// </summary>
    [Column("verdict_hash")]
    [MaxLength(128)]
    public required string VerdictHash { get; set; }

    /// <summary>
    /// Merkle root of evidence.
    /// </summary>
    [Column("proof_root")]
    [MaxLength(128)]
    public required string ProofRoot { get; set; }

    /// <summary>
    /// Replay seed as JSON (feed IDs, rule IDs, frozen epoch).
    /// Stored as a Postgres jsonb column.
    /// </summary>
    [Column("replay_seed", TypeName = "jsonb")]
    public required string ReplaySeed { get; set; }

    /// <summary>
    /// Hash of the policy bundle. Used for invalidation on policy updates.
    /// </summary>
    [Column("policy_hash")]
    [MaxLength(128)]
    public required string PolicyHash { get; set; }

    /// <summary>
    /// Hash of the signer certificate set. Used for invalidation on signer revocation.
    /// </summary>
    [Column("signer_set_hash")]
    [MaxLength(128)]
    public required string SignerSetHash { get; set; }

    /// <summary>
    /// Advisory feed epoch identifier. Used for invalidation on feed updates.
    /// </summary>
    [Column("feed_epoch")]
    [MaxLength(64)]
    public required string FeedEpoch { get; set; }

    /// <summary>
    /// Trust score (0-100).
    /// </summary>
    [Column("trust_score")]
    [Range(0, 100)]
    public int TrustScore { get; set; }

    /// <summary>
    /// Number of cache hits.
    /// </summary>
    [Column("hit_count")]
    public long HitCount { get; set; }

    /// <summary>
    /// UTC timestamp when entry was created.
    /// </summary>
    [Column("created_at")]
    public DateTimeOffset CreatedAt { get; set; }

    /// <summary>
    /// UTC timestamp when entry expires.
    /// </summary>
    [Column("expires_at")]
    public DateTimeOffset ExpiresAt { get; set; }

    /// <summary>
    /// UTC timestamp when entry was last updated.
    /// </summary>
    [Column("updated_at")]
    public DateTimeOffset UpdatedAt { get; set; }

    /// <summary>
    /// UTC timestamp when entry was last accessed. Null if never read back.
    /// </summary>
    [Column("last_accessed_at")]
    public DateTimeOffset? LastAccessedAt { get; set; }
}
|
||||
|
||||
/// <summary>
/// EF Core entity for the provcache.prov_evidence_chunks table.
/// One row per evidence chunk; chunks belonging to the same digest share a ProofRoot.
/// </summary>
[Table("prov_evidence_chunks", Schema = "provcache")]
public sealed class ProvcacheEvidenceChunkEntity
{
    /// <summary>
    /// Unique chunk identifier. Primary key.
    /// </summary>
    [Key]
    [Column("chunk_id")]
    public Guid ChunkId { get; set; }

    /// <summary>
    /// Proof root (Merkle root) this chunk belongs to.
    /// </summary>
    [Column("proof_root")]
    [MaxLength(128)]
    public required string ProofRoot { get; set; }

    /// <summary>
    /// Index of this chunk in the Merkle tree (leaf position).
    /// </summary>
    [Column("chunk_index")]
    public int ChunkIndex { get; set; }

    /// <summary>
    /// Hash of the chunk content.
    /// </summary>
    [Column("chunk_hash")]
    [MaxLength(128)]
    public required string ChunkHash { get; set; }

    /// <summary>
    /// Chunk content (raw bytes).
    /// </summary>
    [Column("blob")]
    public required byte[] Blob { get; set; }

    /// <summary>
    /// Size of the blob in bytes.
    /// </summary>
    [Column("blob_size")]
    public int BlobSize { get; set; }

    /// <summary>
    /// MIME type of the content.
    /// </summary>
    [Column("content_type")]
    [MaxLength(128)]
    public required string ContentType { get; set; }

    /// <summary>
    /// UTC timestamp when chunk was created.
    /// </summary>
    [Column("created_at")]
    public DateTimeOffset CreatedAt { get; set; }
}
|
||||
|
||||
/// <summary>
/// EF Core entity for the provcache.prov_revocations table.
/// Audit record of cache invalidation events.
/// </summary>
[Table("prov_revocations", Schema = "provcache")]
public sealed class ProvcacheRevocationEntity
{
    /// <summary>
    /// Unique revocation identifier. Primary key.
    /// </summary>
    [Key]
    [Column("revocation_id")]
    public Guid RevocationId { get; set; }

    /// <summary>
    /// Type of revocation (policy, signer, feed, pattern).
    /// </summary>
    [Column("revocation_type")]
    [MaxLength(64)]
    public required string RevocationType { get; set; }

    /// <summary>
    /// Target hash that was revoked.
    /// </summary>
    [Column("target_hash")]
    [MaxLength(256)]
    public required string TargetHash { get; set; }

    /// <summary>
    /// Reason for revocation. Optional free text for the audit trail.
    /// </summary>
    [Column("reason")]
    [MaxLength(512)]
    public string? Reason { get; set; }

    /// <summary>
    /// Actor who initiated the revocation. Optional.
    /// </summary>
    [Column("actor")]
    [MaxLength(256)]
    public string? Actor { get; set; }

    /// <summary>
    /// Number of entries affected by the revocation.
    /// </summary>
    [Column("entries_affected")]
    public long EntriesAffected { get; set; }

    /// <summary>
    /// UTC timestamp when revocation occurred.
    /// </summary>
    [Column("created_at")]
    public DateTimeOffset CreatedAt { get; set; }
}
|
||||
137
src/__Libraries/StellaOps.Provcache/IProvcacheRepository.cs
Normal file
137
src/__Libraries/StellaOps.Provcache/IProvcacheRepository.cs
Normal file
@@ -0,0 +1,137 @@
|
||||
namespace StellaOps.Provcache;
|
||||
|
||||
/// <summary>
/// Repository interface for Provcache entries in persistent storage (Postgres).
/// Implementations are expected to be safe for concurrent use by the service layer.
/// </summary>
public interface IProvcacheRepository
{
    /// <summary>
    /// Gets a cache entry by VeriKey.
    /// </summary>
    /// <param name="veriKey">The cache key.</param>
    /// <param name="cancellationToken">Cancellation token.</param>
    /// <returns>The cache entry if found, null otherwise.</returns>
    Task<ProvcacheEntry?> GetAsync(string veriKey, CancellationToken cancellationToken = default);

    /// <summary>
    /// Gets multiple cache entries by VeriKeys.
    /// </summary>
    /// <param name="veriKeys">The cache keys.</param>
    /// <param name="cancellationToken">Cancellation token.</param>
    /// <returns>Dictionary of found entries; keys not present in storage are simply absent.</returns>
    Task<IReadOnlyDictionary<string, ProvcacheEntry>> GetManyAsync(
        IEnumerable<string> veriKeys,
        CancellationToken cancellationToken = default);

    /// <summary>
    /// Inserts or updates a cache entry.
    /// </summary>
    /// <param name="entry">The cache entry to upsert.</param>
    /// <param name="cancellationToken">Cancellation token.</param>
    Task UpsertAsync(ProvcacheEntry entry, CancellationToken cancellationToken = default);

    /// <summary>
    /// Inserts or updates multiple cache entries in a batch.
    /// </summary>
    /// <param name="entries">The cache entries to upsert.</param>
    /// <param name="cancellationToken">Cancellation token.</param>
    Task UpsertManyAsync(IEnumerable<ProvcacheEntry> entries, CancellationToken cancellationToken = default);

    /// <summary>
    /// Deletes a cache entry by VeriKey.
    /// </summary>
    /// <param name="veriKey">The cache key.</param>
    /// <param name="cancellationToken">Cancellation token.</param>
    /// <returns>True if the entry existed and was deleted.</returns>
    Task<bool> DeleteAsync(string veriKey, CancellationToken cancellationToken = default);

    /// <summary>
    /// Deletes entries matching a policy hash.
    /// Used when policy is updated.
    /// </summary>
    /// <param name="policyHash">The policy hash to match.</param>
    /// <param name="cancellationToken">Cancellation token.</param>
    /// <returns>Number of entries deleted.</returns>
    Task<long> DeleteByPolicyHashAsync(string policyHash, CancellationToken cancellationToken = default);

    /// <summary>
    /// Deletes entries matching a signer set hash.
    /// Used when a signer is revoked.
    /// </summary>
    /// <param name="signerSetHash">The signer set hash to match.</param>
    /// <param name="cancellationToken">Cancellation token.</param>
    /// <returns>Number of entries deleted.</returns>
    Task<long> DeleteBySignerSetHashAsync(string signerSetHash, CancellationToken cancellationToken = default);

    /// <summary>
    /// Deletes entries older than a feed epoch.
    /// Used when feeds are updated.
    /// </summary>
    /// <param name="feedEpoch">The minimum feed epoch to keep.</param>
    /// <param name="cancellationToken">Cancellation token.</param>
    /// <returns>Number of entries deleted.</returns>
    Task<long> DeleteByFeedEpochOlderThanAsync(string feedEpoch, CancellationToken cancellationToken = default);

    /// <summary>
    /// Deletes expired entries.
    /// </summary>
    /// <param name="asOf">The reference timestamp for expiry check.</param>
    /// <param name="cancellationToken">Cancellation token.</param>
    /// <returns>Number of entries deleted.</returns>
    Task<long> DeleteExpiredAsync(DateTimeOffset asOf, CancellationToken cancellationToken = default);

    /// <summary>
    /// Increments the hit count for an entry.
    /// </summary>
    /// <param name="veriKey">The cache key.</param>
    /// <param name="cancellationToken">Cancellation token.</param>
    Task IncrementHitCountAsync(string veriKey, CancellationToken cancellationToken = default);

    /// <summary>
    /// Gets cache statistics.
    /// </summary>
    /// <param name="cancellationToken">Cancellation token.</param>
    /// <returns>Cache statistics.</returns>
    Task<ProvcacheStatistics> GetStatisticsAsync(CancellationToken cancellationToken = default);
}
|
||||
|
||||
/// <summary>
/// Cache statistics for monitoring and diagnostics.
/// Immutable snapshot produced by <see cref="IProvcacheRepository.GetStatisticsAsync"/>.
/// </summary>
public sealed record ProvcacheStatistics
{
    /// <summary>
    /// Total number of entries in the cache.
    /// </summary>
    public long TotalEntries { get; init; }

    /// <summary>
    /// Total number of cache hits.
    /// </summary>
    public long TotalHits { get; init; }

    /// <summary>
    /// Number of entries expiring within the next hour.
    /// </summary>
    public long ExpiringWithinHour { get; init; }

    /// <summary>
    /// Number of unique policy hashes.
    /// </summary>
    public int UniquePolicies { get; init; }

    /// <summary>
    /// Number of unique signer set hashes.
    /// </summary>
    public int UniqueSignerSets { get; init; }

    /// <summary>
    /// Oldest entry timestamp. Null when the cache is empty.
    /// </summary>
    public DateTimeOffset? OldestEntry { get; init; }

    /// <summary>
    /// Newest entry timestamp. Null when the cache is empty.
    /// </summary>
    public DateTimeOffset? NewestEntry { get; init; }
}
|
||||
363
src/__Libraries/StellaOps.Provcache/IProvcacheService.cs
Normal file
363
src/__Libraries/StellaOps.Provcache/IProvcacheService.cs
Normal file
@@ -0,0 +1,363 @@
|
||||
namespace StellaOps.Provcache;
|
||||
|
||||
/// <summary>
/// High-level service interface for Provcache operations.
/// Orchestrates cache store and repository with metrics and invalidation logic.
/// </summary>
public interface IProvcacheService
{
    /// <summary>
    /// Gets a cached decision by VeriKey.
    /// </summary>
    /// <param name="veriKey">The cache key.</param>
    /// <param name="bypassCache">If true, skip cache and force re-evaluation.</param>
    /// <param name="cancellationToken">Cancellation token.</param>
    /// <returns>The cache result with decision if found.</returns>
    Task<ProvcacheServiceResult> GetAsync(
        string veriKey,
        bool bypassCache = false,
        CancellationToken cancellationToken = default);

    /// <summary>
    /// Stores a decision in the cache.
    /// </summary>
    /// <param name="entry">The cache entry to store.</param>
    /// <param name="cancellationToken">Cancellation token.</param>
    /// <returns>True if the entry was stored successfully.</returns>
    Task<bool> SetAsync(ProvcacheEntry entry, CancellationToken cancellationToken = default);

    /// <summary>
    /// Gets or computes a decision using a factory function for cache misses.
    /// </summary>
    /// <param name="veriKey">The cache key.</param>
    /// <param name="factory">Factory function to create the entry on cache miss.</param>
    /// <param name="bypassCache">If true, skip cache and force re-computation.</param>
    /// <param name="cancellationToken">Cancellation token.</param>
    /// <returns>The cached or newly computed entry.</returns>
    Task<ProvcacheEntry> GetOrComputeAsync(
        string veriKey,
        Func<CancellationToken, Task<ProvcacheEntry>> factory,
        bool bypassCache = false,
        CancellationToken cancellationToken = default);

    /// <summary>
    /// Invalidates a cache entry by VeriKey.
    /// </summary>
    /// <param name="veriKey">The cache key.</param>
    /// <param name="reason">Reason for invalidation (for audit log).</param>
    /// <param name="cancellationToken">Cancellation token.</param>
    /// <returns>True if the entry existed and was invalidated.</returns>
    Task<bool> InvalidateAsync(
        string veriKey,
        string? reason = null,
        CancellationToken cancellationToken = default);

    /// <summary>
    /// Invalidates entries by invalidation criteria.
    /// </summary>
    /// <param name="request">The invalidation request.</param>
    /// <param name="cancellationToken">Cancellation token.</param>
    /// <returns>Invalidation result with count of affected entries.</returns>
    Task<InvalidationResult> InvalidateByAsync(
        InvalidationRequest request,
        CancellationToken cancellationToken = default);

    /// <summary>
    /// Gets cache metrics for monitoring.
    /// </summary>
    /// <param name="cancellationToken">Cancellation token.</param>
    /// <returns>Cache metrics.</returns>
    Task<ProvcacheMetrics> GetMetricsAsync(CancellationToken cancellationToken = default);

    /// <summary>
    /// Prunes expired entries from the cache.
    /// </summary>
    /// <param name="cancellationToken">Cancellation token.</param>
    /// <returns>Number of entries pruned.</returns>
    Task<long> PruneExpiredAsync(CancellationToken cancellationToken = default);
}
|
||||
|
||||
/// <summary>
/// Result of a cache service lookup.
/// Construct via the static factory methods (<see cref="Hit"/>, <see cref="Miss"/>,
/// <see cref="Bypassed"/>, <see cref="Expired"/>) rather than object initializers.
/// </summary>
public sealed record ProvcacheServiceResult
{
    /// <summary>
    /// The cache result status.
    /// </summary>
    public required ProvcacheResultStatus Status { get; init; }

    /// <summary>
    /// The cache entry if found.
    /// </summary>
    public ProvcacheEntry? Entry { get; init; }

    /// <summary>
    /// Whether the result came from cache (true) or needs computation (false).
    /// </summary>
    public bool WasCached => Status == ProvcacheResultStatus.CacheHit;

    /// <summary>
    /// Source of the cache hit for diagnostics.
    /// </summary>
    public string? Source { get; init; }

    /// <summary>
    /// Time taken for the lookup in milliseconds.
    /// </summary>
    public double ElapsedMs { get; init; }

    /// <summary>
    /// Creates a cache hit result.
    /// </summary>
    public static ProvcacheServiceResult Hit(ProvcacheEntry entry, string source, double elapsedMs)
    {
        return new ProvcacheServiceResult
        {
            Status = ProvcacheResultStatus.CacheHit,
            Entry = entry,
            Source = source,
            ElapsedMs = elapsedMs
        };
    }

    /// <summary>
    /// Creates a cache miss result.
    /// </summary>
    public static ProvcacheServiceResult Miss(double elapsedMs)
    {
        return new ProvcacheServiceResult
        {
            Status = ProvcacheResultStatus.CacheMiss,
            Entry = null,
            Source = null,
            ElapsedMs = elapsedMs
        };
    }

    /// <summary>
    /// Creates a bypassed result (cache was skipped).
    /// </summary>
    public static ProvcacheServiceResult Bypassed()
    {
        return new ProvcacheServiceResult
        {
            Status = ProvcacheResultStatus.Bypassed,
            Entry = null,
            Source = null,
            ElapsedMs = 0
        };
    }

    /// <summary>
    /// Creates an expired result.
    /// </summary>
    public static ProvcacheServiceResult Expired(ProvcacheEntry entry, double elapsedMs)
    {
        return new ProvcacheServiceResult
        {
            Status = ProvcacheResultStatus.Expired,
            Entry = entry,
            Source = "expired",
            ElapsedMs = elapsedMs
        };
    }
}
|
||||
|
||||
/// <summary>
/// Cache result status returned by <see cref="IProvcacheService.GetAsync"/>.
/// </summary>
public enum ProvcacheResultStatus
{
    /// <summary>
    /// Entry was found in cache and is valid.
    /// </summary>
    CacheHit,

    /// <summary>
    /// Entry was not found in cache.
    /// </summary>
    CacheMiss,

    /// <summary>
    /// Cache was bypassed (force re-computation).
    /// </summary>
    Bypassed,

    /// <summary>
    /// Entry was found but has expired.
    /// </summary>
    Expired
}
|
||||
|
||||
/// <summary>
/// Request for cache invalidation by criteria.
/// Prefer the static factory methods, which supply a sensible default reason
/// for each invalidation type.
/// </summary>
public sealed record InvalidationRequest
{
    /// <summary>
    /// The invalidation type.
    /// </summary>
    public required InvalidationType Type { get; init; }

    /// <summary>
    /// The value to match for invalidation.
    /// </summary>
    public required string Value { get; init; }

    /// <summary>
    /// Reason for invalidation (for audit log).
    /// </summary>
    public string? Reason { get; init; }

    /// <summary>
    /// Actor who initiated the invalidation.
    /// </summary>
    public string? Actor { get; init; }

    /// <summary>
    /// Creates an invalidation request by policy hash.
    /// </summary>
    public static InvalidationRequest ByPolicyHash(string policyHash, string? reason = null)
    {
        return new InvalidationRequest
        {
            Type = InvalidationType.PolicyHash,
            Value = policyHash,
            Reason = reason ?? "policy-update"
        };
    }

    /// <summary>
    /// Creates an invalidation request by signer set hash.
    /// </summary>
    public static InvalidationRequest BySignerSetHash(string signerSetHash, string? reason = null)
    {
        return new InvalidationRequest
        {
            Type = InvalidationType.SignerSetHash,
            Value = signerSetHash,
            Reason = reason ?? "signer-revocation"
        };
    }

    /// <summary>
    /// Creates an invalidation request by feed epoch.
    /// </summary>
    public static InvalidationRequest ByFeedEpochOlderThan(string feedEpoch, string? reason = null)
    {
        return new InvalidationRequest
        {
            Type = InvalidationType.FeedEpochOlderThan,
            Value = feedEpoch,
            Reason = reason ?? "feed-update"
        };
    }

    /// <summary>
    /// Creates an invalidation request by key pattern.
    /// </summary>
    public static InvalidationRequest ByPattern(string pattern, string? reason = null)
    {
        return new InvalidationRequest
        {
            Type = InvalidationType.Pattern,
            Value = pattern,
            Reason = reason ?? "pattern-invalidation"
        };
    }
}
|
||||
|
||||
/// <summary>
/// Type of invalidation criteria.
/// </summary>
/// <remarks>
/// NOTE(review): member order defines the underlying numeric values (PolicyHash = 0, ...).
/// Do not reorder if these values are persisted or serialized numerically anywhere.
/// </remarks>
public enum InvalidationType
{
    /// <summary>
    /// Invalidate by policy hash.
    /// </summary>
    PolicyHash,

    /// <summary>
    /// Invalidate by signer set hash.
    /// </summary>
    SignerSetHash,

    /// <summary>
    /// Invalidate entries with feed epoch older than specified.
    /// </summary>
    FeedEpochOlderThan,

    /// <summary>
    /// Invalidate by key pattern.
    /// </summary>
    Pattern,

    /// <summary>
    /// Invalidate expired entries.
    /// </summary>
    Expired
}
|
||||
|
||||
/// <summary>
/// Result of an invalidation operation.
/// </summary>
public sealed record InvalidationResult
{
    /// <summary>
    /// Number of entries invalidated.
    /// </summary>
    public required long EntriesAffected { get; init; }

    /// <summary>
    /// The invalidation request that was executed.
    /// </summary>
    public required InvalidationRequest Request { get; init; }

    /// <summary>
    /// Timestamp of the invalidation.
    /// </summary>
    // NOTE(review): populated from TimeProvider.GetUtcNow() by the service, so this
    // is expected to be UTC — confirm for any other producers.
    public required DateTimeOffset Timestamp { get; init; }

    /// <summary>
    /// Whether the invalidation was logged for audit.
    /// Defaults to false; optional, unlike the required members above.
    /// </summary>
    public bool WasLogged { get; init; }
}
|
||||
|
||||
/// <summary>
/// Cache metrics for monitoring and observability.
/// A point-in-time snapshot of counters accumulated since process startup.
/// </summary>
public sealed record ProvcacheMetrics
{
    /// <summary>
    /// Total cache requests since startup.
    /// </summary>
    public long TotalRequests { get; init; }

    /// <summary>
    /// Total cache hits since startup.
    /// </summary>
    public long TotalHits { get; init; }

    /// <summary>
    /// Total cache misses since startup.
    /// </summary>
    public long TotalMisses { get; init; }

    /// <summary>
    /// Cache hit rate (0.0 - 1.0). Zero when no requests have been recorded,
    /// avoiding a divide-by-zero.
    /// </summary>
    public double HitRate
    {
        get
        {
            if (TotalRequests == 0)
            {
                return 0.0;
            }

            return (double)TotalHits / TotalRequests;
        }
    }

    /// <summary>
    /// Average lookup latency in milliseconds.
    /// </summary>
    public double AvgLatencyMs { get; init; }

    /// <summary>
    /// P99 lookup latency in milliseconds.
    /// </summary>
    public double P99LatencyMs { get; init; }

    /// <summary>
    /// Current number of entries in cache.
    /// </summary>
    public long CurrentEntryCount { get; init; }

    /// <summary>
    /// Total invalidations since startup.
    /// </summary>
    public long TotalInvalidations { get; init; }

    /// <summary>
    /// Valkey cache health status.
    /// </summary>
    public bool ValkeyCacheHealthy { get; init; }

    /// <summary>
    /// Postgres repository health status.
    /// </summary>
    public bool PostgresRepositoryHealthy { get; init; }

    /// <summary>
    /// Timestamp when metrics were collected.
    /// </summary>
    public DateTimeOffset CollectedAt { get; init; }
}
|
||||
150
src/__Libraries/StellaOps.Provcache/IProvcacheStore.cs
Normal file
150
src/__Libraries/StellaOps.Provcache/IProvcacheStore.cs
Normal file
@@ -0,0 +1,150 @@
|
||||
namespace StellaOps.Provcache;
|
||||
|
||||
/// <summary>
/// Cache store interface for Provcache with read-through semantics.
/// Abstracts the caching layer (Valkey, in-memory, etc.).
/// </summary>
public interface IProvcacheStore
{
    /// <summary>
    /// Gets the store provider name for diagnostics.
    /// </summary>
    string ProviderName { get; }

    /// <summary>
    /// Gets a cache entry by VeriKey.
    /// </summary>
    /// <param name="veriKey">The cache key.</param>
    /// <param name="cancellationToken">Cancellation token.</param>
    /// <returns>Cache result indicating hit/miss with the entry if found.</returns>
    ValueTask<ProvcacheLookupResult> GetAsync(string veriKey, CancellationToken cancellationToken = default);

    /// <summary>
    /// Gets multiple cache entries by VeriKeys.
    /// </summary>
    /// <param name="veriKeys">The cache keys.</param>
    /// <param name="cancellationToken">Cancellation token.</param>
    /// <returns>Dictionary of found entries and list of misses.</returns>
    ValueTask<ProvcacheBatchLookupResult> GetManyAsync(
        IEnumerable<string> veriKeys,
        CancellationToken cancellationToken = default);

    /// <summary>
    /// Sets a cache entry.
    /// </summary>
    /// <param name="entry">The cache entry to set.</param>
    /// <param name="cancellationToken">Cancellation token.</param>
    ValueTask SetAsync(ProvcacheEntry entry, CancellationToken cancellationToken = default);

    /// <summary>
    /// Sets multiple cache entries in a batch.
    /// </summary>
    /// <param name="entries">The cache entries to set.</param>
    /// <param name="cancellationToken">Cancellation token.</param>
    ValueTask SetManyAsync(IEnumerable<ProvcacheEntry> entries, CancellationToken cancellationToken = default);

    /// <summary>
    /// Invalidates a cache entry by VeriKey.
    /// </summary>
    /// <param name="veriKey">The cache key.</param>
    /// <param name="cancellationToken">Cancellation token.</param>
    /// <returns>True if the entry existed and was invalidated.</returns>
    ValueTask<bool> InvalidateAsync(string veriKey, CancellationToken cancellationToken = default);

    /// <summary>
    /// Invalidates entries matching a key pattern.
    /// </summary>
    /// <param name="pattern">The key pattern (supports wildcards).</param>
    /// <param name="cancellationToken">Cancellation token.</param>
    /// <returns>Number of entries invalidated.</returns>
    // NOTE(review): wildcard syntax is implementation-defined (e.g. Valkey glob vs regex);
    // confirm and document the expected pattern dialect on the concrete stores.
    ValueTask<long> InvalidateByPatternAsync(string pattern, CancellationToken cancellationToken = default);

    /// <summary>
    /// Gets or sets a cache entry using a factory function for cache misses.
    /// </summary>
    /// <param name="veriKey">The cache key.</param>
    /// <param name="factory">Factory function to create the entry on cache miss.</param>
    /// <param name="cancellationToken">Cancellation token.</param>
    /// <returns>The cached or newly created entry.</returns>
    ValueTask<ProvcacheEntry> GetOrSetAsync(
        string veriKey,
        Func<CancellationToken, ValueTask<ProvcacheEntry>> factory,
        CancellationToken cancellationToken = default);
}
|
||||
|
||||
/// <summary>
/// Result of a single cache lookup.
/// </summary>
public sealed record ProvcacheLookupResult
{
    /// <summary>
    /// Whether the entry was found in cache.
    /// </summary>
    public required bool IsHit { get; init; }

    /// <summary>
    /// The cache entry if found; null on a miss.
    /// </summary>
    public ProvcacheEntry? Entry { get; init; }

    /// <summary>
    /// Source of the cache hit (e.g., "valkey", "postgres").
    /// Null for cache misses.
    /// </summary>
    public string? Source { get; init; }

    /// <summary>
    /// Time taken for the lookup in milliseconds.
    /// </summary>
    public double ElapsedMs { get; init; }

    /// <summary>
    /// Creates a cache hit result.
    /// </summary>
    public static ProvcacheLookupResult Hit(ProvcacheEntry entry, string source, double elapsedMs)
    {
        return new ProvcacheLookupResult
        {
            IsHit = true,
            Entry = entry,
            Source = source,
            ElapsedMs = elapsedMs
        };
    }

    /// <summary>
    /// Creates a cache miss result.
    /// </summary>
    public static ProvcacheLookupResult Miss(double elapsedMs)
    {
        return new ProvcacheLookupResult
        {
            IsHit = false,
            Entry = null,
            Source = null,
            ElapsedMs = elapsedMs
        };
    }
}
|
||||
|
||||
/// <summary>
/// Result of a batch cache lookup.
/// </summary>
public sealed record ProvcacheBatchLookupResult
{
    /// <summary>
    /// Entries found in cache, keyed by VeriKey.
    /// </summary>
    public required IReadOnlyDictionary<string, ProvcacheEntry> Hits { get; init; }

    /// <summary>
    /// VeriKeys that were not found in cache.
    /// </summary>
    public required IReadOnlyList<string> Misses { get; init; }

    /// <summary>
    /// Time taken for the lookup in milliseconds.
    /// </summary>
    public double ElapsedMs { get; init; }

    /// <summary>
    /// Cache hit rate for this batch (0.0 - 1.0).
    /// Zero for an empty batch, avoiding a divide-by-zero.
    /// </summary>
    public double HitRate
    {
        get
        {
            var total = Hits.Count + Misses.Count;
            return total == 0 ? 0.0 : (double)Hits.Count / total;
        }
    }
}
|
||||
79
src/__Libraries/StellaOps.Provcache/Models/DecisionDigest.cs
Normal file
79
src/__Libraries/StellaOps.Provcache/Models/DecisionDigest.cs
Normal file
@@ -0,0 +1,79 @@
|
||||
namespace StellaOps.Provcache;
|
||||
|
||||
/// <summary>
/// Canonicalized representation of an evaluation result that can be cached
/// and verified for deterministic replay.
/// </summary>
public sealed record DecisionDigest
{
    /// <summary>
    /// Schema version of this digest format (e.g. "v1").
    /// </summary>
    /// <remarks>
    /// FIX: this property previously carried an initializer (= "v1") alongside the
    /// <c>required</c> modifier. Since <c>required</c> forces every caller to assign
    /// the property in an object initializer, that default could never be observed;
    /// it has been removed so the declaration no longer suggests a usable default.
    /// </remarks>
    public required string DigestVersion { get; init; }

    /// <summary>
    /// Composite cache key that uniquely identifies the provenance decision context.
    /// Format: sha256:&lt;hex&gt;
    /// </summary>
    public required string VeriKey { get; init; }

    /// <summary>
    /// Hash of sorted dispositions from the evaluation result.
    /// Used for quick comparison without full evaluation replay.
    /// Format: sha256:&lt;hex&gt;
    /// </summary>
    public required string VerdictHash { get; init; }

    /// <summary>
    /// Merkle root of all evidence chunks used in this decision.
    /// Enables incremental verification without fetching full evidence.
    /// Format: sha256:&lt;hex&gt;
    /// </summary>
    public required string ProofRoot { get; init; }

    /// <summary>
    /// Identifiers needed to replay the evaluation with same inputs.
    /// Contains feed IDs and rule IDs.
    /// </summary>
    public required ReplaySeed ReplaySeed { get; init; }

    /// <summary>
    /// UTC timestamp when this digest was created.
    /// </summary>
    public required DateTimeOffset CreatedAt { get; init; }

    /// <summary>
    /// UTC timestamp when this digest expires and should be re-evaluated.
    /// </summary>
    public required DateTimeOffset ExpiresAt { get; init; }

    /// <summary>
    /// Composite trust score (0-100) indicating decision confidence.
    /// Based on reachability, SBOM completeness, VEX coverage, policy freshness, and signer trust.
    /// </summary>
    public required int TrustScore { get; init; }
}
|
||||
|
||||
/// <summary>
/// Identifiers needed to replay an evaluation with the same inputs.
/// </summary>
public sealed record ReplaySeed
{
    /// <summary>
    /// Advisory feed identifiers used in evaluation.
    /// Example: ["cve-2024", "ghsa-2024", "oval-debian"]
    /// </summary>
    /// <remarks>
    /// FIX: removed the dead initializer (= []). Because the property is
    /// <c>required</c>, callers must always assign it, so the empty-list default
    /// could never take effect and only implied optionality that does not exist.
    /// (Same fix applied to <see cref="RuleIds"/>.)
    /// </remarks>
    public required IReadOnlyList<string> FeedIds { get; init; }

    /// <summary>
    /// Policy rule identifiers used in evaluation.
    /// Example: ["default-policy-v2", "org-exceptions"]
    /// </summary>
    public required IReadOnlyList<string> RuleIds { get; init; }

    /// <summary>
    /// Optional: Frozen epoch timestamp for deterministic replay.
    /// If set, evaluation should use this timestamp instead of current time.
    /// </summary>
    public DateTimeOffset? FrozenEpoch { get; init; }
}
|
||||
69
src/__Libraries/StellaOps.Provcache/Models/ProvcacheEntry.cs
Normal file
69
src/__Libraries/StellaOps.Provcache/Models/ProvcacheEntry.cs
Normal file
@@ -0,0 +1,69 @@
|
||||
namespace StellaOps.Provcache;
|
||||
|
||||
/// <summary>
/// Cache entry wrapping a <see cref="DecisionDigest"/> with storage metadata.
/// </summary>
public sealed record ProvcacheEntry
{
    /// <summary>
    /// Composite cache key (VeriKey) that uniquely identifies this entry.
    /// Format: sha256:&lt;hex&gt;
    /// </summary>
    public required string VeriKey { get; init; }

    /// <summary>
    /// The cached decision digest.
    /// </summary>
    public required DecisionDigest Decision { get; init; }

    /// <summary>
    /// Hash of the policy bundle used for this decision.
    /// Used for invalidation when policy changes.
    /// Format: sha256:&lt;hex&gt;
    /// </summary>
    public required string PolicyHash { get; init; }

    /// <summary>
    /// Hash of the signer certificate set used for this decision.
    /// Used for invalidation on key rotation or revocation.
    /// Format: sha256:&lt;hex&gt;
    /// </summary>
    public required string SignerSetHash { get; init; }

    /// <summary>
    /// Advisory feed epoch identifier (e.g., "2024-12-24" or "2024-W52").
    /// Used for invalidation when feeds update.
    /// </summary>
    public required string FeedEpoch { get; init; }

    /// <summary>
    /// UTC timestamp when this entry was created.
    /// </summary>
    public required DateTimeOffset CreatedAt { get; init; }

    /// <summary>
    /// UTC timestamp when this entry expires.
    /// After expiry, the entry should be re-evaluated.
    /// </summary>
    public required DateTimeOffset ExpiresAt { get; init; }

    /// <summary>
    /// Number of cache hits for this entry.
    /// Used for cache analytics and LRU decisions.
    /// </summary>
    public long HitCount { get; init; }

    /// <summary>
    /// UTC timestamp when this entry was last accessed.
    /// </summary>
    public DateTimeOffset? LastAccessedAt { get; init; }

    /// <summary>
    /// Creates a copy with incremented hit count and updated access time.
    /// The receiver is not mutated (records are immutable).
    /// </summary>
    public ProvcacheEntry WithHit(DateTimeOffset accessTime)
    {
        var updated = this with
        {
            HitCount = HitCount + 1,
            LastAccessedAt = accessTime
        };

        return updated;
    }
}
|
||||
149
src/__Libraries/StellaOps.Provcache/ProvcacheOptions.cs
Normal file
149
src/__Libraries/StellaOps.Provcache/ProvcacheOptions.cs
Normal file
@@ -0,0 +1,149 @@
|
||||
using System.ComponentModel.DataAnnotations;
|
||||
|
||||
namespace StellaOps.Provcache;
|
||||
|
||||
/// <summary>
/// Configuration options for the Provcache service.
/// </summary>
public sealed class ProvcacheOptions
{
    /// <summary>
    /// Configuration section name.
    /// </summary>
    public const string SectionName = "Provcache";

    /// <summary>
    /// Default time-to-live for cache entries.
    /// Default: 24 hours.
    /// </summary>
    [Range(typeof(TimeSpan), "00:01:00", "7.00:00:00", ErrorMessage = "DefaultTtl must be between 1 minute and 7 days")]
    public TimeSpan DefaultTtl { get; set; } = TimeSpan.FromHours(24);

    /// <summary>
    /// Maximum time-to-live for cache entries.
    /// Entries cannot be cached longer than this regardless of request.
    /// Default: 7 days.
    /// </summary>
    [Range(typeof(TimeSpan), "00:01:00", "30.00:00:00", ErrorMessage = "MaxTtl must be between 1 minute and 30 days")]
    public TimeSpan MaxTtl { get; set; } = TimeSpan.FromDays(7);

    /// <summary>
    /// Time window bucket size for VeriKey computation.
    /// Entries within the same time window share cache keys.
    /// Smaller buckets = more granular invalidation but lower hit rate.
    /// Default: 1 hour.
    /// </summary>
    /// <remarks>
    /// BUGFIX: the upper bound was previously "24:00:00", which TimeSpan parsing
    /// rejects with an OverflowException (the hours component must be 0-23), so
    /// validating this property would throw. "1.00:00:00" is the correct
    /// constant-format spelling of 24 hours.
    /// </remarks>
    [Range(typeof(TimeSpan), "00:01:00", "1.00:00:00", ErrorMessage = "TimeWindowBucket must be between 1 minute and 24 hours")]
    public TimeSpan TimeWindowBucket { get; set; } = TimeSpan.FromHours(1);

    /// <summary>
    /// Key prefix for Valkey storage.
    /// </summary>
    public string ValkeyKeyPrefix { get; set; } = "stellaops:prov:";

    /// <summary>
    /// Enable write-behind to Postgres for persistence.
    /// When true, cache writes are immediately persisted to Valkey and
    /// asynchronously written to Postgres.
    /// Default: true.
    /// </summary>
    public bool EnableWriteBehind { get; set; } = true;

    /// <summary>
    /// Interval for flushing write-behind queue to Postgres.
    /// Default: 5 seconds.
    /// </summary>
    [Range(typeof(TimeSpan), "00:00:01", "00:05:00", ErrorMessage = "WriteBehindFlushInterval must be between 1 second and 5 minutes")]
    public TimeSpan WriteBehindFlushInterval { get; set; } = TimeSpan.FromSeconds(5);

    /// <summary>
    /// Maximum batch size for write-behind flush.
    /// Larger batches are more efficient but increase memory usage.
    /// Default: 100.
    /// </summary>
    [Range(1, 10000, ErrorMessage = "WriteBehindMaxBatchSize must be between 1 and 10000")]
    public int WriteBehindMaxBatchSize { get; set; } = 100;

    /// <summary>
    /// Write-behind batch size (read-only alias for <see cref="WriteBehindMaxBatchSize"/>).
    /// </summary>
    public int WriteBehindBatchSize => WriteBehindMaxBatchSize;

    /// <summary>
    /// Write-behind flush interval in milliseconds (derived from <see cref="WriteBehindFlushInterval"/>).
    /// </summary>
    public int WriteBehindFlushIntervalMs => (int)WriteBehindFlushInterval.TotalMilliseconds;

    /// <summary>
    /// Maximum capacity of the write-behind queue.
    /// When full, writes will block until space is available.
    /// Default: 10000.
    /// </summary>
    [Range(100, 1000000, ErrorMessage = "WriteBehindQueueCapacity must be between 100 and 1000000")]
    public int WriteBehindQueueCapacity { get; set; } = 10000;

    /// <summary>
    /// Maximum retry attempts for failed write-behind operations.
    /// Default: 3.
    /// </summary>
    [Range(0, 10, ErrorMessage = "WriteBehindMaxRetries must be between 0 and 10")]
    public int WriteBehindMaxRetries { get; set; } = 3;

    /// <summary>
    /// Chunk size for evidence storage in bytes.
    /// Default: 64 KB.
    /// </summary>
    [Range(1024, 1048576, ErrorMessage = "ChunkSize must be between 1 KB and 1 MB")]
    public int ChunkSize { get; set; } = 65536;

    /// <summary>
    /// Maximum chunks per cache entry.
    /// Limits evidence size for a single entry.
    /// Default: 1000.
    /// </summary>
    [Range(1, 100000, ErrorMessage = "MaxChunksPerEntry must be between 1 and 100000")]
    public int MaxChunksPerEntry { get; set; } = 1000;

    /// <summary>
    /// Allow clients to bypass cache and force re-evaluation.
    /// Default: true.
    /// </summary>
    public bool AllowCacheBypass { get; set; } = true;

    /// <summary>
    /// Current digest version for serialization.
    /// </summary>
    public string DigestVersion { get; set; } = "v1";

    /// <summary>
    /// Hash algorithm for VeriKey and digest computation.
    /// Default: SHA256.
    /// </summary>
    public string HashAlgorithm { get; set; } = "SHA256";

    /// <summary>
    /// Enable Valkey cache layer.
    /// When false, only Postgres is used (slower but simpler).
    /// Default: true.
    /// </summary>
    public bool EnableValkeyCache { get; set; } = true;

    /// <summary>
    /// Enable sliding expiration for cached entries.
    /// When true, TTL is refreshed on each cache hit.
    /// Default: false.
    /// </summary>
    public bool SlidingExpiration { get; set; } = false;

    /// <summary>
    /// Compute time window epoch from a timestamp based on the configured bucket size.
    /// </summary>
    /// <param name="timestamp">The timestamp to compute the epoch for.</param>
    /// <returns>The epoch identifier string, e.g. "2024-01-01T10:00:00Z".</returns>
    public string ComputeTimeWindow(DateTimeOffset timestamp)
    {
        // Floor the UTC tick count to the start of its bucket via integer division.
        var bucketTicks = TimeWindowBucket.Ticks;
        var epoch = timestamp.UtcTicks / bucketTicks * bucketTicks;

        // FIX: separators are escaped as literals. In a custom format string, bare
        // ':' and '/' map to the current culture's separators, which would make this
        // cache-key component culture-dependent and non-deterministic across hosts.
        return new DateTimeOffset(epoch, TimeSpan.Zero).ToString("yyyy'-'MM'-'dd'T'HH':'mm':'ss'Z'");
    }
}
|
||||
392
src/__Libraries/StellaOps.Provcache/ProvcacheService.cs
Normal file
392
src/__Libraries/StellaOps.Provcache/ProvcacheService.cs
Normal file
@@ -0,0 +1,392 @@
|
||||
using System.Diagnostics;
|
||||
using Microsoft.Extensions.Logging;
|
||||
using Microsoft.Extensions.Options;
|
||||
|
||||
namespace StellaOps.Provcache;
|
||||
|
||||
/// <summary>
/// Default implementation of <see cref="IProvcacheService"/>.
/// Orchestrates cache store and repository with metrics and invalidation logic.
/// </summary>
public sealed class ProvcacheService : IProvcacheService
{
    // Collaborators: fast cache layer, durable repository, optional async persistence
    // queue, configuration, logging, and an injectable clock for testability.
    private readonly IProvcacheStore _store;
    private readonly IProvcacheRepository _repository;
    private readonly IWriteBehindQueue? _writeBehindQueue;
    private readonly ProvcacheOptions _options;
    private readonly ILogger<ProvcacheService> _logger;
    private readonly TimeProvider _timeProvider;

    // Metrics counters
    // Updated via Interlocked in the invalidation paths; presumably RecordMetrics
    // maintains the request/hit/miss counters and _latencies under _metricsLock —
    // TODO confirm against the RecordMetrics implementation (not visible here).
    private long _totalRequests;
    private long _totalHits;
    private long _totalMisses;
    private long _totalInvalidations;
    private readonly object _metricsLock = new();
    private readonly List<double> _latencies = new(1000);

    /// <summary>
    /// Creates a new <see cref="ProvcacheService"/>.
    /// </summary>
    /// <param name="store">Cache store layer (e.g. Valkey). Required.</param>
    /// <param name="repository">Durable repository (e.g. Postgres). Required.</param>
    /// <param name="options">Provcache configuration. Required.</param>
    /// <param name="logger">Logger. Required.</param>
    /// <param name="writeBehindQueue">Optional queue for asynchronous persistence.</param>
    /// <param name="timeProvider">Optional clock; defaults to <see cref="TimeProvider.System"/>.</param>
    /// <exception cref="ArgumentNullException">When a required dependency is null.</exception>
    public ProvcacheService(
        IProvcacheStore store,
        IProvcacheRepository repository,
        IOptions<ProvcacheOptions> options,
        ILogger<ProvcacheService> logger,
        IWriteBehindQueue? writeBehindQueue = null,
        TimeProvider? timeProvider = null)
    {
        _store = store ?? throw new ArgumentNullException(nameof(store));
        _repository = repository ?? throw new ArgumentNullException(nameof(repository));
        _options = options?.Value ?? throw new ArgumentNullException(nameof(options));
        _logger = logger ?? throw new ArgumentNullException(nameof(logger));
        _writeBehindQueue = writeBehindQueue;
        _timeProvider = timeProvider ?? TimeProvider.System;
    }
|
||||
|
||||
    /// <inheritdoc />
    /// <remarks>
    /// Read-through lookup: consults the cache store first, then falls back to the
    /// repository and backfills the store on a repository hit. Expired entries from
    /// either layer are surfaced as Expired results rather than returned as hits.
    /// Exceptions are logged and reported to the caller as a cache miss.
    /// NOTE(review): RecordMetrics is invoked with the store-layer result only — a
    /// repository backfill hit has already been counted locally as a miss, and an
    /// expired store entry as a hit. Confirm whether that skew is intended.
    /// </remarks>
    public async Task<ProvcacheServiceResult> GetAsync(
        string veriKey,
        bool bypassCache = false,
        CancellationToken cancellationToken = default)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(veriKey);

        // A bypass is honored only when configuration allows it.
        if (bypassCache && _options.AllowCacheBypass)
        {
            _logger.LogDebug("Cache bypass requested for VeriKey {VeriKey}", veriKey);
            return ProvcacheServiceResult.Bypassed();
        }

        var sw = Stopwatch.StartNew();
        using var activity = ProvcacheTelemetry.StartGetActivity(veriKey);

        try
        {
            var result = await _store.GetAsync(veriKey, cancellationToken).ConfigureAwait(false);
            sw.Stop();

            RecordMetrics(result.IsHit, sw.Elapsed.TotalMilliseconds);

            if (result.IsHit && result.Entry is not null)
            {
                // Check if expired
                if (result.Entry.ExpiresAt <= _timeProvider.GetUtcNow())
                {
                    _logger.LogDebug("Cache entry for VeriKey {VeriKey} is expired", veriKey);
                    ProvcacheTelemetry.RecordRequest("get", "expired");
                    return ProvcacheServiceResult.Expired(result.Entry, sw.Elapsed.TotalMilliseconds);
                }

                _logger.LogDebug(
                    "Cache hit for VeriKey {VeriKey} from {Source} in {ElapsedMs}ms",
                    veriKey,
                    result.Source,
                    sw.Elapsed.TotalMilliseconds);

                // "valkey" is assumed as the source when the store did not report one.
                ProvcacheTelemetry.MarkCacheHit(activity, result.Source ?? "valkey");
                ProvcacheTelemetry.RecordHit(result.Source ?? "valkey");
                ProvcacheTelemetry.RecordRequest("get", ProvcacheTelemetry.ResultHit);
                ProvcacheTelemetry.RecordLatency("get", sw.Elapsed);

                return ProvcacheServiceResult.Hit(result.Entry, result.Source!, sw.Elapsed.TotalMilliseconds);
            }

            // Cache miss - try repository
            var dbEntry = await _repository.GetAsync(veriKey, cancellationToken).ConfigureAwait(false);
            sw.Stop();

            if (dbEntry is not null)
            {
                // Check if expired
                if (dbEntry.ExpiresAt <= _timeProvider.GetUtcNow())
                {
                    _logger.LogDebug("Database entry for VeriKey {VeriKey} is expired", veriKey);
                    ProvcacheTelemetry.RecordRequest("get", "expired");
                    return ProvcacheServiceResult.Expired(dbEntry, sw.Elapsed.TotalMilliseconds);
                }

                // Backfill cache
                await _store.SetAsync(dbEntry, cancellationToken).ConfigureAwait(false);

                _logger.LogDebug(
                    "Cache backfill for VeriKey {VeriKey} from postgres in {ElapsedMs}ms",
                    veriKey,
                    sw.Elapsed.TotalMilliseconds);

                ProvcacheTelemetry.MarkCacheHit(activity, "postgres");
                ProvcacheTelemetry.RecordHit("postgres");
                ProvcacheTelemetry.RecordRequest("get", ProvcacheTelemetry.ResultHit);
                ProvcacheTelemetry.RecordLatency("get", sw.Elapsed);

                return ProvcacheServiceResult.Hit(dbEntry, "postgres", sw.Elapsed.TotalMilliseconds);
            }

            ProvcacheTelemetry.MarkCacheMiss(activity);
            ProvcacheTelemetry.RecordMiss();
            ProvcacheTelemetry.RecordRequest("get", ProvcacheTelemetry.ResultMiss);
            ProvcacheTelemetry.RecordLatency("get", sw.Elapsed);

            _logger.LogDebug("Cache miss for VeriKey {VeriKey} in {ElapsedMs}ms", veriKey, sw.Elapsed.TotalMilliseconds);
            return ProvcacheServiceResult.Miss(sw.Elapsed.TotalMilliseconds);
        }
        catch (Exception ex)
        {
            // Degrade to a miss instead of propagating; callers re-evaluate.
            ProvcacheTelemetry.MarkError(activity, ex.Message);
            ProvcacheTelemetry.RecordRequest("get", ProvcacheTelemetry.ResultError);
            _logger.LogError(ex, "Error getting cache entry for VeriKey {VeriKey}", veriKey);
            return ProvcacheServiceResult.Miss(sw.Elapsed.TotalMilliseconds);
        }
    }
|
||||
|
||||
/// <inheritdoc />
|
||||
public async Task<bool> SetAsync(ProvcacheEntry entry, CancellationToken cancellationToken = default)
|
||||
{
|
||||
ArgumentNullException.ThrowIfNull(entry);
|
||||
|
||||
var sw = Stopwatch.StartNew();
|
||||
using var activity = ProvcacheTelemetry.StartSetActivity(entry.VeriKey, entry.TrustScore);
|
||||
|
||||
try
|
||||
{
|
||||
// Store in cache
|
||||
await _store.SetAsync(entry, cancellationToken).ConfigureAwait(false);
|
||||
|
||||
// Persist to repository (write-behind if enabled)
|
||||
if (_options.EnableWriteBehind && _writeBehindQueue is not null)
|
||||
{
|
||||
// Enqueue for batched async persistence
|
||||
await _writeBehindQueue.EnqueueAsync(entry, cancellationToken).ConfigureAwait(false);
|
||||
}
|
||||
else
|
||||
{
|
||||
// Synchronous write to repository
|
||||
await _repository.UpsertAsync(entry, cancellationToken).ConfigureAwait(false);
|
||||
}
|
||||
|
||||
sw.Stop();
|
||||
ProvcacheTelemetry.RecordRequest("set", ProvcacheTelemetry.ResultCreated);
|
||||
ProvcacheTelemetry.RecordLatency("set", sw.Elapsed);
|
||||
|
||||
_logger.LogDebug("Stored cache entry for VeriKey {VeriKey}", entry.VeriKey);
|
||||
return true;
|
||||
}
|
||||
catch (Exception ex)
|
||||
{
|
||||
ProvcacheTelemetry.MarkError(activity, ex.Message);
|
||||
ProvcacheTelemetry.RecordRequest("set", ProvcacheTelemetry.ResultError);
|
||||
_logger.LogError(ex, "Error storing cache entry for VeriKey {VeriKey}", entry.VeriKey);
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
/// <inheritdoc />
|
||||
public async Task<ProvcacheEntry> GetOrComputeAsync(
|
||||
string veriKey,
|
||||
Func<CancellationToken, Task<ProvcacheEntry>> factory,
|
||||
bool bypassCache = false,
|
||||
CancellationToken cancellationToken = default)
|
||||
{
|
||||
ArgumentException.ThrowIfNullOrWhiteSpace(veriKey);
|
||||
ArgumentNullException.ThrowIfNull(factory);
|
||||
|
||||
if (!bypassCache || !_options.AllowCacheBypass)
|
||||
{
|
||||
var result = await GetAsync(veriKey, bypassCache: false, cancellationToken).ConfigureAwait(false);
|
||||
if (result.WasCached && result.Entry is not null)
|
||||
{
|
||||
return result.Entry;
|
||||
}
|
||||
}
|
||||
|
||||
// Compute new entry
|
||||
var entry = await factory(cancellationToken).ConfigureAwait(false);
|
||||
|
||||
// Store it
|
||||
await SetAsync(entry, cancellationToken).ConfigureAwait(false);
|
||||
|
||||
return entry;
|
||||
}
|
||||
|
||||
/// <inheritdoc />
|
||||
public async Task<bool> InvalidateAsync(
|
||||
string veriKey,
|
||||
string? reason = null,
|
||||
CancellationToken cancellationToken = default)
|
||||
{
|
||||
ArgumentException.ThrowIfNullOrWhiteSpace(veriKey);
|
||||
|
||||
using var activity = ProvcacheTelemetry.StartInvalidateActivity("verikey", veriKey);
|
||||
|
||||
try
|
||||
{
|
||||
var invalidated = await _store.InvalidateAsync(veriKey, cancellationToken).ConfigureAwait(false);
|
||||
await _repository.DeleteAsync(veriKey, cancellationToken).ConfigureAwait(false);
|
||||
|
||||
Interlocked.Increment(ref _totalInvalidations);
|
||||
ProvcacheTelemetry.RecordInvalidation("verikey");
|
||||
ProvcacheTelemetry.RecordRequest("invalidate", ProvcacheTelemetry.ResultInvalidated);
|
||||
|
||||
_logger.LogInformation(
|
||||
"Invalidated cache entry for VeriKey {VeriKey}, reason: {Reason}",
|
||||
veriKey,
|
||||
reason ?? "unspecified");
|
||||
|
||||
return invalidated;
|
||||
}
|
||||
catch (Exception ex)
|
||||
{
|
||||
ProvcacheTelemetry.MarkError(activity, ex.Message);
|
||||
ProvcacheTelemetry.RecordRequest("invalidate", ProvcacheTelemetry.ResultError);
|
||||
_logger.LogError(ex, "Error invalidating cache entry for VeriKey {VeriKey}", veriKey);
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
/// <inheritdoc />
public async Task<InvalidationResult> InvalidateByAsync(
    InvalidationRequest request,
    CancellationToken cancellationToken = default)
{
    ArgumentNullException.ThrowIfNull(request);

    // Lowercased type label for telemetry tags; a null Type maps to "unknown".
    var invalidationType = request.Type?.ToString().ToLowerInvariant() ?? "unknown";
    using var activity = ProvcacheTelemetry.StartInvalidateActivity(invalidationType, request.Value);

    try
    {
        long affected = 0;

        // Each case deletes durable rows first, then (where applicable)
        // evicts matching hot-store entries by wildcard pattern. The
        // reported count comes from whichever call assigns `affected`.
        switch (request.Type)
        {
            case InvalidationType.PolicyHash:
                affected = await _repository.DeleteByPolicyHashAsync(request.Value, cancellationToken)
                    .ConfigureAwait(false);
                await _store.InvalidateByPatternAsync($"*policy:{request.Value}*", cancellationToken)
                    .ConfigureAwait(false);
                break;

            case InvalidationType.SignerSetHash:
                affected = await _repository.DeleteBySignerSetHashAsync(request.Value, cancellationToken)
                    .ConfigureAwait(false);
                await _store.InvalidateByPatternAsync($"*signer:{request.Value}*", cancellationToken)
                    .ConfigureAwait(false);
                break;

            case InvalidationType.FeedEpochOlderThan:
                // NOTE(review): only the repository is purged here; no matching
                // hot-store eviction is performed for old feed epochs — confirm
                // store entries are expected to age out on their own.
                affected = await _repository.DeleteByFeedEpochOlderThanAsync(request.Value, cancellationToken)
                    .ConfigureAwait(false);
                break;

            case InvalidationType.Pattern:
                affected = await _store.InvalidateByPatternAsync(request.Value, cancellationToken)
                    .ConfigureAwait(false);
                break;

            case InvalidationType.Expired:
                affected = await _repository.DeleteExpiredAsync(_timeProvider.GetUtcNow(), cancellationToken)
                    .ConfigureAwait(false);
                break;

            // Any unhandled type falls through with affected == 0 and is
            // still reported below as a successful (logged) invalidation.
        }

        Interlocked.Add(ref _totalInvalidations, affected);
        ProvcacheTelemetry.RecordInvalidation(invalidationType, affected);
        ProvcacheTelemetry.RecordRequest("invalidate", ProvcacheTelemetry.ResultInvalidated);

        _logger.LogInformation(
            "Invalidated {Count} cache entries by {Type}={Value}, reason: {Reason}, actor: {Actor}",
            affected,
            request.Type,
            request.Value,
            request.Reason,
            request.Actor);

        return new InvalidationResult
        {
            EntriesAffected = affected,
            Request = request,
            Timestamp = _timeProvider.GetUtcNow(),
            WasLogged = true
        };
    }
    catch (Exception ex)
    {
        ProvcacheTelemetry.MarkError(activity, ex.Message);
        ProvcacheTelemetry.RecordRequest("invalidate", ProvcacheTelemetry.ResultError);
        _logger.LogError(
            ex,
            "Error invalidating cache entries by {Type}={Value}",
            request.Type,
            request.Value);

        // Best-effort contract: on failure, return a zero-count result with
        // WasLogged = false rather than rethrowing to the caller.
        return new InvalidationResult
        {
            EntriesAffected = 0,
            Request = request,
            Timestamp = _timeProvider.GetUtcNow(),
            WasLogged = false
        };
    }
}
|
||||
|
||||
/// <inheritdoc />
/// <remarks>
/// Latency statistics are computed from a snapshot of the recent-latency
/// window (the last 1000 samples kept by RecordMetrics). The snapshot is
/// taken under <c>_metricsLock</c>, but the sort and average run outside
/// the lock so concurrent RecordMetrics callers are not blocked by the
/// O(n log n) work that previously happened while the lock was held.
/// </remarks>
public async Task<ProvcacheMetrics> GetMetricsAsync(CancellationToken cancellationToken = default)
{
    var stats = await _repository.GetStatisticsAsync(cancellationToken).ConfigureAwait(false);

    // Copy under the lock, compute outside it.
    double[] latencySnapshot;
    lock (_metricsLock)
    {
        latencySnapshot = _latencies.ToArray();
    }

    double avgLatency = 0d;
    double p99Latency = 0d;
    if (latencySnapshot.Length > 0)
    {
        avgLatency = latencySnapshot.Average();
        Array.Sort(latencySnapshot);
        // Same index formula as before: floor(count * 0.99), always < count.
        p99Latency = latencySnapshot[(int)(latencySnapshot.Length * 0.99)];
    }

    return new ProvcacheMetrics
    {
        TotalRequests = Interlocked.Read(ref _totalRequests),
        TotalHits = Interlocked.Read(ref _totalHits),
        TotalMisses = Interlocked.Read(ref _totalMisses),
        TotalInvalidations = Interlocked.Read(ref _totalInvalidations),
        CurrentEntryCount = stats.TotalEntries,
        AvgLatencyMs = avgLatency,
        P99LatencyMs = p99Latency,
        ValkeyCacheHealthy = true, // TODO: Add health check
        PostgresRepositoryHealthy = true, // TODO: Add health check
        CollectedAt = _timeProvider.GetUtcNow()
    };
}
|
||||
|
||||
/// <inheritdoc />
public async Task<long> PruneExpiredAsync(CancellationToken cancellationToken = default)
{
    // Entries whose expiry is earlier than "now" are removed from the
    // durable repository; the removed count is returned to the caller.
    var cutoff = _timeProvider.GetUtcNow();
    var removed = await _repository.DeleteExpiredAsync(cutoff, cancellationToken).ConfigureAwait(false);

    _logger.LogInformation("Pruned {Count} expired cache entries", removed);
    return removed;
}
|
||||
|
||||
/// <summary>
/// Records one request outcome and its latency into the in-memory counters
/// used by <c>GetMetricsAsync</c>.
/// </summary>
/// <param name="isHit">True when the lookup was served from cache.</param>
/// <param name="latencyMs">Observed operation latency in milliseconds.</param>
private void RecordMetrics(bool isHit, double latencyMs)
{
    Interlocked.Increment(ref _totalRequests);

    if (isHit)
    {
        Interlocked.Increment(ref _totalHits);
    }
    else
    {
        Interlocked.Increment(ref _totalMisses);
    }

    lock (_metricsLock)
    {
        _latencies.Add(latencyMs);

        // Bound the sample window to the most recent 1000 observations so
        // percentile computation stays cheap.
        while (_latencies.Count > 1000)
        {
            _latencies.RemoveAt(0);
        }
    }
}
|
||||
}
|
||||
@@ -0,0 +1,94 @@
|
||||
using Microsoft.Extensions.Configuration;
|
||||
using Microsoft.Extensions.DependencyInjection;
|
||||
|
||||
namespace StellaOps.Provcache;
|
||||
|
||||
/// <summary>
/// Extension methods for registering Provcache services.
/// </summary>
public static class ProvcacheServiceCollectionExtensions
{
    /// <summary>
    /// Adds Provcache services to the service collection, binding
    /// <see cref="ProvcacheOptions"/> from the configuration section
    /// named by <c>ProvcacheOptions.SectionName</c>.
    /// </summary>
    /// <param name="services">The service collection.</param>
    /// <param name="configuration">The configuration section.</param>
    /// <returns>The service collection for chaining.</returns>
    public static IServiceCollection AddProvcache(
        this IServiceCollection services,
        IConfiguration configuration)
    {
        ArgumentNullException.ThrowIfNull(services);
        ArgumentNullException.ThrowIfNull(configuration);

        services.Configure<ProvcacheOptions>(configuration.GetSection(ProvcacheOptions.SectionName));
        return AddProvcacheCore(services);
    }

    /// <summary>
    /// Adds Provcache services with custom options.
    /// </summary>
    /// <param name="services">The service collection.</param>
    /// <param name="configure">Action to configure options.</param>
    /// <returns>The service collection for chaining.</returns>
    public static IServiceCollection AddProvcache(
        this IServiceCollection services,
        Action<ProvcacheOptions> configure)
    {
        ArgumentNullException.ThrowIfNull(services);
        ArgumentNullException.ThrowIfNull(configure);

        services.Configure(configure);
        return AddProvcacheCore(services);
    }

    /// <summary>
    /// Adds the Valkey-backed cache store implementation.
    /// </summary>
    /// <param name="services">The service collection.</param>
    /// <returns>The service collection for chaining.</returns>
    public static IServiceCollection AddProvcacheValkeyStore(this IServiceCollection services)
    {
        ArgumentNullException.ThrowIfNull(services);

        // Note: Actual Valkey store implementation would be registered here
        // services.AddSingleton<IProvcacheStore, ValkeyProvcacheStore>();

        return services;
    }

    /// <summary>
    /// Adds the Postgres repository implementation.
    /// </summary>
    /// <param name="services">The service collection.</param>
    /// <returns>The service collection for chaining.</returns>
    public static IServiceCollection AddProvcachePostgresRepository(this IServiceCollection services)
    {
        ArgumentNullException.ThrowIfNull(services);

        // Note: Actual Postgres repository implementation would be registered here
        // services.AddSingleton<IProvcacheRepository, PostgresProvcacheRepository>();

        return services;
    }

    /// <summary>
    /// Registers the services shared by both <c>AddProvcache</c> overloads:
    /// the cache façade and the write-behind queue (exposed as itself, as
    /// <c>IWriteBehindQueue</c>, and as a hosted background service — all
    /// resolving to the same singleton instance).
    /// </summary>
    private static IServiceCollection AddProvcacheCore(IServiceCollection services)
    {
        services.AddSingleton<IProvcacheService, ProvcacheService>();

        services.AddSingleton<WriteBehindQueue>();
        services.AddSingleton<IWriteBehindQueue>(sp => sp.GetRequiredService<WriteBehindQueue>());
        services.AddHostedService(sp => sp.GetRequiredService<WriteBehindQueue>());

        return services;
    }
}
|
||||
369
src/__Libraries/StellaOps.Provcache/ProvcacheTelemetry.cs
Normal file
369
src/__Libraries/StellaOps.Provcache/ProvcacheTelemetry.cs
Normal file
@@ -0,0 +1,369 @@
|
||||
using System.Diagnostics;
using System.Diagnostics.Metrics;
using System.Threading;
|
||||
|
||||
namespace StellaOps.Provcache;
|
||||
|
||||
/// <summary>
/// Centralized telemetry for Provcache operations using OpenTelemetry conventions.
/// Provides activity tracing for cache operations and metrics for monitoring.
/// </summary>
public static class ProvcacheTelemetry
{
    /// <summary>
    /// Activity source name for OpenTelemetry tracing.
    /// </summary>
    public const string ActivitySourceName = "StellaOps.Provcache";

    /// <summary>
    /// Meter name for Prometheus metrics.
    /// </summary>
    public const string MeterName = "StellaOps.Provcache";

    #region Cache Result Constants

    public const string ResultHit = "hit";
    public const string ResultMiss = "miss";
    public const string ResultError = "error";
    public const string ResultCreated = "created";
    public const string ResultUpdated = "updated";
    public const string ResultInvalidated = "invalidated";

    #endregion

    #region Metric Names

    private const string RequestsTotalMetric = "provcache_requests_total";
    private const string HitsTotalMetric = "provcache_hits_total";
    private const string MissesTotalMetric = "provcache_misses_total";
    private const string InvalidationsTotalMetric = "provcache_invalidations_total";
    private const string WriteBehindTotalMetric = "provcache_writebehind_total";
    private const string WriteBehindQueueSizeMetric = "provcache_writebehind_queue_size";
    private const string LatencySecondsMetric = "provcache_latency_seconds";
    private const string EntriesSizeMetric = "provcache_entry_bytes";

    #endregion

    private static readonly ActivitySource ActivitySource = new(ActivitySourceName);
    private static readonly Meter Meter = new(MeterName);

    #region Counters

    private static readonly Counter<long> RequestsCounter = Meter.CreateCounter<long>(
        RequestsTotalMetric,
        unit: "count",
        description: "Total cache requests by operation and result.");

    private static readonly Counter<long> HitsCounter = Meter.CreateCounter<long>(
        HitsTotalMetric,
        unit: "count",
        description: "Total cache hits by source (valkey, postgres).");

    private static readonly Counter<long> MissesCounter = Meter.CreateCounter<long>(
        MissesTotalMetric,
        unit: "count",
        description: "Total cache misses.");

    private static readonly Counter<long> InvalidationsCounter = Meter.CreateCounter<long>(
        InvalidationsTotalMetric,
        unit: "count",
        description: "Total cache invalidations by type.");

    private static readonly Counter<long> WriteBehindCounter = Meter.CreateCounter<long>(
        WriteBehindTotalMetric,
        unit: "count",
        description: "Write-behind operations by result.");

    #endregion

    #region Histograms

    private static readonly Histogram<double> LatencyHistogram = Meter.CreateHistogram<double>(
        LatencySecondsMetric,
        unit: "s",
        description: "Cache operation latency in seconds.");

    private static readonly Histogram<long> EntrySizeHistogram = Meter.CreateHistogram<long>(
        EntriesSizeMetric,
        unit: "bytes",
        description: "Size of cache entries in bytes.");

    #endregion

    #region Gauges

    // Backing field for the queue-size gauge. It is written by whichever
    // thread calls SetWriteBehindQueueSize and read by the metrics-collection
    // thread via the gauge callback, so all access goes through Volatile to
    // guarantee cross-thread visibility of the latest value.
    private static int _writeBehindQueueSize;

    /// <summary>
    /// Observable gauge for write-behind queue size.
    /// </summary>
    public static readonly ObservableGauge<int> WriteBehindQueueGauge = Meter.CreateObservableGauge(
        WriteBehindQueueSizeMetric,
        static () => Volatile.Read(ref _writeBehindQueueSize),
        unit: "items",
        description: "Current write-behind queue size.");

    /// <summary>
    /// Update the write-behind queue size gauge.
    /// </summary>
    /// <param name="size">Current queue size.</param>
    public static void SetWriteBehindQueueSize(int size)
    {
        Volatile.Write(ref _writeBehindQueueSize, size);
    }

    #endregion

    #region Activity Tracing

    /// <summary>
    /// Start a cache lookup activity.
    /// </summary>
    /// <param name="veriKey">The VeriKey being looked up.</param>
    /// <returns>The activity or null if tracing is not enabled.</returns>
    public static Activity? StartGetActivity(string veriKey)
    {
        var activity = ActivitySource.StartActivity("provcache.get", ActivityKind.Internal);
        if (activity is null)
        {
            return null;
        }

        activity.SetTag("provcache.verikey", TruncateVeriKey(veriKey));
        return activity;
    }

    /// <summary>
    /// Start a cache store activity.
    /// </summary>
    /// <param name="veriKey">The VeriKey being stored.</param>
    /// <param name="trustScore">The trust score of the entry.</param>
    /// <returns>The activity or null if tracing is not enabled.</returns>
    public static Activity? StartSetActivity(string veriKey, int trustScore)
    {
        var activity = ActivitySource.StartActivity("provcache.set", ActivityKind.Internal);
        if (activity is null)
        {
            return null;
        }

        activity.SetTag("provcache.verikey", TruncateVeriKey(veriKey));
        activity.SetTag("provcache.trust_score", trustScore);
        return activity;
    }

    /// <summary>
    /// Start an invalidation activity.
    /// </summary>
    /// <param name="invalidationType">The type of invalidation (verikey, policy_hash, signer_set_hash, feed_epoch).</param>
    /// <param name="targetValue">The target value for invalidation.</param>
    /// <returns>The activity or null if tracing is not enabled.</returns>
    public static Activity? StartInvalidateActivity(string invalidationType, string? targetValue)
    {
        var activity = ActivitySource.StartActivity("provcache.invalidate", ActivityKind.Internal);
        if (activity is null)
        {
            return null;
        }

        activity.SetTag("provcache.invalidation_type", invalidationType);
        if (!string.IsNullOrWhiteSpace(targetValue))
        {
            activity.SetTag("provcache.target", TruncateVeriKey(targetValue));
        }

        return activity;
    }

    /// <summary>
    /// Start a write-behind flush activity.
    /// </summary>
    /// <param name="batchSize">Number of entries in the batch.</param>
    /// <returns>The activity or null if tracing is not enabled.</returns>
    public static Activity? StartWriteBehindFlushActivity(int batchSize)
    {
        var activity = ActivitySource.StartActivity("provcache.writebehind.flush", ActivityKind.Internal);
        if (activity is null)
        {
            return null;
        }

        activity.SetTag("provcache.batch_size", batchSize);
        return activity;
    }

    /// <summary>
    /// Start a VeriKey construction activity.
    /// </summary>
    /// <returns>The activity or null if tracing is not enabled.</returns>
    public static Activity? StartVeriKeyBuildActivity()
    {
        return ActivitySource.StartActivity("provcache.verikey.build", ActivityKind.Internal);
    }

    /// <summary>
    /// Start a DecisionDigest construction activity.
    /// </summary>
    /// <returns>The activity or null if tracing is not enabled.</returns>
    public static Activity? StartDecisionDigestBuildActivity()
    {
        return ActivitySource.StartActivity("provcache.digest.build", ActivityKind.Internal);
    }

    /// <summary>
    /// Mark an activity as a cache hit.
    /// </summary>
    /// <param name="activity">The activity to mark.</param>
    /// <param name="source">The source of the hit (valkey, postgres).</param>
    public static void MarkCacheHit(Activity? activity, string source)
    {
        activity?.SetTag("provcache.result", ResultHit);
        activity?.SetTag("provcache.source", source);
    }

    /// <summary>
    /// Mark an activity as a cache miss.
    /// </summary>
    /// <param name="activity">The activity to mark.</param>
    public static void MarkCacheMiss(Activity? activity)
    {
        activity?.SetTag("provcache.result", ResultMiss);
    }

    /// <summary>
    /// Mark an activity as having an error.
    /// </summary>
    /// <param name="activity">The activity to mark.</param>
    /// <param name="errorMessage">The error message.</param>
    public static void MarkError(Activity? activity, string errorMessage)
    {
        activity?.SetStatus(ActivityStatusCode.Error, errorMessage);
        activity?.SetTag("provcache.result", ResultError);
    }

    #endregion

    #region Metrics Recording

    /// <summary>
    /// Record a cache request.
    /// </summary>
    /// <param name="operation">The operation type (get, set, invalidate).</param>
    /// <param name="result">The result (hit, miss, error, created, etc.).</param>
    public static void RecordRequest(string operation, string result)
    {
        if (string.IsNullOrWhiteSpace(operation) || string.IsNullOrWhiteSpace(result))
        {
            return;
        }

        var tags = new TagList
        {
            { "operation", operation },
            { "result", result }
        };

        RequestsCounter.Add(1, tags);
    }

    /// <summary>
    /// Record a cache hit.
    /// </summary>
    /// <param name="source">The source of the hit (valkey, postgres).</param>
    public static void RecordHit(string source)
    {
        if (string.IsNullOrWhiteSpace(source))
        {
            return;
        }

        var tags = new TagList
        {
            { "source", source }
        };

        HitsCounter.Add(1, tags);
    }

    /// <summary>
    /// Record a cache miss.
    /// </summary>
    public static void RecordMiss()
    {
        MissesCounter.Add(1);
    }

    /// <summary>
    /// Record an invalidation.
    /// </summary>
    /// <param name="invalidationType">The type of invalidation.</param>
    /// <param name="count">Number of entries invalidated.</param>
    public static void RecordInvalidation(string invalidationType, long count = 1)
    {
        if (string.IsNullOrWhiteSpace(invalidationType))
        {
            return;
        }

        var tags = new TagList
        {
            { "type", invalidationType }
        };

        InvalidationsCounter.Add(count, tags);
    }

    /// <summary>
    /// Record a write-behind operation result.
    /// </summary>
    /// <param name="result">The result (ok, retry, failed).</param>
    /// <param name="count">Number of entries.</param>
    public static void RecordWriteBehind(string result, long count = 1)
    {
        if (string.IsNullOrWhiteSpace(result))
        {
            return;
        }

        var tags = new TagList
        {
            { "result", result }
        };

        WriteBehindCounter.Add(count, tags);
    }

    /// <summary>
    /// Record operation latency.
    /// </summary>
    /// <param name="operation">The operation type.</param>
    /// <param name="duration">The operation duration.</param>
    public static void RecordLatency(string operation, TimeSpan duration)
    {
        if (string.IsNullOrWhiteSpace(operation))
        {
            return;
        }

        // Clamp NaN/Infinity/negative durations to zero so the histogram
        // never receives an invalid sample.
        var seconds = duration.TotalSeconds;
        if (double.IsNaN(seconds) || double.IsInfinity(seconds) || seconds < 0)
        {
            seconds = 0d;
        }

        var tags = new TagList
        {
            { "operation", operation }
        };

        LatencyHistogram.Record(seconds, tags);
    }

    /// <summary>
    /// Record cache entry size.
    /// </summary>
    /// <param name="bytes">The size in bytes.</param>
    public static void RecordEntrySize(long bytes)
    {
        if (bytes < 0)
        {
            return;
        }

        EntrySizeHistogram.Record(bytes);
    }

    #endregion

    #region Helper Methods

    /// <summary>
    /// Truncate a VeriKey for logging (avoid huge trace payloads).
    /// </summary>
    private static string TruncateVeriKey(string? veriKey)
    {
        if (string.IsNullOrWhiteSpace(veriKey))
        {
            return "(empty)";
        }

        // Keep the "sha256:" prefix plus the first 16 hex chars (7 + 16 = 23).
        if (veriKey.StartsWith("sha256:", StringComparison.OrdinalIgnoreCase) && veriKey.Length > 23)
        {
            return veriKey[..23] + "...";
        }

        return veriKey.Length > 32 ? veriKey[..32] + "..." : veriKey;
    }

    #endregion
}
|
||||
@@ -0,0 +1,31 @@
|
||||
<?xml version="1.0" encoding="utf-8"?>
<Project Sdk="Microsoft.NET.Sdk">

  <!-- Class library for the StellaOps provenance cache (Provcache). -->
  <PropertyGroup>
    <TargetFramework>net10.0</TargetFramework>
    <ImplicitUsings>enable</ImplicitUsings>
    <Nullable>enable</Nullable>
    <LangVersion>preview</LangVersion>
    <TreatWarningsAsErrors>false</TreatWarningsAsErrors>
    <RootNamespace>StellaOps.Provcache</RootNamespace>
    <AssemblyName>StellaOps.Provcache</AssemblyName>
    <Description>Provenance Cache for StellaOps - Maximizing trust evidence density</Description>
  </PropertyGroup>

  <!-- Only Microsoft.Extensions abstraction packages; concrete store/repository
       backends (Valkey, Postgres) are referenced by their own projects. -->
  <ItemGroup>
    <PackageReference Include="Microsoft.Extensions.Configuration.Abstractions" Version="10.0.0" />
    <PackageReference Include="Microsoft.Extensions.DependencyInjection.Abstractions" Version="10.0.0" />
    <PackageReference Include="Microsoft.Extensions.Hosting.Abstractions" Version="10.0.0" />
    <PackageReference Include="Microsoft.Extensions.Logging.Abstractions" Version="10.0.0" />
    <PackageReference Include="Microsoft.Extensions.Options" Version="10.0.0" />
    <PackageReference Include="Microsoft.Extensions.Options.ConfigurationExtensions" Version="10.0.0" />
    <PackageReference Include="Microsoft.Extensions.Options.DataAnnotations" Version="10.0.0" />
  </ItemGroup>

  <!-- Sibling StellaOps libraries: canonical JSON serialization, cryptography,
       and messaging. -->
  <ItemGroup>
    <ProjectReference Include="../StellaOps.Canonical.Json/StellaOps.Canonical.Json.csproj" />
    <ProjectReference Include="../StellaOps.Cryptography/StellaOps.Cryptography.csproj" />
    <ProjectReference Include="../StellaOps.Messaging/StellaOps.Messaging.csproj" />
  </ItemGroup>

</Project>
|
||||
389
src/__Libraries/StellaOps.Provcache/VeriKeyBuilder.cs
Normal file
389
src/__Libraries/StellaOps.Provcache/VeriKeyBuilder.cs
Normal file
@@ -0,0 +1,389 @@
|
||||
using System.Security.Cryptography;
|
||||
using System.Text;
|
||||
|
||||
namespace StellaOps.Provcache;
|
||||
|
||||
/// <summary>
|
||||
/// Fluent builder for constructing a VeriKey (provenance identity key).
|
||||
/// VeriKey is a composite hash that uniquely identifies a provenance decision context.
|
||||
/// </summary>
|
||||
/// <remarks>
|
||||
/// <para>
|
||||
/// VeriKey = SHA256(source_hash || sbom_hash || vex_hash_set_hash || merge_policy_hash || signer_set_hash || time_window)
|
||||
/// </para>
|
||||
/// <para>
|
||||
/// Each component ensures cache invalidation when relevant inputs change:
|
||||
/// <list type="bullet">
|
||||
/// <item><c>source_hash</c>: Different artifacts get different keys</item>
|
||||
/// <item><c>sbom_hash</c>: SBOM changes (new packages) create new key</item>
|
||||
/// <item><c>vex_hash_set</c>: VEX updates create new key</item>
|
||||
/// <item><c>policy_hash</c>: Policy changes create new key</item>
|
||||
/// <item><c>signer_set_hash</c>: Key rotation creates new key (security)</item>
|
||||
/// <item><c>time_window</c>: Temporal bucketing enables controlled expiry</item>
|
||||
/// </list>
|
||||
/// </para>
|
||||
/// </remarks>
|
||||
public sealed class VeriKeyBuilder
|
||||
{
|
||||
// Components of the composite VeriKey. Hash components are normalized via
// NormalizeHash by their With* setters; _timeWindow is stored as given.
private string? _sourceHash;      // artifact/image content digest
private string? _sbomHash;        // canonical SBOM digest
private string? _vexHashSetHash;  // aggregate of sorted VEX statement hashes
private string? _mergePolicyHash; // policy bundle digest
private string? _signerSetHash;   // aggregate of sorted signer certificate hashes
private string? _timeWindow;      // temporal bucket label (not a hash)
private readonly ProvcacheOptions _options; // supplies time-window bucketing
|
||||
|
||||
/// <summary>
/// Creates a new VeriKeyBuilder with default options.
/// </summary>
public VeriKeyBuilder() : this(new ProvcacheOptions())
{
    // Delegates to the options-taking constructor with default options,
    // which also determine the default time-window bucketing.
}
|
||||
|
||||
/// <summary>
/// Creates a new VeriKeyBuilder with the specified options.
/// </summary>
/// <param name="options">Provcache configuration options.</param>
/// <exception cref="ArgumentNullException">If <paramref name="options"/> is null.</exception>
public VeriKeyBuilder(ProvcacheOptions options)
{
    _options = options ?? throw new ArgumentNullException(nameof(options));
}
|
||||
|
||||
/// <summary>
/// Sets the source artifact digest (image/artifact content-addressed hash).
/// </summary>
/// <param name="sourceHash">The artifact digest (e.g., sha256:abc123...).</param>
/// <returns>This builder for fluent chaining.</returns>
/// <exception cref="ArgumentException">If sourceHash is null or empty.</exception>
public VeriKeyBuilder WithSourceHash(string sourceHash)
{
    // Reject blank input, otherwise store the normalized digest.
    _sourceHash = string.IsNullOrWhiteSpace(sourceHash)
        ? throw new ArgumentException("Source hash cannot be null or empty.", nameof(sourceHash))
        : NormalizeHash(sourceHash);
    return this;
}
|
||||
|
||||
/// <summary>
/// Sets the SBOM canonical hash.
/// Automatically canonicalizes the SBOM content before hashing if raw bytes are provided.
/// </summary>
/// <param name="sbomHash">The SBOM canonical hash.</param>
/// <returns>This builder for fluent chaining.</returns>
/// <exception cref="ArgumentException">If sbomHash is null or empty.</exception>
public VeriKeyBuilder WithSbomHash(string sbomHash)
{
    // Reject blank input, otherwise store the normalized digest.
    _sbomHash = string.IsNullOrWhiteSpace(sbomHash)
        ? throw new ArgumentException("SBOM hash cannot be null or empty.", nameof(sbomHash))
        : NormalizeHash(sbomHash);
    return this;
}
|
||||
|
||||
/// <summary>
/// Computes SBOM hash from raw SBOM bytes using canonical serialization.
/// </summary>
/// <param name="sbomBytes">Raw SBOM content bytes.</param>
/// <returns>This builder for fluent chaining.</returns>
public VeriKeyBuilder WithSbomBytes(ReadOnlySpan<byte> sbomBytes)
{
    // Hash straight from the span; no intermediate string form is kept.
    var digest = ComputeHash(sbomBytes);
    _sbomHash = digest;
    return this;
}
|
||||
|
||||
/// <summary>
/// Sets the VEX hash set hash (sorted aggregation of VEX statement hashes).
/// </summary>
/// <param name="vexHashSetHash">The pre-computed VEX hash set hash.</param>
/// <returns>This builder for fluent chaining.</returns>
/// <exception cref="ArgumentException">If vexHashSetHash is null or empty.</exception>
public VeriKeyBuilder WithVexHashSet(string vexHashSetHash)
{
    // Reject blank input, otherwise store the normalized digest.
    _vexHashSetHash = string.IsNullOrWhiteSpace(vexHashSetHash)
        ? throw new ArgumentException("VEX hash set hash cannot be null or empty.", nameof(vexHashSetHash))
        : NormalizeHash(vexHashSetHash);
    return this;
}
|
||||
|
||||
/// <summary>
/// Computes VEX hash set from individual VEX statement hashes.
/// Hashes are sorted lexicographically before aggregation for determinism.
/// </summary>
/// <param name="vexStatementHashes">Individual VEX statement hashes.</param>
/// <returns>This builder for fluent chaining.</returns>
public VeriKeyBuilder WithVexStatementHashes(IEnumerable<string> vexStatementHashes)
{
    ArgumentNullException.ThrowIfNull(vexStatementHashes);

    // Normalize, drop blanks, and order with an ordinal comparer so the
    // aggregate is independent of input order.
    var normalized = vexStatementHashes
        .Where(static h => !string.IsNullOrWhiteSpace(h))
        .Select(NormalizeHash)
        .OrderBy(static h => h, StringComparer.Ordinal)
        .ToList();

    // An empty set hashes a well-known sentinel; otherwise hash the
    // '|'-joined, sorted hashes.
    _vexHashSetHash = normalized.Count == 0
        ? ComputeHash(Encoding.UTF8.GetBytes("empty-vex-set"))
        : ComputeHash(Encoding.UTF8.GetBytes(string.Join("|", normalized)));

    return this;
}
|
||||
|
||||
/// <summary>
/// Sets the merge policy hash (PolicyBundle digest).
/// </summary>
/// <param name="policyHash">The policy bundle hash.</param>
/// <returns>This builder for fluent chaining.</returns>
/// <exception cref="ArgumentException">If policyHash is null or empty.</exception>
public VeriKeyBuilder WithMergePolicyHash(string policyHash)
{
    // Reject blank input, otherwise store the normalized digest.
    _mergePolicyHash = string.IsNullOrWhiteSpace(policyHash)
        ? throw new ArgumentException("Policy hash cannot be null or empty.", nameof(policyHash))
        : NormalizeHash(policyHash);
    return this;
}
|
||||
|
||||
/// <summary>
/// Computes policy hash from raw policy bundle bytes.
/// </summary>
/// <param name="policyBytes">Raw policy bundle content bytes.</param>
/// <returns>This builder for fluent chaining.</returns>
public VeriKeyBuilder WithMergePolicyBytes(ReadOnlySpan<byte> policyBytes)
{
    // Hash straight from the span; no intermediate string form is kept.
    var digest = ComputeHash(policyBytes);
    _mergePolicyHash = digest;
    return this;
}
|
||||
|
||||
/// <summary>
/// Sets the signer set hash (sorted certificate chain hashes).
/// </summary>
/// <param name="signerSetHash">The pre-computed signer set hash.</param>
/// <returns>This builder for fluent chaining.</returns>
/// <exception cref="ArgumentException">If signerSetHash is null or empty.</exception>
public VeriKeyBuilder WithSignerSetHash(string signerSetHash)
{
    // Reject blank input, otherwise store the normalized digest.
    _signerSetHash = string.IsNullOrWhiteSpace(signerSetHash)
        ? throw new ArgumentException("Signer set hash cannot be null or empty.", nameof(signerSetHash))
        : NormalizeHash(signerSetHash);
    return this;
}
|
||||
|
||||
/// <summary>
/// Computes signer set hash from individual certificate hashes.
/// Hashes are sorted lexicographically before aggregation for determinism.
/// </summary>
/// <param name="certificateHashes">Individual certificate hashes.</param>
/// <returns>This builder for fluent chaining.</returns>
public VeriKeyBuilder WithCertificateHashes(IEnumerable<string> certificateHashes)
{
    ArgumentNullException.ThrowIfNull(certificateHashes);

    // Normalize, drop blanks, and order with an ordinal comparer so the
    // aggregate is independent of input order.
    var normalized = certificateHashes
        .Where(static h => !string.IsNullOrWhiteSpace(h))
        .Select(NormalizeHash)
        .OrderBy(static h => h, StringComparer.Ordinal)
        .ToList();

    // An empty set hashes a well-known sentinel; otherwise hash the
    // '|'-joined, sorted hashes.
    _signerSetHash = normalized.Count == 0
        ? ComputeHash(Encoding.UTF8.GetBytes("empty-signer-set"))
        : ComputeHash(Encoding.UTF8.GetBytes(string.Join("|", normalized)));

    return this;
}
|
||||
|
||||
/// <summary>
/// Sets the time window for epoch bucketing.
/// </summary>
/// <param name="timeWindow">The time window identifier (e.g., "2024-12-24T12:00:00Z").</param>
/// <returns>This builder for fluent chaining.</returns>
/// <exception cref="ArgumentException">If timeWindow is null or empty.</exception>
public VeriKeyBuilder WithTimeWindow(string timeWindow)
{
    // Stored verbatim — time windows are labels, not hashes, so no
    // normalization is applied.
    _timeWindow = string.IsNullOrWhiteSpace(timeWindow)
        ? throw new ArgumentException("Time window cannot be null or empty.", nameof(timeWindow))
        : timeWindow;
    return this;
}
|
||||
|
||||
/// <summary>
/// Computes the time window from a timestamp using the configured bucket size.
/// </summary>
/// <param name="timestamp">The timestamp to bucket.</param>
/// <returns>This builder for fluent chaining.</returns>
public VeriKeyBuilder WithTimeWindow(DateTimeOffset timestamp)
{
    // Bucketing is delegated to the options so the window size stays configurable.
    _timeWindow = _options.ComputeTimeWindow(timestamp);
    return this;
}
|
||||
|
||||
/// <summary>
/// Builds the final VeriKey by hashing all components together.
/// </summary>
/// <returns>The computed VeriKey in format "sha256:&lt;hex&gt;".</returns>
/// <exception cref="InvalidOperationException">If required components are missing.</exception>
public string Build()
{
    ValidateRequiredComponents();

    // Composite input: version prefix followed by every component, joined
    // with '|'. The "v1" prefix leaves room for future format revisions.
    // Produces exactly: "v1|<source>|<sbom>|<vex>|<policy>|<signers>|<window>".
    var composite = string.Join('|',
        "v1",
        _sourceHash,
        _sbomHash,
        _vexHashSetHash,
        _mergePolicyHash,
        _signerSetHash,
        _timeWindow);

    return ComputeHash(Encoding.UTF8.GetBytes(composite));
}
|
||||
|
||||
/// <summary>
/// Builds a <see cref="VeriKeyComponents"/> record with all individual components.
/// Useful for debugging and serialization.
/// </summary>
/// <returns>A record containing the final VeriKey plus every input component.</returns>
/// <exception cref="InvalidOperationException">If required components are missing.</exception>
public VeriKeyComponents BuildWithComponents()
{
    ValidateRequiredComponents();

    // Build() re-validates, which is redundant but harmless; the key is
    // computed first so the record is fully populated in one expression.
    var veriKey = Build();

    return new VeriKeyComponents
    {
        VeriKey = veriKey,
        SourceHash = _sourceHash!,
        SbomHash = _sbomHash!,
        VexHashSetHash = _vexHashSetHash!,
        MergePolicyHash = _mergePolicyHash!,
        SignerSetHash = _signerSetHash!,
        TimeWindow = _timeWindow!
    };
}
|
||||
|
||||
/// <summary>
/// Resets the builder to its initial state, clearing every component.
/// </summary>
/// <returns>This builder for fluent chaining.</returns>
public VeriKeyBuilder Reset()
{
    _sourceHash = _sbomHash = _vexHashSetHash = null;
    _mergePolicyHash = _signerSetHash = _timeWindow = null;
    return this;
}
|
||||
|
||||
// Throws InvalidOperationException listing every component that is still
// unset, so callers get a single actionable message instead of one at a time.
private void ValidateRequiredComponents()
{
    var missing = new List<string>();

    // Local helper keeps the six checks uniform and the names adjacent
    // to the fields they describe.
    void Require(string? value, string name)
    {
        if (string.IsNullOrWhiteSpace(value))
        {
            missing.Add(name);
        }
    }

    Require(_sourceHash, "SourceHash");
    Require(_sbomHash, "SbomHash");
    Require(_vexHashSetHash, "VexHashSetHash");
    Require(_mergePolicyHash, "MergePolicyHash");
    Require(_signerSetHash, "SignerSetHash");
    Require(_timeWindow, "TimeWindow");

    if (missing.Count > 0)
    {
        throw new InvalidOperationException(
            $"Cannot build VeriKey: missing required components: {string.Join(", ", missing)}. " +
            "Use the With* methods to set all required components before calling Build().");
    }
}
|
||||
|
||||
// Hashes the payload with SHA-256 and formats it as "sha256:<lower-hex>".
private static string ComputeHash(ReadOnlySpan<byte> data)
{
    // SHA-256 digests are 32 bytes; stack allocation avoids a heap buffer.
    Span<byte> digest = stackalloc byte[SHA256.HashSizeInBytes];
    SHA256.HashData(data, digest);
    return $"sha256:{Convert.ToHexStringLower(digest)}";
}
|
||||
|
||||
// Canonicalizes a hash string to lowercase, attaching a "sha256:" prefix
// to bare 64-character hex strings. Unrecognized formats are only lowercased.
private static string NormalizeHash(string hash)
{
    // Already prefixed: keep the prefix, lowercase the hex portion.
    if (hash.StartsWith("sha256:", StringComparison.OrdinalIgnoreCase))
    {
        return $"sha256:{hash[7..].ToLowerInvariant()}";
    }

    // Bare hex of SHA-256 length: assume SHA-256 and attach the prefix.
    if (hash.Length == 64 && hash.All(char.IsAsciiHexDigit))
    {
        return $"sha256:{hash.ToLowerInvariant()}";
    }

    // Anything else (possibly another algorithm's format) passes through lowercased.
    return hash.ToLowerInvariant();
}
|
||||
}
|
||||
|
||||
/// <summary>
/// Immutable record exposing the final VeriKey together with every input
/// component it was derived from, for debugging and serialization.
/// </summary>
public sealed record VeriKeyComponents
{
    /// <summary>
    /// The final computed VeriKey ("sha256:&lt;hex&gt;").
    /// </summary>
    public required string VeriKey { get; init; }

    /// <summary>
    /// Source artifact digest.
    /// </summary>
    public required string SourceHash { get; init; }

    /// <summary>
    /// SBOM canonical hash.
    /// </summary>
    public required string SbomHash { get; init; }

    /// <summary>
    /// Aggregate hash over the VEX document hash set.
    /// </summary>
    public required string VexHashSetHash { get; init; }

    /// <summary>
    /// Merge-policy bundle hash.
    /// </summary>
    public required string MergePolicyHash { get; init; }

    /// <summary>
    /// Aggregate hash over the signer certificate set.
    /// </summary>
    public required string SignerSetHash { get; init; }

    /// <summary>
    /// Time window identifier used for epoch bucketing.
    /// </summary>
    public required string TimeWindow { get; init; }
}
|
||||
262
src/__Libraries/StellaOps.Provcache/WriteBehindQueue.cs
Normal file
262
src/__Libraries/StellaOps.Provcache/WriteBehindQueue.cs
Normal file
@@ -0,0 +1,262 @@
|
||||
using System.Threading.Channels;
|
||||
using Microsoft.Extensions.Hosting;
|
||||
using Microsoft.Extensions.Logging;
|
||||
using Microsoft.Extensions.Options;
|
||||
|
||||
namespace StellaOps.Provcache;
|
||||
|
||||
/// <summary>
/// Background service that manages write-behind persistence for Provcache entries.
/// Batches writes to the repository for efficiency and provides retry logic for
/// transient failures. Entries flow through a bounded channel that applies
/// backpressure to producers when full.
/// </summary>
public sealed class WriteBehindQueue : BackgroundService, IWriteBehindQueue
{
    private readonly Channel<WriteBehindItem> _channel;
    private readonly IProvcacheRepository _repository;
    private readonly ProvcacheOptions _options;
    private readonly ILogger<WriteBehindQueue> _logger;
    private readonly TimeProvider _timeProvider;

    // Metrics: written via Interlocked, snapshotted by GetMetrics().
    private long _totalEnqueued;
    private long _totalPersisted;
    private long _totalFailed;
    private long _totalRetries;
    private long _totalBatches;
    private long _currentQueueDepth;

    /// <summary>
    /// Creates the write-behind queue.
    /// </summary>
    /// <param name="repository">Repository used for batched upserts.</param>
    /// <param name="options">Provcache configuration (capacity, batch size, flush interval, max retries).</param>
    /// <param name="logger">Logger for diagnostics.</param>
    /// <param name="timeProvider">Clock; defaults to <see cref="TimeProvider.System"/>.</param>
    public WriteBehindQueue(
        IProvcacheRepository repository,
        IOptions<ProvcacheOptions> options,
        ILogger<WriteBehindQueue> logger,
        TimeProvider? timeProvider = null)
    {
        _repository = repository ?? throw new ArgumentNullException(nameof(repository));
        _options = options?.Value ?? throw new ArgumentNullException(nameof(options));
        _logger = logger ?? throw new ArgumentNullException(nameof(logger));
        _timeProvider = timeProvider ?? TimeProvider.System;

        // Bounded channel provides backpressure: producers wait when full.
        _channel = Channel.CreateBounded<WriteBehindItem>(new BoundedChannelOptions(_options.WriteBehindQueueCapacity)
        {
            FullMode = BoundedChannelFullMode.Wait,
            SingleWriter = false,
            SingleReader = true
        });
    }

    /// <inheritdoc />
    public async ValueTask EnqueueAsync(ProvcacheEntry entry, CancellationToken cancellationToken = default)
    {
        ArgumentNullException.ThrowIfNull(entry);

        var item = new WriteBehindItem
        {
            Entry = entry,
            EnqueuedAt = _timeProvider.GetUtcNow(),
            RetryCount = 0
        };

        await _channel.Writer.WriteAsync(item, cancellationToken).ConfigureAwait(false);

        // FIX: count only after the channel accepted the item. The previous
        // version incremented before the (possibly waiting) write completed,
        // so a cancelled enqueue permanently skewed both counters.
        Interlocked.Increment(ref _totalEnqueued);
        Interlocked.Increment(ref _currentQueueDepth);
    }

    /// <inheritdoc />
    public WriteBehindMetrics GetMetrics()
    {
        return new WriteBehindMetrics
        {
            TotalEnqueued = Interlocked.Read(ref _totalEnqueued),
            TotalPersisted = Interlocked.Read(ref _totalPersisted),
            TotalFailed = Interlocked.Read(ref _totalFailed),
            TotalRetries = Interlocked.Read(ref _totalRetries),
            TotalBatches = Interlocked.Read(ref _totalBatches),
            CurrentQueueDepth = Interlocked.Read(ref _currentQueueDepth),
            Timestamp = _timeProvider.GetUtcNow()
        };
    }

    /// <summary>
    /// Main processing loop that reads from the channel and batches writes.
    /// A batch is flushed when it reaches the configured size or when the
    /// flush interval elapses, whichever comes first.
    /// </summary>
    protected override async Task ExecuteAsync(CancellationToken stoppingToken)
    {
        _logger.LogInformation(
            "Write-behind queue started with batch size {BatchSize}, interval {IntervalMs}ms",
            _options.WriteBehindBatchSize,
            _options.WriteBehindFlushIntervalMs);

        var batch = new List<WriteBehindItem>(_options.WriteBehindBatchSize);
        var flushInterval = TimeSpan.FromMilliseconds(_options.WriteBehindFlushIntervalMs);

        while (!stoppingToken.IsCancellationRequested)
        {
            try
            {
                batch.Clear();

                // Read items until the batch is full or the flush interval elapses.
                using var cts = CancellationTokenSource.CreateLinkedTokenSource(stoppingToken);
                cts.CancelAfter(flushInterval);

                try
                {
                    while (batch.Count < _options.WriteBehindBatchSize)
                    {
                        var item = await _channel.Reader.ReadAsync(cts.Token).ConfigureAwait(false);
                        batch.Add(item);
                        Interlocked.Decrement(ref _currentQueueDepth);
                    }
                }
                catch (OperationCanceledException) when (!stoppingToken.IsCancellationRequested)
                {
                    // Flush-interval timeout: fall through and persist what we have.
                }

                if (batch.Count > 0)
                {
                    await ProcessBatchAsync(batch, stoppingToken).ConfigureAwait(false);
                }
            }
            catch (OperationCanceledException) when (stoppingToken.IsCancellationRequested)
            {
                // BUGFIX: when shutdown cancels ReadAsync mid-fill, items already
                // moved into the local batch are no longer in the channel, so
                // DrainAsync below cannot see them. Persist them here instead of
                // silently dropping them (the original code broke out immediately,
                // losing up to a full batch of entries on every shutdown).
                if (batch.Count > 0)
                {
                    await ProcessBatchAsync(batch, CancellationToken.None).ConfigureAwait(false);
                    batch.Clear();
                }

                break;
            }
            catch (Exception ex)
            {
                _logger.LogError(ex, "Error in write-behind queue processing loop");
                await Task.Delay(1000, stoppingToken).ConfigureAwait(false); // Backoff on error
            }
        }

        // Drain remaining items on shutdown.
        await DrainAsync(CancellationToken.None).ConfigureAwait(false);

        _logger.LogInformation("Write-behind queue stopped");
    }

    /// <summary>
    /// Persists a batch of items. On failure, items are re-enqueued with an
    /// incremented retry count until <c>WriteBehindMaxRetries</c> is exceeded,
    /// after which they are counted as failed and dropped.
    /// </summary>
    private async Task ProcessBatchAsync(List<WriteBehindItem> batch, CancellationToken cancellationToken)
    {
        var entries = batch.Select(b => b.Entry).ToList();

        try
        {
            await _repository.UpsertManyAsync(entries, cancellationToken).ConfigureAwait(false);

            Interlocked.Add(ref _totalPersisted, batch.Count);
            Interlocked.Increment(ref _totalBatches);

            _logger.LogDebug(
                "Write-behind batch persisted {Count} entries",
                batch.Count);
        }
        catch (Exception ex)
        {
            _logger.LogWarning(
                ex,
                "Write-behind batch failed for {Count} entries, scheduling retries",
                batch.Count);

            // Re-enqueue failed items for retry.
            foreach (var item in batch)
            {
                if (item.RetryCount < _options.WriteBehindMaxRetries)
                {
                    var retryItem = item with { RetryCount = item.RetryCount + 1 };
                    Interlocked.Increment(ref _totalRetries);

                    if (_channel.Writer.TryWrite(retryItem))
                    {
                        Interlocked.Increment(ref _currentQueueDepth);
                    }
                    else
                    {
                        // Queue full: dropping is the only non-blocking option here.
                        Interlocked.Increment(ref _totalFailed);
                        _logger.LogError(
                            "Write-behind queue full, dropping entry for VeriKey {VeriKey}",
                            item.Entry.VeriKey);
                    }
                }
                else
                {
                    Interlocked.Increment(ref _totalFailed);
                    _logger.LogError(
                        "Write-behind max retries exceeded for VeriKey {VeriKey}",
                        item.Entry.VeriKey);
                }
            }
        }
    }

    /// <summary>
    /// Drains remaining items from the channel during shutdown, flushing in
    /// batch-sized chunks. Retries re-enqueued by a failing batch are picked
    /// up by the same loop.
    /// </summary>
    private async Task DrainAsync(CancellationToken cancellationToken)
    {
        var batch = new List<WriteBehindItem>();

        while (_channel.Reader.TryRead(out var item))
        {
            batch.Add(item);
            Interlocked.Decrement(ref _currentQueueDepth);

            if (batch.Count >= _options.WriteBehindBatchSize)
            {
                await ProcessBatchAsync(batch, cancellationToken).ConfigureAwait(false);
                batch.Clear();
            }
        }

        if (batch.Count > 0)
        {
            await ProcessBatchAsync(batch, cancellationToken).ConfigureAwait(false);
        }

        _logger.LogInformation("Write-behind queue drained");
    }
}
|
||||
|
||||
/// <summary>
/// Interface for write-behind queue operations: asynchronous enqueue of cache
/// entries for deferred persistence, plus metrics inspection.
/// </summary>
public interface IWriteBehindQueue
{
    /// <summary>
    /// Enqueues an entry for asynchronous persistence. The returned task
    /// completes once the entry has been accepted by the queue; the token is
    /// observed while waiting for capacity.
    /// </summary>
    /// <param name="entry">The cache entry to persist.</param>
    /// <param name="cancellationToken">Cancels the pending enqueue.</param>
    ValueTask EnqueueAsync(ProvcacheEntry entry, CancellationToken cancellationToken = default);

    /// <summary>
    /// Gets current queue metrics as a point-in-time snapshot.
    /// </summary>
    WriteBehindMetrics GetMetrics();
}
|
||||
|
||||
/// <summary>
/// Item in the write-behind queue with retry metadata.
/// </summary>
internal sealed record WriteBehindItem
{
    /// <summary>The cache entry awaiting persistence.</summary>
    public required ProvcacheEntry Entry { get; init; }

    /// <summary>UTC timestamp at which the entry was enqueued.</summary>
    public required DateTimeOffset EnqueuedAt { get; init; }

    /// <summary>Number of persistence attempts that have already failed for this item.</summary>
    public required int RetryCount { get; init; }
}
|
||||
|
||||
/// <summary>
/// Point-in-time snapshot of write-behind queue counters.
/// </summary>
public sealed record WriteBehindMetrics
{
    /// <summary>Total items ever enqueued.</summary>
    public required long TotalEnqueued { get; init; }

    /// <summary>Total items successfully persisted.</summary>
    public required long TotalPersisted { get; init; }

    /// <summary>Total items dropped after exhausting retries or when the queue was full.</summary>
    public required long TotalFailed { get; init; }

    /// <summary>Total retry attempts scheduled.</summary>
    public required long TotalRetries { get; init; }

    /// <summary>Total batches persisted.</summary>
    public required long TotalBatches { get; init; }

    /// <summary>Items currently waiting in the queue.</summary>
    public required long CurrentQueueDepth { get; init; }

    /// <summary>When this snapshot was taken.</summary>
    public required DateTimeOffset Timestamp { get; init; }
}
|
||||
Reference in New Issue
Block a user