save progress
@@ -1,3 +1,4 @@
+using System.Globalization;
 using System.Security.Cryptography;
 using System.Text;
 using System.Text.Json;
@@ -83,6 +84,11 @@ public sealed class LlmInferenceCacheOptions
     /// </summary>
     public int MaxContentLength { get; set; } = 100_000;

+    /// <summary>
+    /// Maximum number of cache entries (0 = unlimited).
+    /// </summary>
+    public int MaxEntries { get; set; }
+
     /// <summary>
     /// Whether to use sliding expiration.
     /// </summary>
@@ -194,7 +200,8 @@ public sealed class InMemoryLlmInferenceCache : ILlmInferenceCache, IDisposable
     {
         if (_cache.TryGetValue(key, out var entry))
         {
-            if (entry.ExpiresAt > _timeProvider.GetUtcNow())
+            var now = _timeProvider.GetUtcNow();
+            if (entry.ExpiresAt > now)
             {
                 Interlocked.Increment(ref _hits);
                 Interlocked.Add(ref _tokensSaved, entry.Result.OutputTokens ?? 0);
@@ -202,7 +209,8 @@ public sealed class InMemoryLlmInferenceCache : ILlmInferenceCache, IDisposable
                 // Update access time for sliding expiration
                 if (_options.SlidingExpiration)
                 {
-                    entry.AccessedAt = _timeProvider.GetUtcNow();
+                    entry.AccessedAt = now;
+                    entry.ExpiresAt = ApplySlidingExpiration(entry, now);
                 }

                 _logger.LogDebug("Cache hit for key {Key}", key);
@@ -246,6 +254,11 @@ public sealed class InMemoryLlmInferenceCache : ILlmInferenceCache, IDisposable

         var key = ComputeCacheKey(request, providerId);
         var ttl = result.Deterministic ? _options.DefaultTtl : _options.ShortTtl;
+        ttl = ClampTtl(ttl);
+        if (ttl <= TimeSpan.Zero)
+        {
+            return Task.CompletedTask;
+        }
         var now = _timeProvider.GetUtcNow();

         var entry = new CacheEntry
@@ -253,12 +266,14 @@ public sealed class InMemoryLlmInferenceCache : ILlmInferenceCache, IDisposable
             Result = result,
             CreatedAt = now,
             AccessedAt = now,
-            ExpiresAt = now.Add(ttl)
+            ExpiresAt = now.Add(ttl),
+            Ttl = ttl
         };

         lock (_lock)
         {
             _cache[key] = entry;
+            EnforceCacheLimit();
         }

         Interlocked.Increment(ref _sets);
@@ -328,7 +343,7 @@ public sealed class InMemoryLlmInferenceCache : ILlmInferenceCache, IDisposable

         // Include temperature and max tokens in key
         sb.Append(':');
-        sb.Append(request.Temperature.ToString("F2"));
+        sb.Append(request.Temperature.ToString("F2", CultureInfo.InvariantCulture));
         sb.Append(':');
         sb.Append(request.MaxTokens);

@@ -372,12 +387,75 @@ public sealed class InMemoryLlmInferenceCache : ILlmInferenceCache, IDisposable
         _cleanupTimer.Dispose();
     }

+    private TimeSpan ClampTtl(TimeSpan ttl)
+    {
+        if (ttl <= TimeSpan.Zero)
+        {
+            return TimeSpan.Zero;
+        }
+
+        if (_options.MaxTtl > TimeSpan.Zero && ttl > _options.MaxTtl)
+        {
+            return _options.MaxTtl;
+        }
+
+        return ttl;
+    }
+
+    private DateTimeOffset ApplySlidingExpiration(CacheEntry entry, DateTimeOffset now)
+    {
+        if (entry.Ttl <= TimeSpan.Zero)
+        {
+            return entry.ExpiresAt;
+        }
+
+        var proposed = now.Add(entry.Ttl);
+        if (_options.MaxTtl > TimeSpan.Zero)
+        {
+            var maxAllowed = entry.CreatedAt.Add(_options.MaxTtl);
+            if (proposed > maxAllowed)
+            {
+                return maxAllowed;
+            }
+        }
+
+        return proposed;
+    }
+
+    private void EnforceCacheLimit()
+    {
+        if (_options.MaxEntries <= 0)
+        {
+            return;
+        }
+
+        var removeCount = _cache.Count - _options.MaxEntries;
+        if (removeCount <= 0)
+        {
+            return;
+        }
+
+        var keysToRemove = _cache
+            .OrderBy(entry => entry.Value.AccessedAt)
+            .ThenBy(entry => entry.Value.CreatedAt)
+            .ThenBy(entry => entry.Key, StringComparer.Ordinal)
+            .Take(removeCount)
+            .Select(entry => entry.Key)
+            .ToList();
+
+        foreach (var key in keysToRemove)
+        {
+            _cache.Remove(key);
+        }
+    }
+
     private sealed class CacheEntry
     {
         public required LlmCompletionResult Result { get; init; }
         public DateTimeOffset CreatedAt { get; init; }
         public DateTimeOffset AccessedAt { get; set; }
-        public DateTimeOffset ExpiresAt { get; init; }
+        public DateTimeOffset ExpiresAt { get; set; }
+        public TimeSpan Ttl { get; init; }
     }
 }

@@ -1,6 +1,8 @@
+using System.Globalization;
 using System.Security.Cryptography;
 using System.Text;
 using System.Text.Json;
+using System.Text.Json.Nodes;

 namespace StellaOps.AdvisoryAI.Inference;

@@ -142,6 +144,13 @@ public sealed class SignedModelBundleManager : ISignedModelBundleManager
         PropertyNamingPolicy = JsonNamingPolicy.SnakeCaseLower
     };

+    private readonly TimeProvider _clock;
+
+    public SignedModelBundleManager(TimeProvider? clock = null)
+    {
+        _clock = clock ?? TimeProvider.System;
+    }
+
     public async Task<SigningResult> SignBundleAsync(
         string bundlePath,
         IModelBundleSigner signer,
@@ -166,11 +175,14 @@ public sealed class SignedModelBundleManager : ISignedModelBundleManager
         var manifestBytes = await File.ReadAllBytesAsync(manifestPath, cancellationToken);
         var manifestDigest = ComputeSha256(manifestBytes);

+        var signedAt = _clock.GetUtcNow();
+        var signedAtValue = signedAt.ToString("O", CultureInfo.InvariantCulture);
+
         // Create the payload (manifest digest + metadata)
         var payload = new
         {
             manifest_digest = manifestDigest,
-            signed_at = DateTime.UtcNow.ToString("o"),
+            signed_at = signedAtValue,
             bundle_path = Path.GetFileName(bundlePath)
         };
         var payloadJson = JsonSerializer.Serialize(payload, JsonOptions);
@@ -182,7 +194,7 @@ public sealed class SignedModelBundleManager : ISignedModelBundleManager
         var signature = await signer.SignAsync(pae, cancellationToken);
         var signatureBase64 = Convert.ToBase64String(signature);

-        var signatureId = $"{signer.CryptoScheme}-{DateTime.UtcNow:yyyyMMddHHmmss}-{manifestDigest[..8]}";
+        var signatureId = $"{signer.CryptoScheme}-{signedAt.UtcDateTime:yyyyMMddHHmmss}-{manifestDigest[..8]}";

         // Create DSSE envelope
         var envelope = new ModelBundleSignatureEnvelope
@@ -205,13 +217,13 @@ public sealed class SignedModelBundleManager : ISignedModelBundleManager
         await File.WriteAllTextAsync(envelopePath, envelopeJson, cancellationToken);

         // Update manifest with signature info
-        var manifest = await File.ReadAllTextAsync(manifestPath, cancellationToken);
-        var manifestObj = JsonSerializer.Deserialize<Dictionary<string, object>>(manifest);
-        if (manifestObj != null)
+        var manifestJson = await File.ReadAllTextAsync(manifestPath, cancellationToken);
+        var manifestNode = JsonNode.Parse(manifestJson);
+        if (manifestNode is JsonObject manifestObject)
         {
-            manifestObj["signature_id"] = signatureId;
-            manifestObj["crypto_scheme"] = signer.CryptoScheme;
-            var updatedManifest = JsonSerializer.Serialize(manifestObj, JsonOptions);
+            manifestObject["signature_id"] = signatureId;
+            manifestObject["crypto_scheme"] = signer.CryptoScheme;
+            var updatedManifest = manifestObject.ToJsonString(JsonOptions);
             await File.WriteAllTextAsync(manifestPath, updatedManifest, cancellationToken);
         }

@@ -5,7 +5,7 @@
     <LangVersion>preview</LangVersion>
     <Nullable>enable</Nullable>
     <ImplicitUsings>enable</ImplicitUsings>
-    <TreatWarningsAsErrors>false</TreatWarningsAsErrors>
+    <TreatWarningsAsErrors>true</TreatWarningsAsErrors>
   </PropertyGroup>
   <ItemGroup>
     <PackageReference Include="Microsoft.Extensions.Logging.Abstractions" />
src/AdvisoryAI/StellaOps.AdvisoryAI/TASKS.md (new file)
@@ -0,0 +1,10 @@
# Advisory AI Task Board

This board mirrors active sprint tasks for this module.
Source of truth: `docs/implplan/SPRINT_20251229_049_BE_csproj_audit_maint_tests.md`.

| Task ID | Status | Notes |
| --- | --- | --- |
| AUDIT-0017-M | DONE | Maintainability audit for StellaOps.AdvisoryAI. |
| AUDIT-0017-T | DONE | Test coverage audit for StellaOps.AdvisoryAI. |
| AUDIT-0017-A | DONE | Pending approval for changes. |
@@ -0,0 +1,172 @@
using System;
using System.Globalization;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.Extensions.Logging.Abstractions;
using Microsoft.Extensions.Options;
using StellaOps.AdvisoryAI.Inference.LlmProviders;
using Xunit;

using StellaOps.TestKit;
namespace StellaOps.AdvisoryAI.Tests;

public class LlmInferenceCacheTests
{
    [Trait("Category", TestCategories.Unit)]
    [Fact]
    public async Task CacheKey_UsesInvariantCulture()
    {
        var originalCulture = CultureInfo.CurrentCulture;
        var originalUiCulture = CultureInfo.CurrentUICulture;

        try
        {
            var options = Options.Create(new LlmInferenceCacheOptions
            {
                DeterministicOnly = false,
                DefaultTtl = TimeSpan.FromMinutes(5),
                ShortTtl = TimeSpan.FromMinutes(5)
            });
            var cache = new InMemoryLlmInferenceCache(options, NullLogger<InMemoryLlmInferenceCache>.Instance, new FakeTimeProvider());

            var request = new LlmCompletionRequest
            {
                UserPrompt = "hello",
                Temperature = 0.1,
                MaxTokens = 10,
                Model = "model-x"
            };
            var result = new LlmCompletionResult
            {
                Content = "ok",
                ModelId = "model-x",
                ProviderId = "openai",
                Deterministic = false
            };

            CultureInfo.CurrentCulture = new CultureInfo("de-DE");
            CultureInfo.CurrentUICulture = new CultureInfo("de-DE");
            await cache.SetAsync(request, "openai", result, CancellationToken.None);

            CultureInfo.CurrentCulture = new CultureInfo("en-US");
            CultureInfo.CurrentUICulture = new CultureInfo("en-US");
            var cached = await cache.TryGetAsync(request, "openai", CancellationToken.None);

            Assert.NotNull(cached);
        }
        finally
        {
            CultureInfo.CurrentCulture = originalCulture;
            CultureInfo.CurrentUICulture = originalUiCulture;
        }
    }

    [Trait("Category", TestCategories.Unit)]
    [Fact]
    public async Task SlidingExpiration_ExtendsExpiry()
    {
        var timeProvider = new FakeTimeProvider(new DateTimeOffset(2025, 12, 30, 12, 0, 0, TimeSpan.Zero));
        var options = Options.Create(new LlmInferenceCacheOptions
        {
            DeterministicOnly = false,
            SlidingExpiration = true,
            DefaultTtl = TimeSpan.FromMinutes(10),
            MaxTtl = TimeSpan.FromMinutes(30)
        });
        var cache = new InMemoryLlmInferenceCache(options, NullLogger<InMemoryLlmInferenceCache>.Instance, timeProvider);

        var request = new LlmCompletionRequest
        {
            UserPrompt = "hello",
            Temperature = 0.0,
            MaxTokens = 10,
            Model = "model-x"
        };
        var result = new LlmCompletionResult
        {
            Content = "ok",
            ModelId = "model-x",
            ProviderId = "openai",
            Deterministic = true
        };

        await cache.SetAsync(request, "openai", result, CancellationToken.None);

        timeProvider.Advance(TimeSpan.FromMinutes(9));
        var first = await cache.TryGetAsync(request, "openai", CancellationToken.None);
        Assert.NotNull(first);

        timeProvider.Advance(TimeSpan.FromMinutes(6));
        var second = await cache.TryGetAsync(request, "openai", CancellationToken.None);
        Assert.NotNull(second);
    }

    [Trait("Category", TestCategories.Unit)]
    [Fact]
    public async Task MaxEntries_EvictsOldestEntries()
    {
        var timeProvider = new FakeTimeProvider(new DateTimeOffset(2025, 12, 30, 13, 0, 0, TimeSpan.Zero));
        var options = Options.Create(new LlmInferenceCacheOptions
        {
            DeterministicOnly = false,
            MaxEntries = 1,
            DefaultTtl = TimeSpan.FromMinutes(10),
            ShortTtl = TimeSpan.FromMinutes(10)
        });
        var cache = new InMemoryLlmInferenceCache(options, NullLogger<InMemoryLlmInferenceCache>.Instance, timeProvider);

        var request1 = new LlmCompletionRequest
        {
            UserPrompt = "hello",
            Temperature = 0.0,
            MaxTokens = 10,
            Model = "model-x"
        };
        var request2 = new LlmCompletionRequest
        {
            UserPrompt = "world",
            Temperature = 0.0,
            MaxTokens = 10,
            Model = "model-x"
        };
        var result = new LlmCompletionResult
        {
            Content = "ok",
            ModelId = "model-x",
            ProviderId = "openai",
            Deterministic = true
        };

        await cache.SetAsync(request1, "openai", result, CancellationToken.None);
        timeProvider.Advance(TimeSpan.FromSeconds(1));
        await cache.SetAsync(request2, "openai", result, CancellationToken.None);

        var evicted = await cache.TryGetAsync(request1, "openai", CancellationToken.None);
        var retained = await cache.TryGetAsync(request2, "openai", CancellationToken.None);

        Assert.Null(evicted);
        Assert.NotNull(retained);
    }

    private sealed class FakeTimeProvider : TimeProvider
    {
        private DateTimeOffset current;

        public FakeTimeProvider()
            : this(DateTimeOffset.UtcNow)
        {
        }

        public FakeTimeProvider(DateTimeOffset start)
        {
            current = start;
        }

        public void Advance(TimeSpan delta)
        {
            current = current.Add(delta);
        }

        public override DateTimeOffset GetUtcNow() => current;
    }
}
@@ -0,0 +1,48 @@
using System;
using System.Collections.Generic;
using Microsoft.Extensions.Configuration;
using StellaOps.AdvisoryAI.Inference.LlmProviders;
using Xunit;

using StellaOps.TestKit;
namespace StellaOps.AdvisoryAI.Tests;

public class LlmProviderConfigValidationTests
{
    [Trait("Category", TestCategories.Unit)]
    [Fact]
    public void OpenAiConfigValidation_FailsWithoutApiKey()
    {
        var configuration = new ConfigurationBuilder()
            .AddInMemoryCollection(new Dictionary<string, string?>
            {
                ["enabled"] = "true",
                ["api:baseUrl"] = "https://api.openai.com/v1"
            })
            .Build();

        var plugin = new OpenAiLlmProviderPlugin();
        var validation = plugin.ValidateConfiguration(configuration);

        Assert.False(validation.IsValid);
        Assert.Contains(validation.Errors, error => error.Contains("API key", StringComparison.OrdinalIgnoreCase));
    }

    [Trait("Category", TestCategories.Unit)]
    [Fact]
    public void OpenAiConfigValidation_WarnsWhenDisabled()
    {
        var configuration = new ConfigurationBuilder()
            .AddInMemoryCollection(new Dictionary<string, string?>
            {
                ["enabled"] = "false"
            })
            .Build();

        var plugin = new OpenAiLlmProviderPlugin();
        var validation = plugin.ValidateConfiguration(configuration);

        Assert.True(validation.IsValid);
        Assert.Contains(validation.Warnings, warning => warning.Contains("disabled", StringComparison.OrdinalIgnoreCase));
    }
}
@@ -0,0 +1,102 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Text;
using System.Text.Json;
using System.Threading;
using System.Threading.Tasks;
using StellaOps.AdvisoryAI.Inference;
using Xunit;

using StellaOps.TestKit;
namespace StellaOps.AdvisoryAI.Tests;

public class SignedModelBundleManagerTests
{
    [Trait("Category", TestCategories.Unit)]
    [Fact]
    public async Task SignBundleAsync_UsesDeterministicTimestamp()
    {
        var tempRoot = Path.Combine(Path.GetTempPath(), "stellaops-ai", Guid.NewGuid().ToString("N"));
        Directory.CreateDirectory(tempRoot);

        try
        {
            var manifestPath = Path.Combine(tempRoot, "manifest.json");
            await File.WriteAllTextAsync(manifestPath, CreateManifestJson(), CancellationToken.None);

            var fixedTime = new DateTimeOffset(2025, 12, 31, 12, 34, 56, TimeSpan.Zero);
            var manager = new SignedModelBundleManager(new FakeTimeProvider(fixedTime));
            var signer = new FakeSigner("key-1", "ed25519");

            var result = await manager.SignBundleAsync(tempRoot, signer, CancellationToken.None);

            Assert.True(result.Success);
            Assert.StartsWith("ed25519-20251231123456-", result.SignatureId, StringComparison.Ordinal);

            var envelopePath = Path.Combine(tempRoot, "signature.dsse");
            var envelopeJson = await File.ReadAllTextAsync(envelopePath, CancellationToken.None);
            var envelope = JsonSerializer.Deserialize<ModelBundleSignatureEnvelope>(envelopeJson);
            Assert.NotNull(envelope);

            var payloadJson = Encoding.UTF8.GetString(Convert.FromBase64String(envelope!.Payload));
            using var document = JsonDocument.Parse(payloadJson);
            var signedAt = document.RootElement.GetProperty("signed_at").GetString();
            Assert.Equal("2025-12-31T12:34:56.0000000+00:00", signedAt);

            var manifestJson = await File.ReadAllTextAsync(manifestPath, CancellationToken.None);
            using var manifestDoc = JsonDocument.Parse(manifestJson);
            Assert.Equal(result.SignatureId, manifestDoc.RootElement.GetProperty("signature_id").GetString());
            Assert.Equal("ed25519", manifestDoc.RootElement.GetProperty("crypto_scheme").GetString());
        }
        finally
        {
            if (Directory.Exists(tempRoot))
            {
                Directory.Delete(tempRoot, recursive: true);
            }
        }
    }

    private static string CreateManifestJson()
    {
        return """
        {
            "version": "1.0.0",
            "name": "test-model",
            "description": "fixture",
            "license": "MIT",
            "size_category": "small",
            "quantizations": ["q4"],
            "files": [
                { "path": "model.bin", "digest": "abc", "size": 1, "type": "model" }
            ],
            "created_at": "2025-12-01T00:00:00Z"
        }
        """;
    }

    private sealed class FakeSigner : IModelBundleSigner
    {
        public FakeSigner(string keyId, string scheme)
        {
            KeyId = keyId;
            CryptoScheme = scheme;
        }

        public string KeyId { get; }
        public string CryptoScheme { get; }

        public Task<byte[]> SignAsync(byte[] data, CancellationToken cancellationToken = default)
            => Task.FromResult(Encoding.UTF8.GetBytes("sig"));
    }

    private sealed class FakeTimeProvider : TimeProvider
    {
        private readonly DateTimeOffset fixedNow;

        public FakeTimeProvider(DateTimeOffset fixedNow) => this.fixedNow = fixedNow;

        public override DateTimeOffset GetUtcNow() => fixedNow;
    }
}