Implement MongoDB-based storage for Pack Run approval, artifact, log, and state management

- Added MongoPackRunApprovalStore for managing approval states with MongoDB.
- Introduced MongoPackRunArtifactUploader for uploading and storing artifacts.
- Created MongoPackRunLogStore to handle logging of pack run events.
- Developed MongoPackRunStateStore for persisting and retrieving pack run states (an illustrative sketch follows the change summary below).
- Implemented unit tests for MongoDB stores to ensure correct functionality.
- Added MongoTaskRunnerTestContext for setting up MongoDB test environment.
- Enhanced PackRunStateFactory to correctly initialize state with gate reasons.
This commit is contained in: master
2025-11-07 10:01:35 +02:00
parent e5ffcd6535
commit a1ce3f74fa
122 changed files with 8730 additions and 914 deletions
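The Mongo-backed pack run stores themselves are not part of the hunks excerpted below. As a rough orientation only, a state store along the lines of MongoPackRunStateStore usually reduces to an upsert on save and a filtered lookup on load via the official MongoDB.Driver package. The document shape, collection name (pack_run_states), and member names (SaveAsync, TryGetAsync) in this sketch are assumptions for illustration, not the commit's actual API.

using System;
using System.Threading;
using System.Threading.Tasks;
using MongoDB.Driver;

// Hypothetical document shape for a persisted pack run state; field names are illustrative.
internal sealed class PackRunStateDocument
{
    public string Id { get; set; } = default!;       // pack run identifier, mapped to _id by convention
    public string State { get; set; } = default!;    // serialized pack run state payload
    public DateTime UpdatedAtUtc { get; set; }
}

// Minimal sketch of the upsert/load pattern a Mongo-backed state store could follow.
internal sealed class MongoPackRunStateStoreSketch
{
    private readonly IMongoCollection<PackRunStateDocument> _collection;

    public MongoPackRunStateStoreSketch(IMongoDatabase database)
        => _collection = database.GetCollection<PackRunStateDocument>("pack_run_states");

    public Task SaveAsync(string runId, string serializedState, CancellationToken ct) =>
        _collection.ReplaceOneAsync(
            Builders<PackRunStateDocument>.Filter.Eq(d => d.Id, runId),
            new PackRunStateDocument { Id = runId, State = serializedState, UpdatedAtUtc = DateTime.UtcNow },
            new ReplaceOptions { IsUpsert = true },   // upsert keeps saves idempotent per run id
            ct);

    public async Task<string?> TryGetAsync(string runId, CancellationToken ct)
    {
        var doc = await _collection
            .Find(Builders<PackRunStateDocument>.Filter.Eq(d => d.Id, runId))
            .FirstOrDefaultAsync(ct);
        return doc?.State;
    }
}

Upserting keyed on the run id keeps saves idempotent, which is the behaviour you would want if a pack run step is retried; the actual store in this commit may of course differ.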


@@ -1,3 +1,4 @@
using System;
using System.Collections.Generic;
using System.Collections.Immutable;
using System.Diagnostics.Metrics;
@@ -65,6 +66,58 @@ public sealed class AdvisoryPipelineExecutorTests : IDisposable
        saved.Prompt.Should().Be("{\"prompt\":\"value\"}");
    }

    [Fact]
    public async Task ExecuteAsync_RecordsTelemetryMeasurements()
    {
        using var listener = new MeterListener();
        var doubleMeasurements = new List<(string Name, double Value, IEnumerable<KeyValuePair<string, object?>> Tags)>();
        var longMeasurements = new List<(string Name, long Value, IEnumerable<KeyValuePair<string, object?>> Tags)>();

        // Subscribe only to instruments published by the advisory pipeline meter.
        listener.InstrumentPublished = (instrument, l) =>
        {
            if (instrument.Meter.Name == AdvisoryPipelineMetrics.MeterName)
            {
                l.EnableMeasurementEvents(instrument);
            }
        };
        listener.SetMeasurementEventCallback<double>((instrument, measurement, tags, state) =>
        {
            doubleMeasurements.Add((instrument.Name, measurement, tags.ToArray()));
        });
        listener.SetMeasurementEventCallback<long>((instrument, measurement, tags, state) =>
        {
            longMeasurements.Add((instrument.Name, measurement, tags.ToArray()));
        });
        listener.Start();

        var plan = BuildMinimalPlan(cacheKey: "CACHE-3");
        var assembler = new StubPromptAssembler();
        var guardrail = new StubGuardrailPipeline(blocked: true);
        var store = new InMemoryAdvisoryOutputStore();
        using var metrics = new AdvisoryPipelineMetrics(_meterFactory);
        var executor = new AdvisoryPipelineExecutor(assembler, guardrail, store, metrics, TimeProvider.System);
        var message = new AdvisoryTaskQueueMessage(plan.CacheKey, plan.Request);

        await executor.ExecuteAsync(plan, message, planFromCache: false, CancellationToken.None);
        listener.Dispose();

        // A blocked guardrail run should surface exactly one block and one validation failure.
        longMeasurements.Should().Contain(measurement =>
            measurement.Name == "advisory_ai_guardrail_blocks_total" &&
            measurement.Value == 1);
        longMeasurements.Should().Contain(measurement =>
            measurement.Name == "advisory_ai_validation_failures_total" &&
            measurement.Value == 1);
        doubleMeasurements.Should().Contain(measurement =>
            measurement.Name == "advisory_ai_citation_coverage_ratio" &&
            Math.Abs(measurement.Value - 1d) < 0.0001);
    }

    private static AdvisoryTaskPlan BuildMinimalPlan(string cacheKey)
    {
        var request = new AdvisoryTaskRequest(

@@ -0,0 +1,176 @@
using System;
using System.Collections.Generic;
using System.Collections.Immutable;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
using FluentAssertions;
using Microsoft.Extensions.Logging.Abstractions;
using Microsoft.Extensions.Options;
using StellaOps.AdvisoryAI.Caching;
using StellaOps.AdvisoryAI.Context;
using StellaOps.AdvisoryAI.Documents;
using StellaOps.AdvisoryAI.Guardrails;
using StellaOps.AdvisoryAI.Hosting;
using StellaOps.AdvisoryAI.Outputs;
using StellaOps.AdvisoryAI.Orchestration;
using StellaOps.AdvisoryAI.Tools;
using Xunit;

namespace StellaOps.AdvisoryAI.Tests;

public sealed class FileSystemAdvisoryPersistenceTests : IDisposable
{
    private readonly TempDirectory _tempDir = new();

    [Fact]
    public async Task PlanCache_PersistsPlanOnDisk()
    {
        var serviceOptions = Options.Create(new AdvisoryAiServiceOptions
        {
            Storage = new AdvisoryAiStorageOptions
            {
                PlanCacheDirectory = Path.Combine(_tempDir.Path, "plans"),
                OutputDirectory = Path.Combine(_tempDir.Path, "outputs")
            }
        });
        var cacheOptions = Options.Create(new AdvisoryPlanCacheOptions
        {
            DefaultTimeToLive = TimeSpan.FromMinutes(5),
            CleanupInterval = TimeSpan.FromMinutes(5)
        });
        var cache = new FileSystemAdvisoryPlanCache(serviceOptions, cacheOptions, NullLogger<FileSystemAdvisoryPlanCache>.Instance);
        var plan = CreatePlan("cache-123");

        await cache.SetAsync(plan.CacheKey, plan, CancellationToken.None);
        var reloaded = await cache.TryGetAsync(plan.CacheKey, CancellationToken.None);

        reloaded.Should().NotBeNull();
        reloaded!.CacheKey.Should().Be(plan.CacheKey);
        reloaded.Request.AdvisoryKey.Should().Be(plan.Request.AdvisoryKey);
        reloaded.StructuredChunks.Length.Should().Be(plan.StructuredChunks.Length);
        reloaded.Metadata.Should().ContainKey("advisory_key").WhoseValue.Should().Be("adv-key");
    }

    [Fact]
    public async Task OutputStore_PersistsOutputOnDisk()
    {
        var serviceOptions = Options.Create(new AdvisoryAiServiceOptions
        {
            Storage = new AdvisoryAiStorageOptions
            {
                PlanCacheDirectory = Path.Combine(_tempDir.Path, "plans"),
                OutputDirectory = Path.Combine(_tempDir.Path, "outputs")
            }
        });
        var store = new FileSystemAdvisoryOutputStore(serviceOptions, NullLogger<FileSystemAdvisoryOutputStore>.Instance);
        var plan = CreatePlan("cache-abc");
        var prompt = "{\"prompt\":\"value\"}";
        var guardrail = AdvisoryGuardrailResult.Allowed(prompt);
        var output = new AdvisoryPipelineOutput(
            plan.CacheKey,
            plan.Request.TaskType,
            plan.Request.Profile,
            prompt,
            ImmutableArray.Create(new AdvisoryPromptCitation(1, "doc-1", "chunk-1")),
            ImmutableDictionary<string, string>.Empty.Add("advisory_key", plan.Request.AdvisoryKey),
            guardrail,
            new AdvisoryDsseProvenance(plan.CacheKey, "hash", ImmutableArray<string>.Empty),
            DateTimeOffset.UtcNow,
            planFromCache: false);

        await store.SaveAsync(output, CancellationToken.None);
        var reloaded = await store.TryGetAsync(plan.CacheKey, plan.Request.TaskType, plan.Request.Profile, CancellationToken.None);

        reloaded.Should().NotBeNull();
        reloaded!.Prompt.Should().Be(prompt);
        reloaded.Metadata.Should().ContainKey("advisory_key").WhoseValue.Should().Be(plan.Request.AdvisoryKey);
    }

    private static AdvisoryTaskPlan CreatePlan(string cacheKey)
    {
        var request = new AdvisoryTaskRequest(
            AdvisoryTaskType.Summary,
            advisoryKey: "adv-key",
            artifactId: "artifact-1",
            artifactPurl: "pkg:docker/sample@1.0.0",
            policyVersion: "policy-1",
            profile: "default",
            preferredSections: new[] { "Summary" },
            forceRefresh: false);
        var chunk = AdvisoryChunk.Create("doc-1", "doc-1:chunk-1", "Summary", "para-1", "Summary text", new Dictionary<string, string> { ["section"] = "Summary" });
        var structured = ImmutableArray.Create(chunk);
        var vectorMatch = new VectorRetrievalMatch("doc-1", "doc-1:chunk-1", "Summary text", 0.95, new Dictionary<string, string>());
        var vectorResult = new AdvisoryVectorResult("summary-query", ImmutableArray.Create(vectorMatch));
        var sbom = SbomContextResult.Create(
            "artifact-1",
            "pkg:docker/sample@1.0.0",
            new[]
            {
                new SbomVersionTimelineEntry("1.0.0", DateTimeOffset.UtcNow.AddDays(-10), null, "affected", "scanner")
            },
            new[]
            {
                new SbomDependencyPath(
                    new[]
                    {
                        new SbomDependencyNode("root", "1.0.0"),
                        new SbomDependencyNode("runtime-lib", "2.1.0")
                    },
                    isRuntime: true)
            });
        var dependency = DependencyAnalysisResult.Create(
            "artifact-1",
            new[]
            {
                new DependencyNodeSummary("runtime-lib", new[] { "2.1.0" }, 1, 0)
            },
            new Dictionary<string, string> { ["artifact_id"] = "artifact-1" });
        var metadata = ImmutableDictionary<string, string>.Empty.Add("advisory_key", "adv-key");
        var budget = new AdvisoryTaskBudget { PromptTokens = 1024, CompletionTokens = 256 };

        return new AdvisoryTaskPlan(
            request,
            cacheKey,
            promptTemplate: "prompts/advisory/summary.liquid",
            structured,
            ImmutableArray.Create(vectorResult),
            sbom,
            dependency,
            budget,
            metadata);
    }

    public void Dispose()
    {
        _tempDir.Dispose();
    }

    private sealed class TempDirectory : IDisposable
    {
        public TempDirectory()
        {
            Path = System.IO.Path.Combine(System.IO.Path.GetTempPath(), $"advisory-ai-tests-{Guid.NewGuid():N}");
            Directory.CreateDirectory(Path);
        }

        public string Path { get; }

        public void Dispose()
        {
            try
            {
                if (Directory.Exists(Path))
                {
                    Directory.Delete(Path, recursive: true);
                }
            }
            catch
            {
                // ignore cleanup failures in tests
            }
        }
    }
}