Add impact index fixture and filesystem artifact uploader
Some checks failed
Docs CI / lint-and-preview (push) Has been cancelled
- Introduced a sample BOM index JSON file for impact index testing.
- Created unit tests for the impact index fixture to ensure proper loading of sample images.
- Implemented the FilesystemPackRunArtifactUploader class to handle artifact uploads to the local filesystem.
- Added comprehensive tests for the FilesystemPackRunArtifactUploader, covering file copying, missing files, and expression outputs.
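For orientation, here is a minimal sketch (not part of this commit) of how the new uploader is invoked, using only the constructor and `UploadAsync` signatures introduced below. The helper name and the artifacts root are assumptions; `context`, `state`, and `outputs` come from the existing pack-run pipeline, as exercised by the tests in this diff.

```csharp
using System;
using System.Collections.Generic;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.Extensions.Logging.Abstractions;
using StellaOps.TaskRunner.Core.Execution;
using StellaOps.TaskRunner.Core.Planning;
using StellaOps.TaskRunner.Infrastructure.Execution;

// Hypothetical helper; the root path mirrors the new PackRunWorkerOptions.ArtifactsPath default.
static async Task MirrorOutputsAsync(
    PackRunExecutionContext context,
    PackRunState state,
    IReadOnlyList<TaskPackPlanOutput> outputs,
    CancellationToken cancellationToken)
{
    var uploader = new FilesystemPackRunArtifactUploader(
        rootPath: Path.Combine(AppContext.BaseDirectory, "artifacts"),
        timeProvider: TimeProvider.System,
        logger: NullLogger<FilesystemPackRunArtifactUploader>.Instance);

    // Produces <root>/<runId>/artifact-manifest.json plus files/ and expressions/ subfolders.
    await uploader.UploadAsync(context, state, outputs, cancellationToken);
}
```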
@@ -18,7 +18,6 @@ using StellaOps.Excititor.Policy;
using StellaOps.Excititor.Storage.Mongo;
using StellaOps.Excititor.WebService.Endpoints;
using StellaOps.Excititor.WebService.Services;
using StellaOps.Excititor.Core;
using StellaOps.Excititor.Core.Aoc;

var builder = WebApplication.CreateBuilder(args);

@@ -2,6 +2,8 @@ If you are working on this file you need to read docs/modules/excititor/ARCHITEC
# TASKS
| Task | Owner(s) | Depends on | Notes |
|---|---|---|---|
|EXCITITOR-ATTEST-01-003 – Verification suite & observability|Team Excititor Attestation|EXCITITOR-ATTEST-01-002|DOING (2025-10-22) – Continuing implementation: build `IVexAttestationVerifier`, wire metrics/logging, and add regression tests. Draft plan in `EXCITITOR-ATTEST-01-003-plan.md` (2025-10-19) guides scope; updating with worknotes as progress lands.<br>2025-10-31: Verifier now tolerates duplicate source providers from AOC raw projections, downgrades offline Rekor verification to a degraded result, and enforces trusted signer registry checks with detailed diagnostics/tests.<br>2025-11-05 14:35Z: Picking up diagnostics record/ActivitySource work and aligning metrics dimensions before wiring verifier into WebService/Worker paths.|
|EXCITITOR-ATTEST-01-003 – Verification suite & observability|Team Excititor Attestation|EXCITITOR-ATTEST-01-002|TODO (2025-11-06) – Continuing implementation: build `IVexAttestationVerifier`, wire metrics/logging, and add regression tests. Draft plan in `EXCITITOR-ATTEST-01-003-plan.md` (2025-10-19) guides scope; updating with worknotes as progress lands.<br>2025-10-31: Verifier now tolerates duplicate source providers from AOC raw projections, downgrades offline Rekor verification to a degraded result, and enforces trusted signer registry checks with detailed diagnostics/tests.<br>2025-11-05 14:35Z: Picking up diagnostics record/ActivitySource work and aligning metrics dimensions before wiring verifier into WebService/Worker paths.|
> 2025-11-05 19:10Z: Worker signature verifier now emits structured diagnostics/metrics via `VexAttestationDiagnostics`; attestation verification results flow into metric labels and logs.
> 2025-11-06 07:12Z: Export verifier builds unblocked; Excititor worker + web service test suites pass with diagnostics wiring (`dotnet test` invocations succeed with staged libssl1.1).
> 2025-11-06 07:55Z: Paused after documenting OpenSSL shim usage; follow-up automation tracked under `DEVOPS-OPENSSL-11-001/002`.
> Remark (2025-10-22): Added verifier implementation + metrics/tests; next steps include wiring into WebService/Worker flows and expanding negative-path coverage.
@@ -98,6 +98,7 @@ public sealed class VexExportEngine : IExportEngine
cached.PolicyDigest,
cached.ConsensusDigest,
cached.ScoreDigest,
cached.QuietProvenance,
cached.Attestation,
cached.SizeBytes);
}

@@ -130,7 +130,7 @@ internal static class VexExportEnvelopeBuilder
}
}

internal sealed record VexExportEnvelopeContext(
public sealed record VexExportEnvelopeContext(
ImmutableArray<VexConsensus> Consensus,
string ConsensusCanonicalJson,
VexContentAddress ConsensusDigest,

@@ -280,7 +280,7 @@ public sealed class VexMirrorBundlePublisher : IVexMirrorBundlePublisher
ToRelativePath(mirrorRoot, manifestPath),
manifestBytes.LongLength,
ComputeDigest(manifestBytes),
signature: null);
Signature: null);

var bundleDescriptor = manifestDocument.Bundle with
{
@@ -298,7 +298,7 @@ public sealed class VexMirrorBundlePublisher : IVexMirrorBundlePublisher
manifestDocument.DomainId,
manifestDocument.DisplayName,
manifestDocument.GeneratedAt,
manifestDocument.Exports.Length,
manifestDocument.Exports.Count,
manifestDescriptor,
bundleDescriptor,
exportKeys));
@@ -474,6 +474,11 @@ public sealed class VexMirrorBundlePublisher : IVexMirrorBundlePublisher

private JsonMirrorSigningContext PrepareSigningContext(MirrorSigningOptions signingOptions)
{
if (_cryptoRegistry is null)
{
throw new InvalidOperationException("Mirror signing requires a crypto provider registry to be configured.");
}

var algorithm = string.IsNullOrWhiteSpace(signingOptions.Algorithm)
? SignatureAlgorithms.Es256
: signingOptions.Algorithm.Trim();
@@ -496,7 +501,7 @@ public sealed class VexMirrorBundlePublisher : IVexMirrorBundlePublisher
var provider = ResolveProvider(algorithm, providerHint);
var signingKey = LoadSigningKey(signingOptions, provider, algorithm);
provider.UpsertSigningKey(signingKey);
resolved = _cryptoRegistry.ResolveSigner(CryptoCapability.Signing, algorithm, new CryptoKeyReference(keyId, provider.Name), provider.Name);
resolved = _cryptoRegistry!.ResolveSigner(CryptoCapability.Signing, algorithm, new CryptoKeyReference(keyId, provider.Name), provider.Name);
}

return new JsonMirrorSigningContext(resolved.Signer, algorithm, resolved.ProviderName, _timeProvider);

@@ -4,7 +4,7 @@
|----|--------|----------|------------|-------------|---------------|
| SCAN-REPLAY-186-001 | TODO | Scanner WebService Guild | REPLAY-CORE-185-001 | Implement scan `record` mode producing replay manifests/bundles, capture policy/feed/tool hashes, and update `docs/modules/scanner/architecture.md` referencing `docs/replay/DETERMINISTIC_REPLAY.md` Section 6. | API/worker integration tests cover record mode; docs merged; replay artifacts stored per spec. |
| SCANNER-SURFACE-02 | DONE (2025-11-05) | Scanner WebService Guild | SURFACE-FS-02 | Publish Surface.FS pointers (CAS URIs, manifests) via scan/report APIs and update attestation metadata.<br>2025-11-05: Surface pointers projected through scan/report endpoints, orchestrator samples + DSSE fixtures refreshed with manifest block, readiness tests updated to use validator stub. | OpenAPI updated; clients regenerated; integration tests validate pointer presence and tenancy. |
| SCANNER-ENV-02 | DOING (2025-11-02) | Scanner WebService Guild, Ops Guild | SURFACE-ENV-02 | Wire Surface.Env helpers into WebService hosting (cache roots, feature flags) and document configuration.<br>2025-11-02: Cache root resolution switched to helper; feature flag bindings updated; Helm/Compose updates pending review.<br>2025-11-05 14:55Z: Aligning readiness checks, docs, and Helm/Compose templates with Surface.Env outputs and planning test coverage for configuration fallbacks.<br>2025-11-06 17:05Z: Surface.Env documentation/README refreshed; warning catalogue captured for ops handoff. | Service uses helper; env table documented; helm/compose templates updated. |
| SCANNER-ENV-02 | TODO (2025-11-06) | Scanner WebService Guild, Ops Guild | SURFACE-ENV-02 | Wire Surface.Env helpers into WebService hosting (cache roots, feature flags) and document configuration.<br>2025-11-02: Cache root resolution switched to helper; feature flag bindings updated; Helm/Compose updates pending review.<br>2025-11-05 14:55Z: Aligning readiness checks, docs, and Helm/Compose templates with Surface.Env outputs and planning test coverage for configuration fallbacks.<br>2025-11-06 17:05Z: Surface.Env documentation/README refreshed; warning catalogue captured for ops handoff.<br>2025-11-06 07:45Z: Helm values (dev/stage/prod/airgap/mirror) and Compose examples updated with `SCANNER_SURFACE_*` defaults plus rollout warning note in `deploy/README.md`.<br>2025-11-06 07:55Z: Paused; follow-up automation captured under `DEVOPS-OPENSSL-11-001/002` and pending Surface.Env readiness tests. | Service uses helper; env table documented; helm/compose templates updated. |
> 2025-11-05 19:18Z: Added configurator to project wiring and unit test ensuring Surface.Env cache root is honoured.
| SCANNER-SECRETS-02 | DOING (2025-11-02) | Scanner WebService Guild, Security Guild | SURFACE-SECRETS-02 | Replace ad-hoc secret wiring with Surface.Secrets for report/export operations (registry and CAS tokens).<br>2025-11-02: Export/report flows now depend on Surface.Secrets stub; integration tests in progress. | Secrets fetched through shared provider; unit/integration tests cover rotation + failure cases. |
| SCANNER-EVENTS-16-301 | BLOCKED (2025-10-26) | Scanner WebService Guild | ORCH-SVC-38-101, NOTIFY-SVC-38-001 | Emit orchestrator-compatible envelopes (`scanner.event.*`) and update integration tests to verify Notifier ingestion (no Redis queue coupling). | Tests assert envelope schema + orchestrator publish; Notifier consumer harness passes; docs updated with new event contract. Blocked by .NET 10 preview OpenAPI/Auth dependency drift preventing `dotnet test` completion. |
@@ -4,6 +4,6 @@
|----|--------|----------|------------|-------------|---------------|
| SCAN-REPLAY-186-002 | TODO | Scanner Worker Guild | REPLAY-CORE-185-001 | Enforce deterministic analyzer execution when consuming replay input bundles, emit layer Merkle metadata, and author `docs/modules/scanner/deterministic-execution.md` summarising invariants from `docs/replay/DETERMINISTIC_REPLAY.md` Section 4. | Replay mode analyzers pass determinism tests; new doc merged; integration fixtures updated. |
| SCANNER-SURFACE-01 | DOING (2025-11-02) | Scanner Worker Guild | SURFACE-FS-02 | Persist Surface.FS manifests after analyzer stages, including layer CAS metadata and EntryTrace fragments.<br>2025-11-02: Draft Surface.FS manifests emitted for sample scans; telemetry counters under review. | Integration tests prove cache entries exist; telemetry counters exported. |
| SCANNER-ENV-01 | DOING (2025-11-02) | Scanner Worker Guild | SURFACE-ENV-02 | Replace ad-hoc environment reads with `StellaOps.Scanner.Surface.Env` helpers for cache roots and CAS endpoints.<br>2025-11-02: Worker bootstrap now resolves cache roots via helper; warning path documented; smoke tests running.<br>2025-11-05 14:55Z: Extending helper usage into cache/secrets configuration, updating worker validator wiring, and drafting docs/tests for new Surface.Env outputs.<br>2025-11-06 17:05Z: README/design docs updated with warning catalogue; startup logging guidance captured for ops runbooks. | Worker boots with helper; misconfiguration warnings documented; smoke tests updated. |
| SCANNER-ENV-01 | TODO (2025-11-06) | Scanner Worker Guild | SURFACE-ENV-02 | Replace ad-hoc environment reads with `StellaOps.Scanner.Surface.Env` helpers for cache roots and CAS endpoints.<br>2025-11-02: Worker bootstrap now resolves cache roots via helper; warning path documented; smoke tests running.<br>2025-11-05 14:55Z: Extending helper usage into cache/secrets configuration, updating worker validator wiring, and drafting docs/tests for new Surface.Env outputs.<br>2025-11-06 17:05Z: README/design docs updated with warning catalogue; startup logging guidance captured for ops runbooks.<br>2025-11-06 07:45Z: Helm/Compose env profiles (dev/stage/prod/airgap/mirror) now seed `SCANNER_SURFACE_*` defaults to keep worker cache roots aligned with Surface.Env helpers.<br>2025-11-06 07:55Z: Paused; pending automation tracked via `DEVOPS-OPENSSL-11-001/002` and Surface.Env test fixtures. | Worker boots with helper; misconfiguration warnings documented; smoke tests updated. |
> 2025-11-05 19:18Z: Bound `SurfaceCacheOptions` root directory to resolved Surface.Env settings and added unit coverage around the configurator.
| SCANNER-SECRETS-01 | DOING (2025-11-02) | Scanner Worker Guild, Security Guild | SURFACE-SECRETS-02 | Adopt `StellaOps.Scanner.Surface.Secrets` for registry/CAS credentials during scan execution.<br>2025-11-02: Surface.Secrets provider wired for CAS token retrieval; integration tests added. | Secrets fetched via shared provider; legacy secret code removed; integration tests cover rotation. |
@@ -0,0 +1,23 @@
{
  "schema": "scheduler-impact-index@1",
  "generatedAt": "2025-10-01T00:00:00Z",
  "image": {
    "repository": "registry.stellaops.test/team/sample-service",
    "digest": "sha256:0123456789abcdef0123456789abcdef0123456789abcdef0123456789abcdef",
    "tag": "1.0.0"
  },
  "components": [
    {
      "purl": "pkg:docker/sample-service@1.0.0",
      "usage": [
        "runtime"
      ]
    },
    {
      "purl": "pkg:pypi/requests@2.31.0",
      "usage": [
        "usedByEntrypoint"
      ]
    }
  ]
}
@@ -8,6 +8,7 @@
<ProjectReference Include="../StellaOps.Scheduler.Models/StellaOps.Scheduler.Models.csproj" />
</ItemGroup>
<ItemGroup>
<EmbeddedResource Include="Fixtures\**\*.json" />
<EmbeddedResource Include="..\..\samples\scanner\images\**\bom-index.json"
Link="Fixtures\%(RecursiveDir)%(Filename)%(Extension)" />
</ItemGroup>

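A brief sketch (not in the diff) of how the fixture embedded above could be read back at runtime. It assumes the `<EmbeddedResource>` entries land in the assembly that defines `ImpactIndexStubOptions` and that the manifest resource name keeps the `bom-index.json` suffix; the exact prefix depends on the project's root namespace and the `Link` path.

```csharp
using System;
using System.Linq;
using System.Text.Json;
using StellaOps.Scheduler.ImpactIndex;

static JsonDocument LoadSampleBomIndex()
{
    // Assumption: the embedded fixtures live alongside ImpactIndexStubOptions.
    var assembly = typeof(ImpactIndexStubOptions).Assembly;
    var resourceName = assembly
        .GetManifestResourceNames()
        .First(name => name.EndsWith("bom-index.json", StringComparison.Ordinal));

    using var stream = assembly.GetManifestResourceStream(resourceName)!;
    return JsonDocument.Parse(stream);
}
```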
@@ -0,0 +1,52 @@
using System.IO;
using System.Linq;
using System.Threading.Tasks;
using Microsoft.Extensions.Logging.Abstractions;
using StellaOps.Scheduler.ImpactIndex;
using StellaOps.Scheduler.Models;
using Xunit;

namespace StellaOps.Scheduler.WebService.Tests;

public sealed class ImpactIndexFixtureTests
{
    [Fact]
    public void FixtureDirectoryExists()
    {
        var fixtureDirectory = GetFixtureDirectory();
        Assert.True(Directory.Exists(fixtureDirectory), $"Fixture directory not found: {fixtureDirectory}");

        var files = Directory.EnumerateFiles(fixtureDirectory, "bom-index.json", SearchOption.AllDirectories).ToArray();
        Assert.NotEmpty(files);

        var sampleFile = Path.Combine(fixtureDirectory, "sample", "bom-index.json");
        Assert.Contains(sampleFile, files);
    }

    [Fact]
    public async Task FixtureImpactIndexLoadsSampleImage()
    {
        var fixtureDirectory = GetFixtureDirectory();
        var options = new ImpactIndexStubOptions
        {
            FixtureDirectory = fixtureDirectory,
            SnapshotId = "tests/impact-index-stub"
        };

        var index = new FixtureImpactIndex(options, TimeProvider.System, NullLogger<FixtureImpactIndex>.Instance);
        var selector = new Selector(SelectorScope.AllImages);

        var impactSet = await index.ResolveAllAsync(selector, usageOnly: false);

        Assert.True(impactSet.Total > 0, "Expected the fixture impact index to load at least one image.");
    }

    private static string GetFixtureDirectory()
    {
        var assemblyLocation = typeof(SchedulerWebApplicationFactory).Assembly.Location;
        var assemblyDirectory = Path.GetDirectoryName(assemblyLocation)
            ?? AppContext.BaseDirectory;

        return Path.GetFullPath(Path.Combine(assemblyDirectory, "seed-data", "impact-index"));
    }
}
@@ -11,16 +11,16 @@ using Microsoft.Extensions.DependencyInjection;
using StellaOps.Scheduler.Models;
using StellaOps.Scheduler.Queue;
using StellaOps.Scheduler.Storage.Mongo.Repositories;

namespace StellaOps.Scheduler.WebService.Tests;

public sealed class RunEndpointTests : IClassFixture<WebApplicationFactory<Program>>
{
private readonly WebApplicationFactory<Program> _factory;

public RunEndpointTests(WebApplicationFactory<Program> factory)
{
_factory = factory;
}

[Fact]
@@ -100,13 +100,13 @@ public sealed class RunEndpointTests : IClassFixture<WebApplicationFactory<Progr
var scheduleId = scheduleJson.GetProperty("schedule").GetProperty("id").GetString();
Assert.False(string.IsNullOrEmpty(scheduleId));

var previewResponse = await client.PostAsJsonAsync("/api/v1/scheduler/runs/preview", new
{
scheduleId,
usageOnly = true,
sampleSize = 3
});

previewResponse.EnsureSuccessStatusCode();
var preview = await previewResponse.Content.ReadFromJsonAsync<JsonElement>();
Assert.True(preview.GetProperty("total").GetInt32() >= 0);

@@ -1,11 +1,14 @@
using System;
using System.Collections.Generic;
using System.IO;
using Microsoft.AspNetCore.Hosting;
using Microsoft.AspNetCore.Mvc.Testing;
using Microsoft.Extensions.Configuration;
using Microsoft.Extensions.DependencyInjection;
using Microsoft.Extensions.DependencyInjection.Extensions;
using StellaOps.Scheduler.WebService.Options;
using StellaOps.Scheduler.WebService.Runs;
using StellaOps.Scheduler.ImpactIndex;

namespace StellaOps.Scheduler.WebService.Tests;

@@ -15,6 +18,8 @@ public sealed class SchedulerWebApplicationFactory : WebApplicationFactory<Progr
{
builder.ConfigureAppConfiguration((_, configuration) =>
{
var fixtureDirectory = GetFixtureDirectory();

configuration.AddInMemoryCollection(new[]
{
new KeyValuePair<string, string?>("Scheduler:Authority:Enabled", "false"),
@@ -27,12 +32,22 @@ public sealed class SchedulerWebApplicationFactory : WebApplicationFactory<Progr
new KeyValuePair<string, string?>("Scheduler:Events:Webhooks:Excitor:Enabled", "true"),
new KeyValuePair<string, string?>("Scheduler:Events:Webhooks:Excitor:HmacSecret", "excitor-secret"),
new KeyValuePair<string, string?>("Scheduler:Events:Webhooks:Excitor:RateLimitRequests", "20"),
new KeyValuePair<string, string?>("Scheduler:Events:Webhooks:Excitor:RateLimitWindowSeconds", "60")
new KeyValuePair<string, string?>("Scheduler:Events:Webhooks:Excitor:RateLimitWindowSeconds", "60"),
new KeyValuePair<string, string?>("Scheduler:ImpactIndex:FixtureDirectory", fixtureDirectory)
});
});

builder.ConfigureServices(services =>
{
var fixtureDirectory = GetFixtureDirectory();

services.RemoveAll<ImpactIndexStubOptions>();
services.AddSingleton(new ImpactIndexStubOptions
{
FixtureDirectory = fixtureDirectory,
SnapshotId = "tests/impact-index-stub"
});

services.Configure<SchedulerEventsOptions>(options =>
{
options.Webhooks ??= new SchedulerInboundWebhooksOptions();
@@ -52,4 +67,14 @@ public sealed class SchedulerWebApplicationFactory : WebApplicationFactory<Progr
});
});
}

private static string GetFixtureDirectory()
{
var assemblyLocation = typeof(SchedulerWebApplicationFactory).Assembly.Location;
var assemblyDirectory = Path.GetDirectoryName(assemblyLocation)
?? AppContext.BaseDirectory;

var fixtureDirectory = Path.Combine(assemblyDirectory, "seed-data", "impact-index");
return Path.GetFullPath(fixtureDirectory);
}
}

@@ -18,4 +18,9 @@
<ItemGroup>
<ProjectReference Include="../../StellaOps.Scheduler.WebService/StellaOps.Scheduler.WebService.csproj" />
</ItemGroup>
<ItemGroup>
<Content Include="seed-data/impact-index/**">
<CopyToOutputDirectory>PreserveNewest</CopyToOutputDirectory>
</Content>
</ItemGroup>
</Project>

@@ -0,0 +1,23 @@
{
  "schema": "scheduler-impact-index@1",
  "generatedAt": "2025-10-01T00:00:00Z",
  "image": {
    "repository": "registry.stellaops.test/team/sample-service",
    "digest": "sha256:0123456789abcdef0123456789abcdef0123456789abcdef0123456789abcdef",
    "tag": "1.0.0"
  },
  "components": [
    {
      "purl": "pkg:docker/sample-service@1.0.0",
      "usage": [
        "runtime"
      ]
    },
    {
      "purl": "pkg:pypi/requests@2.31.0",
      "usage": [
        "usedByEntrypoint"
      ]
    }
  ]
}
@@ -0,0 +1,239 @@
using System.Text.Json;
using System.Text.Json.Nodes;
using Microsoft.Extensions.Logging;
using StellaOps.TaskRunner.Core.Execution;
using StellaOps.TaskRunner.Core.Planning;

namespace StellaOps.TaskRunner.Infrastructure.Execution;

/// <summary>
/// Stores pack run artifacts on the local file system so they can be mirrored to the eventual remote store.
/// </summary>
public sealed class FilesystemPackRunArtifactUploader : IPackRunArtifactUploader
{
    private static readonly JsonSerializerOptions SerializerOptions = new(JsonSerializerDefaults.Web)
    {
        WriteIndented = true
    };

    private readonly string rootPath;
    private readonly ILogger<FilesystemPackRunArtifactUploader> logger;
    private readonly TimeProvider timeProvider;

    public FilesystemPackRunArtifactUploader(
        string rootPath,
        TimeProvider? timeProvider,
        ILogger<FilesystemPackRunArtifactUploader> logger)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(rootPath);

        this.rootPath = Path.GetFullPath(rootPath);
        this.logger = logger ?? throw new ArgumentNullException(nameof(logger));
        this.timeProvider = timeProvider ?? TimeProvider.System;

        Directory.CreateDirectory(this.rootPath);
    }

    public async Task UploadAsync(
        PackRunExecutionContext context,
        PackRunState state,
        IReadOnlyList<TaskPackPlanOutput> outputs,
        CancellationToken cancellationToken)
    {
        ArgumentNullException.ThrowIfNull(context);
        ArgumentNullException.ThrowIfNull(state);
        ArgumentNullException.ThrowIfNull(outputs);

        if (outputs.Count == 0)
        {
            return;
        }

        var destinationRoot = Path.Combine(rootPath, SanitizeFileName(context.RunId));
        var filesRoot = Path.Combine(destinationRoot, "files");
        var expressionsRoot = Path.Combine(destinationRoot, "expressions");

        Directory.CreateDirectory(destinationRoot);

        var manifest = new ArtifactManifest(
            context.RunId,
            timeProvider.GetUtcNow(),
            new List<ArtifactRecord>(outputs.Count));

        foreach (var output in outputs)
        {
            cancellationToken.ThrowIfCancellationRequested();

            var record = await ProcessOutputAsync(
                context,
                output,
                destinationRoot,
                filesRoot,
                expressionsRoot,
                cancellationToken).ConfigureAwait(false);

            manifest.Outputs.Add(record);
        }

        var manifestPath = Path.Combine(destinationRoot, "artifact-manifest.json");
        await using (var stream = File.Open(manifestPath, FileMode.Create, FileAccess.Write, FileShare.None))
        {
            await JsonSerializer.SerializeAsync(stream, manifest, SerializerOptions, cancellationToken)
                .ConfigureAwait(false);
        }

        logger.LogInformation(
            "Pack run {RunId} artifact manifest written to {Path} with {Count} output entries.",
            context.RunId,
            manifestPath,
            manifest.Outputs.Count);
    }

    private async Task<ArtifactRecord> ProcessOutputAsync(
        PackRunExecutionContext context,
        TaskPackPlanOutput output,
        string destinationRoot,
        string filesRoot,
        string expressionsRoot,
        CancellationToken cancellationToken)
    {
        var sourcePath = ResolveString(output.Path);
        var expressionNode = ResolveExpression(output.Expression);
        var status = "skipped";
        string? storedPath = null;
        string? notes = null;

        if (IsFileOutput(output))
        {
            if (string.IsNullOrWhiteSpace(sourcePath))
            {
                status = "unresolved";
                notes = "Output path requires runtime value.";
            }
            else if (!File.Exists(sourcePath))
            {
                status = "missing";
                notes = $"Source file '{sourcePath}' not found.";
                logger.LogWarning(
                    "Pack run {RunId} output {Output} referenced missing file {Path}.",
                    context.RunId,
                    output.Name,
                    sourcePath);
            }
            else
            {
                Directory.CreateDirectory(filesRoot);

                var destinationPath = Path.Combine(filesRoot, DetermineDestinationFileName(output, sourcePath));
                Directory.CreateDirectory(Path.GetDirectoryName(destinationPath)!);

                await CopyFileAsync(sourcePath, destinationPath, cancellationToken).ConfigureAwait(false);
                storedPath = GetRelativePath(destinationPath, destinationRoot);
                status = "copied";

                logger.LogInformation(
                    "Pack run {RunId} output {Output} copied to {Destination}.",
                    context.RunId,
                    output.Name,
                    destinationPath);
            }
        }

        if (expressionNode is not null)
        {
            Directory.CreateDirectory(expressionsRoot);

            var expressionPath = Path.Combine(
                expressionsRoot,
                $"{SanitizeFileName(output.Name)}.json");

            var json = expressionNode.ToJsonString(SerializerOptions);
            await File.WriteAllTextAsync(expressionPath, json, cancellationToken).ConfigureAwait(false);

            storedPath ??= GetRelativePath(expressionPath, destinationRoot);
            status = status == "copied" ? "copied" : "materialized";
        }

        return new ArtifactRecord(
            output.Name,
            output.Type,
            sourcePath,
            storedPath,
            status,
            notes);
    }

    private static async Task CopyFileAsync(string sourcePath, string destinationPath, CancellationToken cancellationToken)
    {
        await using var source = File.Open(sourcePath, FileMode.Open, FileAccess.Read, FileShare.Read);
        await using var destination = File.Open(destinationPath, FileMode.Create, FileAccess.Write, FileShare.None);
        await source.CopyToAsync(destination, cancellationToken).ConfigureAwait(false);
    }

    private static bool IsFileOutput(TaskPackPlanOutput output)
        => string.Equals(output.Type, "file", StringComparison.OrdinalIgnoreCase);

    private static string DetermineDestinationFileName(TaskPackPlanOutput output, string sourcePath)
    {
        var extension = Path.GetExtension(sourcePath);
        var baseName = SanitizeFileName(output.Name);

        if (!string.IsNullOrWhiteSpace(extension) &&
            !baseName.EndsWith(extension, StringComparison.OrdinalIgnoreCase))
        {
            return baseName + extension;
        }

        return baseName;
    }

    private static string? ResolveString(TaskPackPlanParameterValue? parameter)
    {
        if (parameter is null || parameter.RequiresRuntimeValue || parameter.Value is null)
        {
            return null;
        }

        if (parameter.Value is JsonValue jsonValue && jsonValue.TryGetValue<string>(out var value))
        {
            return value;
        }

        return null;
    }

    private static JsonNode? ResolveExpression(TaskPackPlanParameterValue? parameter)
    {
        if (parameter is null || parameter.RequiresRuntimeValue)
        {
            return null;
        }

        return parameter.Value;
    }

    private static string SanitizeFileName(string value)
    {
        var result = value;
        foreach (var invalid in Path.GetInvalidFileNameChars())
        {
            result = result.Replace(invalid, '_');
        }

        return string.IsNullOrWhiteSpace(result) ? "output" : result;
    }

    private static string GetRelativePath(string path, string root)
        => Path.GetRelativePath(root, path)
            .Replace('\\', '/');

    private sealed record ArtifactManifest(string RunId, DateTimeOffset UploadedAt, List<ArtifactRecord> Outputs);

    private sealed record ArtifactRecord(
        string Name,
        string Type,
        string? SourcePath,
        string? StoredPath,
        string Status,
        string? Notes);
}
@@ -0,0 +1,138 @@
using System.Text.Json;
using System.Text.Json.Nodes;
using Microsoft.Extensions.Logging.Abstractions;
using StellaOps.TaskRunner.Core.Execution;
using StellaOps.TaskRunner.Core.Planning;
using StellaOps.TaskRunner.Infrastructure.Execution;
using Xunit;

namespace StellaOps.TaskRunner.Tests;

public sealed class FilesystemPackRunArtifactUploaderTests : IDisposable
{
    private readonly string artifactsRoot;

    public FilesystemPackRunArtifactUploaderTests()
    {
        artifactsRoot = Path.Combine(Path.GetTempPath(), Guid.NewGuid().ToString("n"));
    }

    [Fact]
    public async Task CopiesFileOutputs()
    {
        var sourceFile = Path.Combine(Path.GetTempPath(), $"{Guid.NewGuid():n}.txt");
        await File.WriteAllTextAsync(sourceFile, "artifact-content", TestContext.Current.CancellationToken);

        var uploader = CreateUploader();
        var output = CreateFileOutput("bundle", sourceFile);
        var context = CreateContext();
        var state = CreateState(context);

        await uploader.UploadAsync(context, state, new[] { output }, TestContext.Current.CancellationToken);

        var runPath = Path.Combine(artifactsRoot, context.RunId);
        var filesDirectory = Path.Combine(runPath, "files");
        var copiedFiles = Directory.GetFiles(filesDirectory);
        Assert.Single(copiedFiles);
        Assert.Equal("bundle.txt", Path.GetFileName(copiedFiles[0]));
        Assert.Equal("artifact-content", await File.ReadAllTextAsync(copiedFiles[0], TestContext.Current.CancellationToken));

        var manifest = await ReadManifestAsync(runPath);
        Assert.Single(manifest.Outputs);
        Assert.Equal("copied", manifest.Outputs[0].Status);
        Assert.Equal("files/bundle.txt", manifest.Outputs[0].StoredPath);
    }

    [Fact]
    public async Task RecordsMissingFilesWithoutThrowing()
    {
        var uploader = CreateUploader();
        var output = CreateFileOutput("missing", Path.Combine(Path.GetTempPath(), "does-not-exist.txt"));
        var context = CreateContext();
        var state = CreateState(context);

        await uploader.UploadAsync(context, state, new[] { output }, TestContext.Current.CancellationToken);

        var manifest = await ReadManifestAsync(Path.Combine(artifactsRoot, context.RunId));
        Assert.Equal("missing", manifest.Outputs[0].Status);
    }

    [Fact]
    public async Task WritesExpressionOutputsAsJson()
    {
        var uploader = CreateUploader();
        var output = CreateExpressionOutput("metadata", JsonNode.Parse("""{"foo":"bar"}""")!);
        var context = CreateContext();
        var state = CreateState(context);

        await uploader.UploadAsync(context, state, new[] { output }, TestContext.Current.CancellationToken);

        var expressionPath = Path.Combine(artifactsRoot, context.RunId, "expressions", "metadata.json");
        Assert.True(File.Exists(expressionPath));

        var manifest = await ReadManifestAsync(Path.Combine(artifactsRoot, context.RunId));
        Assert.Equal("materialized", manifest.Outputs[0].Status);
        Assert.Equal("expressions/metadata.json", manifest.Outputs[0].StoredPath);
    }

    private FilesystemPackRunArtifactUploader CreateUploader()
        => new(artifactsRoot, TimeProvider.System, NullLogger<FilesystemPackRunArtifactUploader>.Instance);

    private static TaskPackPlanOutput CreateFileOutput(string name, string path)
        => new(
            name,
            Type: "file",
            Path: new TaskPackPlanParameterValue(JsonValue.Create(path), null, null, false),
            Expression: null);

    private static TaskPackPlanOutput CreateExpressionOutput(string name, JsonNode expression)
        => new(
            name,
            Type: "object",
            Path: null,
            Expression: new TaskPackPlanParameterValue(expression, null, null, false));

    private static PackRunExecutionContext CreateContext()
        => new("run-" + Guid.NewGuid().ToString("n"), CreatePlan(), DateTimeOffset.UtcNow);

    private static PackRunState CreateState(PackRunExecutionContext context)
        => PackRunState.Create(
            runId: context.RunId,
            planHash: context.Plan.Hash,
            context.Plan,
            failurePolicy: new TaskPackPlanFailurePolicy(1, 1, false),
            requestedAt: DateTimeOffset.UtcNow,
            steps: new Dictionary<string, PackRunStepStateRecord>(StringComparer.Ordinal),
            timestamp: DateTimeOffset.UtcNow);

    private static TaskPackPlan CreatePlan()
    {
        return new TaskPackPlan(
            new TaskPackPlanMetadata("sample-pack", "1.0.0", null, Array.Empty<string>()),
            new Dictionary<string, JsonNode?>(StringComparer.Ordinal),
            Array.Empty<TaskPackPlanStep>(),
            hash: "hash",
            approvals: Array.Empty<TaskPackPlanApproval>(),
            secrets: Array.Empty<TaskPackPlanSecret>(),
            outputs: Array.Empty<TaskPackPlanOutput>(),
            failurePolicy: new TaskPackPlanFailurePolicy(1, 1, false));
    }

    private static async Task<ArtifactManifestModel> ReadManifestAsync(string runPath)
    {
        var json = await File.ReadAllTextAsync(Path.Combine(runPath, "artifact-manifest.json"), TestContext.Current.CancellationToken);
        return JsonSerializer.Deserialize<ArtifactManifestModel>(json, new JsonSerializerOptions(JsonSerializerDefaults.Web))!;
    }

    public void Dispose()
    {
        if (Directory.Exists(artifactsRoot))
        {
            Directory.Delete(artifactsRoot, recursive: true);
        }
    }

    private sealed record ArtifactManifestModel(string RunId, DateTimeOffset UploadedAt, List<ArtifactRecordModel> Outputs);

    private sealed record ArtifactRecordModel(string Name, string Type, string? SourcePath, string? StoredPath, string Status, string? Notes);
}
@@ -51,7 +51,13 @@ builder.Services.AddSingleton<IPackRunStepExecutor, NoopPackRunStepExecutor>();
builder.Services.AddSingleton<PackRunExecutionGraphBuilder>();
builder.Services.AddSingleton<PackRunSimulationEngine>();
builder.Services.AddSingleton<PackRunProcessor>();
builder.Services.AddSingleton<IPackRunArtifactUploader, LoggingPackRunArtifactUploader>();
builder.Services.AddSingleton<IPackRunArtifactUploader>(sp =>
{
var options = sp.GetRequiredService<IOptions<PackRunWorkerOptions>>().Value;
var timeProvider = sp.GetService<TimeProvider>();
var logger = sp.GetRequiredService<ILogger<FilesystemPackRunArtifactUploader>>();
return new FilesystemPackRunArtifactUploader(options.ArtifactsPath, timeProvider, logger);
});
builder.Services.AddHostedService<PackRunWorkerService>();

var host = builder.Build();

@@ -4,11 +4,13 @@ public sealed class PackRunWorkerOptions
{
public TimeSpan IdleDelay { get; set; } = TimeSpan.FromSeconds(1);

public string QueuePath { get; set; } = Path.Combine(AppContext.BaseDirectory, "queue");

public string ArchivePath { get; set; } = Path.Combine(AppContext.BaseDirectory, "queue", "archive");

public string ApprovalStorePath { get; set; } = Path.Combine(AppContext.BaseDirectory, "approvals");

public string RunStatePath { get; set; } = Path.Combine(AppContext.BaseDirectory, "state", "runs");

public string ArtifactsPath { get; set; } = Path.Combine(AppContext.BaseDirectory, "artifacts");
}

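The diff does not show where `PackRunWorkerOptions` is bound to configuration; assuming a conventional options binding (the `PackRunWorker` section name is hypothetical), overriding `ArtifactsPath` would redirect the filesystem uploader registered above without code changes:

```csharp
using Microsoft.Extensions.Configuration;
using Microsoft.Extensions.DependencyInjection;

// Hypothetical binding helper; PackRunWorkerOptions is the type shown in the hunk above.
static void BindPackRunWorkerOptions(IServiceCollection services, IConfiguration configuration)
    => services.Configure<PackRunWorkerOptions>(configuration.GetSection("PackRunWorker"));
```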
@@ -1,5 +1,6 @@
using Google.Cloud.Kms.V1;
using Google.Protobuf;
using Google.Protobuf.WellKnownTypes;

namespace StellaOps.Cryptography.Kms;

@@ -271,7 +271,7 @@ internal sealed class Pkcs11InteropFacade : IPkcs11Facade
}
catch
{
# ignore logout failures
// ignore logout failures
}
}

@@ -64,9 +64,8 @@ public sealed class Pkcs11Options
/// <summary>
/// Gets or sets an optional factory for advanced facade injection (testing, custom providers).
/// </summary>
public Func<IServiceProvider, IPkcs11Facade>? FacadeFactory { get; set; }
internal Func<IServiceProvider, IPkcs11Facade>? FacadeFactory { get; set; }

private static TimeSpan EnsurePositive(TimeSpan value, TimeSpan fallback)
=> value <= TimeSpan.Zero ? fallback : value;
}